| repo_name (stringlengths 6-101) | path (stringlengths 4-300) | text (stringlengths 7-1.31M) |
|---|---|---|
Qasak/csapp-notes-and_labs
|
all-code/code/netp/dd2hex.c
|
<gh_stars>1-10
/* $begin dd2hex */
#include "csapp.h"
int main(int argc, char **argv)
{
struct in_addr inaddr; /* Address in network byte order */
int rc;
if (argc != 2) {
fprintf(stderr, "usage: %s <dotted-decimal>\n", argv[0]);
exit(0);
}
rc = inet_pton(AF_INET, argv[1], &inaddr);
if (rc == 0)
app_error("inet_pton error: invalid dotted-decimal address");
else if (rc < 0)
unix_error("inet_pton error");
printf("0x%x\n", ntohl(inaddr.s_addr));
exit(0);
}
/* $end dd2hex */
|
TearsOfKhyber/RPKit
|
bukkit/rpk-languages-bukkit/src/main/java/com/rpkit/languages/bukkit/database/jooq/rpkit/tables/RpkitCharacterLanguage.java
|
<reponame>TearsOfKhyber/RPKit
/*
* Copyright 2020 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* This file is generated by jOOQ.
*/
package com.rpkit.languages.bukkit.database.jooq.rpkit.tables;
import com.rpkit.languages.bukkit.database.jooq.rpkit.Rpkit;
import com.rpkit.languages.bukkit.database.jooq.rpkit.tables.records.RpkitCharacterLanguageRecord;
import org.jooq.Field;
import org.jooq.ForeignKey;
import org.jooq.Name;
import org.jooq.Record;
import org.jooq.Row4;
import org.jooq.Schema;
import org.jooq.Table;
import org.jooq.TableField;
import org.jooq.TableOptions;
import org.jooq.impl.DSL;
import org.jooq.impl.TableImpl;
/**
* This class is generated by jOOQ.
*/
@SuppressWarnings({ "all", "unchecked", "rawtypes" })
public class RpkitCharacterLanguage extends TableImpl<RpkitCharacterLanguageRecord> {
private static final long serialVersionUID = -1241393567;
/**
* The reference instance of <code>rpkit.rpkit_character_language</code>
*/
public static final RpkitCharacterLanguage RPKIT_CHARACTER_LANGUAGE = new RpkitCharacterLanguage();
/**
* The class holding records for this type
*/
@Override
public Class<RpkitCharacterLanguageRecord> getRecordType() {
return RpkitCharacterLanguageRecord.class;
}
/**
* The column <code>rpkit.rpkit_character_language.id</code>.
*/
public final TableField<RpkitCharacterLanguageRecord, Integer> ID = createField(DSL.name("id"), org.jooq.impl.SQLDataType.INTEGER.nullable(false), this, "");
/**
* The column <code>rpkit.rpkit_character_language.character_id</code>.
*/
public final TableField<RpkitCharacterLanguageRecord, Integer> CHARACTER_ID = createField(DSL.name("character_id"), org.jooq.impl.SQLDataType.INTEGER.nullable(false), this, "");
/**
* The column <code>rpkit.rpkit_character_language.language_name</code>.
*/
public final TableField<RpkitCharacterLanguageRecord, String> LANGUAGE_NAME = createField(DSL.name("language_name"), org.jooq.impl.SQLDataType.VARCHAR(256).nullable(false), this, "");
/**
* The column <code>rpkit.rpkit_character_language.understanding</code>.
*/
public final TableField<RpkitCharacterLanguageRecord, Double> UNDERSTANDING = createField(DSL.name("understanding"), org.jooq.impl.SQLDataType.DOUBLE.nullable(false), this, "");
/**
* Create a <code>rpkit.rpkit_character_language</code> table reference
*/
public RpkitCharacterLanguage() {
this(DSL.name("rpkit_character_language"), null);
}
/**
* Create an aliased <code>rpkit.rpkit_character_language</code> table reference
*/
public RpkitCharacterLanguage(String alias) {
this(DSL.name(alias), RPKIT_CHARACTER_LANGUAGE);
}
/**
* Create an aliased <code>rpkit.rpkit_character_language</code> table reference
*/
public RpkitCharacterLanguage(Name alias) {
this(alias, RPKIT_CHARACTER_LANGUAGE);
}
private RpkitCharacterLanguage(Name alias, Table<RpkitCharacterLanguageRecord> aliased) {
this(alias, aliased, null);
}
private RpkitCharacterLanguage(Name alias, Table<RpkitCharacterLanguageRecord> aliased, Field<?>[] parameters) {
super(alias, null, aliased, parameters, DSL.comment(""), TableOptions.table());
}
public <O extends Record> RpkitCharacterLanguage(Table<O> child, ForeignKey<O, RpkitCharacterLanguageRecord> key) {
super(child, key, RPKIT_CHARACTER_LANGUAGE);
}
@Override
public Schema getSchema() {
return Rpkit.RPKIT;
}
@Override
public RpkitCharacterLanguage as(String alias) {
return new RpkitCharacterLanguage(DSL.name(alias), this);
}
@Override
public RpkitCharacterLanguage as(Name alias) {
return new RpkitCharacterLanguage(alias, this);
}
/**
* Rename this table
*/
@Override
public RpkitCharacterLanguage rename(String name) {
return new RpkitCharacterLanguage(DSL.name(name), null);
}
/**
* Rename this table
*/
@Override
public RpkitCharacterLanguage rename(Name name) {
return new RpkitCharacterLanguage(name, null);
}
// -------------------------------------------------------------------------
// Row4 type methods
// -------------------------------------------------------------------------
@Override
public Row4<Integer, Integer, String, Double> fieldsRow() {
return (Row4) super.fieldsRow();
}
}
|
SpontaneousCMS/spontaneous
|
lib/spontaneous/asset/app_compiler.rb
|
<filename>lib/spontaneous/asset/app_compiler.rb
require 'sprockets'
require 'uglifier'
require 'sass'
module Spontaneous::Asset
# Takes assets from a source directory & compiles them to some destination directory.
# This is deliberately dumb about the path to the gem and the destination
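# Typical use (paths here are illustrative): AppCompiler.new(gem_root, site_root, :compress => true).compile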
class AppCompiler
attr_reader :environment, :manifest
def initialize(gem_path, dest_path, options = {})
@options = {:compress => true}.merge(options)
@gem_path = gem_path
@dest_path = dest_path
@environment = Sprockets::Environment.new(gem_path / "application" )
@manifest = Sprockets::Manifest.new(@environment, @dest_path / "public/@spontaneous/assets")
@environment.append_path(gem_path / "application/js")
@environment.append_path(gem_path / "application/css")
if @options[:compress]
@environment.register_bundle_processor "application/javascript", :uglifier do |context, data|
Uglifier.compile(data)
end
@environment.register_bundle_processor "text/css", :sass_compressor do |context, css|
# By this point the SCSS has already been compiled, so SASS is merely a CSS compressor
# and I can ignore crap around loadpaths or filenames.
engine = ::Sass::Engine.new(css, :style => :compressed, :syntax => :scss, :quiet => true, :custom => { :resolver => self })
engine.render
end
end
end
def compile
@manifest.compile("spontaneous.js", "login.js", "require.js", "vendor/jquery.js", "spontaneous.css")
end
def image_path(path)
path
end
end
end
|
grovyle/schema-gen-greendao
|
gen/com/company/professor/PalParkDao.java
|
<gh_stars>0
package com.company.professor;
import java.util.List;
import java.util.ArrayList;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteStatement;
import de.greenrobot.dao.AbstractDao;
import de.greenrobot.dao.Property;
import de.greenrobot.dao.internal.SqlUtils;
import de.greenrobot.dao.internal.DaoConfig;
import com.company.professor.PalPark;
// THIS CODE IS GENERATED BY greenDAO, DO NOT EDIT.
/**
* DAO for table "pal_park".
*/
public class PalParkDao extends AbstractDao<PalPark, Long> {
public static final String TABLENAME = "pal_park";
/**
* Properties of entity PalPark.<br/>
* Can be used for QueryBuilder and for referencing column names.
*/
public static class Properties {
public final static Property SpeciesId = new Property(0, long.class, "SpeciesId", true, "species_id");
public final static Property AreaId = new Property(1, long.class, "AreaId", false, "area_id");
public final static Property BaseScore = new Property(2, long.class, "BaseScore", false, "base_score");
public final static Property Rate = new Property(3, long.class, "Rate", false, "rate");
};
private DaoSession daoSession;
public PalParkDao(DaoConfig config) {
super(config);
}
public PalParkDao(DaoConfig config, DaoSession daoSession) {
super(config, daoSession);
this.daoSession = daoSession;
}
/** Creates the underlying database table. */
public static void createTable(SQLiteDatabase db, boolean ifNotExists) {
String constraint = ifNotExists? "IF NOT EXISTS ": "";
db.execSQL("CREATE TABLE " + constraint + "\"pal_park\" (" + //
"\"species_id\" INTEGER PRIMARY KEY NOT NULL ," + // 0: SpeciesId
"\"area_id\" INTEGER NOT NULL ," + // 1: AreaId
"\"base_score\" INTEGER NOT NULL ," + // 2: BaseScore
"\"rate\" INTEGER NOT NULL );"); // 3: Rate
}
/** Drops the underlying database table. */
public static void dropTable(SQLiteDatabase db, boolean ifExists) {
String sql = "DROP TABLE " + (ifExists ? "IF EXISTS " : "") + "\"pal_park\"";
db.execSQL(sql);
}
/** @inheritdoc */
@Override
protected void bindValues(SQLiteStatement stmt, PalPark entity) {
stmt.clearBindings();
stmt.bindLong(1, entity.getSpeciesId());
stmt.bindLong(2, entity.getAreaId());
stmt.bindLong(3, entity.getBaseScore());
stmt.bindLong(4, entity.getRate());
}
@Override
protected void attachEntity(PalPark entity) {
super.attachEntity(entity);
entity.__setDaoSession(daoSession);
}
/** @inheritdoc */
@Override
public Long readKey(Cursor cursor, int offset) {
return cursor.getLong(offset + 0);
}
/** @inheritdoc */
@Override
public PalPark readEntity(Cursor cursor, int offset) {
PalPark entity = new PalPark( //
cursor.getLong(offset + 0), // SpeciesId
cursor.getLong(offset + 1), // AreaId
cursor.getLong(offset + 2), // BaseScore
cursor.getLong(offset + 3) // Rate
);
return entity;
}
/** @inheritdoc */
@Override
public void readEntity(Cursor cursor, PalPark entity, int offset) {
entity.setSpeciesId(cursor.getLong(offset + 0));
entity.setAreaId(cursor.getLong(offset + 1));
entity.setBaseScore(cursor.getLong(offset + 2));
entity.setRate(cursor.getLong(offset + 3));
}
/** @inheritdoc */
@Override
protected Long updateKeyAfterInsert(PalPark entity, long rowId) {
entity.setSpeciesId(rowId);
return rowId;
}
/** @inheritdoc */
@Override
public Long getKey(PalPark entity) {
if(entity != null) {
return entity.getSpeciesId();
} else {
return null;
}
}
/** @inheritdoc */
@Override
protected boolean isEntityUpdateable() {
return true;
}
private String selectDeep;
protected String getSelectDeep() {
if (selectDeep == null) {
StringBuilder builder = new StringBuilder("SELECT ");
SqlUtils.appendColumns(builder, "T", getAllColumns());
builder.append(',');
SqlUtils.appendColumns(builder, "T0", daoSession.getPokemonSpeciesDao().getAllColumns());
builder.append(',');
SqlUtils.appendColumns(builder, "T1", daoSession.getPalParkAreasDao().getAllColumns());
builder.append(" FROM pal_park T");
builder.append(" LEFT JOIN pokemon_species T0 ON T.\"species_id\"=T0.\"id\"");
builder.append(" LEFT JOIN pal_park_areas T1 ON T.\"area_id\"=T1.\"id\"");
builder.append(' ');
selectDeep = builder.toString();
}
return selectDeep;
}
protected PalPark loadCurrentDeep(Cursor cursor, boolean lock) {
PalPark entity = loadCurrent(cursor, 0, lock);
int offset = getAllColumns().length;
PokemonSpecies PokemonSpecies = loadCurrentOther(daoSession.getPokemonSpeciesDao(), cursor, offset);
if(PokemonSpecies != null) {
entity.setPokemonSpecies(PokemonSpecies);
}
offset += daoSession.getPokemonSpeciesDao().getAllColumns().length;
PalParkAreas PalParkAreas = loadCurrentOther(daoSession.getPalParkAreasDao(), cursor, offset);
if(PalParkAreas != null) {
entity.setPalParkAreas(PalParkAreas);
}
return entity;
}
public PalPark loadDeep(Long key) {
assertSinglePk();
if (key == null) {
return null;
}
StringBuilder builder = new StringBuilder(getSelectDeep());
builder.append("WHERE ");
SqlUtils.appendColumnsEqValue(builder, "T", getPkColumns());
String sql = builder.toString();
String[] keyArray = new String[] { key.toString() };
Cursor cursor = db.rawQuery(sql, keyArray);
try {
boolean available = cursor.moveToFirst();
if (!available) {
return null;
} else if (!cursor.isLast()) {
throw new IllegalStateException("Expected unique result, but count was " + cursor.getCount());
}
return loadCurrentDeep(cursor, true);
} finally {
cursor.close();
}
}
/** Reads all available rows from the given cursor and returns a list of new ImageTO objects. */
public List<PalPark> loadAllDeepFromCursor(Cursor cursor) {
int count = cursor.getCount();
List<PalPark> list = new ArrayList<PalPark>(count);
if (cursor.moveToFirst()) {
if (identityScope != null) {
identityScope.lock();
identityScope.reserveRoom(count);
}
try {
do {
list.add(loadCurrentDeep(cursor, false));
} while (cursor.moveToNext());
} finally {
if (identityScope != null) {
identityScope.unlock();
}
}
}
return list;
}
protected List<PalPark> loadDeepAllAndCloseCursor(Cursor cursor) {
try {
return loadAllDeepFromCursor(cursor);
} finally {
cursor.close();
}
}
/** A raw-style query where you can pass any WHERE clause and arguments. */
public List<PalPark> queryDeep(String where, String... selectionArg) {
Cursor cursor = db.rawQuery(getSelectDeep() + where, selectionArg);
return loadDeepAllAndCloseCursor(cursor);
}
}
|
RogerioY/starshatter-open
|
Doc/doxygen/html/_mod_info_dlg_8cpp.js
|
var _mod_info_dlg_8cpp =
[
[ "DEF_MAP_CLIENT", "_mod_info_dlg_8cpp.html#ab4e0f01db84a4f79d3ae7f829d36add5", null ]
];
|
Y-D-Lu/rr_frameworks_base
|
packages/SystemUI/src/com/android/systemui/qs/tiles/UserDetailItemView.java
|
<filename>packages/SystemUI/src/com/android/systemui/qs/tiles/UserDetailItemView.java
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package com.android.systemui.qs.tiles;
import android.content.Context;
import android.content.res.Configuration;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.Typeface;
import android.graphics.drawable.Drawable;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.android.internal.util.ArrayUtils;
import com.android.settingslib.drawable.UserIconDrawable;
import com.android.systemui.FontSizeUtils;
import com.android.systemui.R;
import com.android.systemui.statusbar.phone.UserAvatarView;
/**
* Displays one user in the {@link UserDetailView} view.
*/
public class UserDetailItemView extends LinearLayout {
protected static int layoutResId = R.layout.qs_user_detail_item;
private UserAvatarView mAvatar;
private TextView mName;
private Typeface mRegularTypeface;
private Typeface mActivatedTypeface;
private View mRestrictedPadlock;
public UserDetailItemView(Context context) {
this(context, null);
}
public UserDetailItemView(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
public UserDetailItemView(Context context, AttributeSet attrs, int defStyleAttr) {
this(context, attrs, defStyleAttr, 0);
}
public UserDetailItemView(Context context, AttributeSet attrs, int defStyleAttr,
int defStyleRes) {
super(context, attrs, defStyleAttr, defStyleRes);
final TypedArray a = context.obtainStyledAttributes(
attrs, R.styleable.UserDetailItemView, defStyleAttr, defStyleRes);
final int N = a.getIndexCount();
for (int i = 0; i < N; i++) {
int attr = a.getIndex(i);
switch (attr) {
case R.styleable.UserDetailItemView_regularFontFamily:
mRegularTypeface = Typeface.create(a.getString(attr), 0 /* style */);
break;
case R.styleable.UserDetailItemView_activatedFontFamily:
mActivatedTypeface = Typeface.create(a.getString(attr), 0 /* style */);
break;
}
}
a.recycle();
}
public static UserDetailItemView convertOrInflate(Context context, View convertView,
ViewGroup root) {
if (!(convertView instanceof UserDetailItemView)) {
convertView = LayoutInflater.from(context).inflate(
layoutResId, root, false);
}
return (UserDetailItemView) convertView;
}
public void bind(String name, Bitmap picture, int userId) {
mName.setText(name);
mAvatar.setAvatarWithBadge(picture, userId);
}
public void bind(String name, Drawable picture, int userId) {
mName.setText(name);
mAvatar.setDrawableWithBadge(picture, userId);
}
public void setAvatarEnabled(boolean enabled) {
mAvatar.setEnabled(enabled);
}
public void setDisabledByAdmin(boolean disabled) {
mRestrictedPadlock.setVisibility(disabled ? View.VISIBLE : View.GONE);
mName.setEnabled(!disabled);
mAvatar.setEnabled(!disabled);
}
public void setEnabled(boolean enabled) {
mName.setEnabled(enabled);
mAvatar.setEnabled(enabled);
}
@Override
protected void onFinishInflate() {
mAvatar = findViewById(R.id.user_picture);
mName = findViewById(R.id.user_name);
if (mRegularTypeface == null) {
mRegularTypeface = mName.getTypeface();
}
if (mActivatedTypeface == null) {
mActivatedTypeface = mName.getTypeface();
}
updateTypeface();
mRestrictedPadlock = findViewById(R.id.restricted_padlock);
}
@Override
protected void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
FontSizeUtils.updateFontSize(mName, R.dimen.qs_detail_item_secondary_text_size);
}
@Override
protected void drawableStateChanged() {
super.drawableStateChanged();
updateTypeface();
}
private void updateTypeface() {
boolean activated = ArrayUtils.contains(getDrawableState(), android.R.attr.state_activated);
mName.setTypeface(activated ? mActivatedTypeface : mRegularTypeface);
}
@Override
public boolean hasOverlappingRendering() {
return false;
}
}
|
egch/coding-challenges
|
src/main/java/org/enricogiurin/codingchallenges/leetcode/challenge022021/SearchA2DMatrixII.java
|
package org.enricogiurin.codingchallenges.leetcode.challenge022021;
import java.util.Arrays;
/*
Write an efficient algorithm that searches for a target value in an m x n integer matrix. The matrix has the following properties:
Integers in each row are sorted in ascending from left to right.
Integers in each column are sorted in ascending from top to bottom.
Example 1:
Input: matrix = [[1,4,7,11,15],[2,5,8,12,19],[3,6,9,16,22],[10,13,14,17,24],[18,21,23,26,30]], target = 5
Output: true
Example 2:
Input: matrix = [[1,4,7,11,15],[2,5,8,12,19],[3,6,9,16,22],[10,13,14,17,24],[18,21,23,26,30]], target = 20
Output: false
Constraints:
m == matrix.length
n == matrix[i].length
1 <= n, m <= 300
-10^9 <= matrix[i][j] <= 10^9
All the integers in each row are sorted in ascending order.
All the integers in each column are sorted in ascending order.
-10^9 <= target <= 10^9
*/
public class SearchA2DMatrixII {
//my first solution - uses only the fact that each row is sorted
//O(m * log n): binary-search each of the m rows
public boolean searchMatrixMine(int[][] matrix, int target) {
for (int j = 0; j < matrix.length; j++) {
if (Arrays.binarySearch(matrix[j], target) >= 0) {
return true;
}
}
return false;
}
//solution (4) search by row/col
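// Starting from the bottom-left corner, every value above the current cell is smaller
// and every value to its right is larger, so each comparison discards a whole row or
// column: O(m + n) overall.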
public boolean searchMatrix(int[][] matrix, int target) {
// start our "pointer" in the bottom-left
int row = matrix.length - 1;
int col = 0;
while (row >= 0 && col < matrix[0].length) {
if (matrix[row][col] > target) {
row--;
} else if (matrix[row][col] < target) {
col++;
} else { // found it
return true;
}
}
return false;
}
public static void main(String[] args) {
int[][] matrix = {{1, 2, 3}, {5, 8, 11}, {7, 9, 15}};
int target = 8;
boolean result = new SearchA2DMatrixII().searchMatrix(matrix, target);
System.out.println(result);
}
}
|
YC-S/LeetCode
|
src/all_problems/P1754_LargestMergeOfTwoStrings.java
|
<gh_stars>1-10
package all_problems;
public class P1754_LargestMergeOfTwoStrings {
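// Greedy: at each step take the first character of whichever remaining suffix compares
// lexicographically larger; compareTo on the full suffixes breaks ties correctly when
// the leading characters are equal.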
public String largestMerge(String word1, String word2) {
if (word1.length() == 0 || word2.length() == 0) return word1 + word2;
if (word1.compareTo(word2) > 0)
return word1.charAt(0) + largestMerge(word1.substring(1), word2);
return word2.charAt(0) + largestMerge(word1, word2.substring(1));
}
}
|
denisbakhtin/projectmanager
|
controllers/mocks/task_logs_db_mock.go
|
package mocks
import (
"fmt"
"github.com/denisbakhtin/projectmanager/models"
)
//TaskLogsDBMock is a TaskLogsDB repository mock
type TaskLogsDBMock struct {
TaskLogs []models.TaskLog
}
//Create inserts new record in db
func (r *TaskLogsDBMock) Create(userID uint64, taskLog models.TaskLog) (models.TaskLog, error) {
taskLog.UserID = userID
taskLog.SessionID = 0
taskLog.ID = 111
r.TaskLogs = append(r.TaskLogs, taskLog)
return taskLog, nil
}
//Update updates a taskLog in db
func (r *TaskLogsDBMock) Update(userID uint64, taskLog models.TaskLog) (models.TaskLog, error) {
for i := range r.TaskLogs {
if r.TaskLogs[i].ID == taskLog.ID {
taskLog.UserID = userID
taskLog.SessionID = 0
r.TaskLogs[i] = taskLog
return r.TaskLogs[i], nil
}
}
return models.TaskLog{}, fmt.Errorf("Task log not found")
}
//Latest returns a fixed number of latest task logs
func (r *TaskLogsDBMock) Latest(userID uint64) ([]models.TaskLog, error) {
if len(r.TaskLogs) <= 5 {
return r.TaskLogs[0:len(r.TaskLogs)], nil
}
return r.TaskLogs[0:5], nil
}
|
jingcao80/Elastos
|
Sources/Elastos/Frameworks/Droid/Base/Packages/SystemUI/src/elastos/droid/systemui/qs/tiles/CellularTile.cpp
|
//=========================================================================
// Copyright (C) 2012 The Elastos Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//=========================================================================
#include "elastos/droid/systemui/qs/tiles/CellularTile.h"
#include "elastos/droid/systemui/qs/SignalTileView.h"
#include "../R.h"
#include "Elastos.Droid.View.h"
#include <elastos/droid/view/LayoutInflater.h>
using Elastos::Droid::Content::CComponentName;
using Elastos::Droid::Content::CIntent;
using Elastos::Droid::Content::IComponentName;
using Elastos::Droid::SystemUI::StatusBar::Policy::EIID_INetworkSignalChangedCallback;
using Elastos::Droid::SystemUI::StatusBar::Policy::INetworkControllerDataUsageInfo;
using Elastos::Droid::View::ILayoutInflater;
using Elastos::Droid::View::LayoutInflater;
using Elastos::Core::CBoolean;
using Elastos::Core::CString;
using Elastos::Core::ICharSequence;
namespace Elastos {
namespace Droid {
namespace SystemUI {
namespace Qs {
namespace Tiles {
static AutoPtr<IIntent> InitCELLULAR_SETTINGS()
{
AutoPtr<IIntent> intent;
CIntent::New((IIntent**)&intent);
AutoPtr<IComponentName> cn;
CComponentName::New(
String("Elastos.Droid.Settings"),
String("Elastos.Droid.Settings.CSettingsDataUsageSummaryActivity"), (IComponentName**)&cn);
intent->SetComponent(cn);
return intent;
}
AutoPtr<IIntent> CellularTile::CELLULAR_SETTINGS = InitCELLULAR_SETTINGS();
CellularTile::CallbackInfo::CallbackInfo()
: mEnabled(FALSE)
, mWifiEnabled(FALSE)
, mWifiConnected(FALSE)
, mAirplaneModeEnabled(FALSE)
, mMobileSignalIconId(0)
, mDataTypeIconId(0)
, mActivityIn(FALSE)
, mActivityOut(FALSE)
, mNoSim(FALSE)
, mIsDataTypeIconWide(FALSE)
{}
CAR_INTERFACE_IMPL(CellularTile::NetworkSignalChangedCallback, Object, INetworkSignalChangedCallback)
CellularTile::NetworkSignalChangedCallback::NetworkSignalChangedCallback(
/* [in] */ CellularTile* host)
: mHost(host)
, mWifiEnabled(FALSE)
, mWifiConnected(FALSE)
, mAirplaneModeEnabled(FALSE)
{}
ECode CellularTile::NetworkSignalChangedCallback::OnWifiSignalChanged(
/* [in] */ Boolean enabled,
/* [in] */ Boolean connected,
/* [in] */ Int32 wifiSignalIconId,
/* [in] */ Boolean activityIn,
/* [in] */ Boolean activityOut,
/* [in] */ const String& wifiSignalContentDescriptionId,
/* [in] */ const String& description)
{
mWifiEnabled = enabled;
mWifiConnected = connected;
return NOERROR;
}
ECode CellularTile::NetworkSignalChangedCallback::OnMobileDataSignalChanged(
/* [in] */ Boolean enabled,
/* [in] */ Int32 mobileSignalIconId,
/* [in] */ const String& mobileSignalContentDescriptionId,
/* [in] */ Int32 dataTypeIconId,
/* [in] */ Boolean activityIn,
/* [in] */ Boolean activityOut,
/* [in] */ const String& dataTypeContentDescriptionId,
/* [in] */ const String& description,
/* [in] */ Boolean noSim,
/* [in] */ Boolean isDataTypeIconWide)
{
AutoPtr<CallbackInfo> info = new CallbackInfo(); // TODO pool?
info->mEnabled = enabled;
info->mWifiEnabled = mWifiEnabled;
info->mWifiConnected = mWifiConnected;
info->mAirplaneModeEnabled = mAirplaneModeEnabled;
info->mMobileSignalIconId = mobileSignalIconId;
info->mSignalContentDescription = mobileSignalContentDescriptionId;
info->mDataTypeIconId = dataTypeIconId;
info->mDataContentDescription = dataTypeContentDescriptionId;
info->mActivityIn = activityIn;
info->mActivityOut = activityOut;
info->mEnabledDesc = description;
info->mNoSim = noSim;
info->mIsDataTypeIconWide = isDataTypeIconWide;
mHost->RefreshState(info->Probe(EIID_IInterface));
return NOERROR;
}
ECode CellularTile::NetworkSignalChangedCallback::OnAirplaneModeChanged(
/* [in] */ Boolean enabled)
{
mAirplaneModeEnabled = enabled;
return NOERROR;
}
ECode CellularTile::NetworkSignalChangedCallback::OnMobileDataEnabled(
/* [in] */ Boolean enabled)
{
mHost->mDetailAdapter->SetMobileDataEnabled(enabled);
return NOERROR;
}
CAR_INTERFACE_IMPL(CellularTile::CellularDetailAdapter, Object, IQSTileDetailAdapter)
CellularTile::CellularDetailAdapter::CellularDetailAdapter(
/* [in] */ CellularTile* host)
: mHost(host)
{}
ECode CellularTile::CellularDetailAdapter::GetTitle(
/* [out] */ Int32* result)
{
VALIDATE_NOT_NULL(result);
*result = R::string::quick_settings_cellular_detail_title;
return NOERROR;
}
ECode CellularTile::CellularDetailAdapter::GetToggleState(
/* [out] */ IBoolean** result)
{
VALIDATE_NOT_NULL(result);
Boolean tmp = FALSE;
mHost->mController->IsMobileDataSupported(&tmp);
if (tmp) {
mHost->mController->IsMobileDataEnabled(&tmp);
return CBoolean::New(tmp, result);
}
*result = NULL;
return NOERROR;
}
ECode CellularTile::CellularDetailAdapter::GetSettingsIntent(
/* [out] */ IIntent** result)
{
VALIDATE_NOT_NULL(result);
*result = CELLULAR_SETTINGS;
REFCOUNT_ADD(*result);
return NOERROR;
}
ECode CellularTile::CellularDetailAdapter::SetToggleState(
/* [in] */ IBoolean* state)
{
Boolean v = FALSE;
state->GetValue(&v);
mHost->mController->SetMobileDataEnabled(v);
return NOERROR;
}
ECode CellularTile::CellularDetailAdapter::CreateDetailView(
/* [in] */ IContext* context,
/* [in] */ IView* convertView,
/* [in] */ IViewGroup* parent,
/* [out] */ IView** result)
{
VALIDATE_NOT_NULL(result);
AutoPtr<IDataUsageDetailView> v;
if (convertView != NULL) {
v = IDataUsageDetailView::Probe(convertView);
}
else {
AutoPtr<ILayoutInflater> inflater;
LayoutInflater::From(mHost->mContext, (ILayoutInflater**)&inflater);
AutoPtr<IView> view;
inflater->Inflate(R::layout::data_usage, parent, FALSE, (IView**)&view);
v = IDataUsageDetailView::Probe(view);
}
AutoPtr<INetworkControllerDataUsageInfo> info;
mHost->mController->GetDataUsageInfo((INetworkControllerDataUsageInfo**)&info);
if (info == NULL) {
*result = IView::Probe(v);
REFCOUNT_ADD(*result);
return NOERROR;
}
v->Bind(info);
*result = IView::Probe(v);
REFCOUNT_ADD(*result);
return NOERROR;
}
ECode CellularTile::CellularDetailAdapter::SetMobileDataEnabled(
/* [in] */ Boolean enabled)
{
mHost->FireToggleStateChanged(enabled);
return NOERROR;
}
ECode CellularTile::constructor(
/* [in] */ IQSTileHost* host)
{
QSTile::constructor(host);
mCallback = new NetworkSignalChangedCallback(this);
host->GetNetworkController((INetworkController**)&mController);
mDetailAdapter = new CellularDetailAdapter(this);
return NOERROR;
}
AutoPtr<QSTile::State> CellularTile::NewTileState()
{
return new SignalState();
}
ECode CellularTile::GetDetailAdapter(
/* [out] */ IQSTileDetailAdapter** result)
{
VALIDATE_NOT_NULL(result);
*result = mDetailAdapter;
REFCOUNT_ADD(*result);
return NOERROR;
}
ECode CellularTile::SetListening(
/* [in] */ Boolean listening)
{
if (listening) {
mController->AddNetworkSignalChangedCallback(mCallback);
}
else {
mController->RemoveNetworkSignalChangedCallback(mCallback);
}
return NOERROR;
}
ECode CellularTile::CreateTileView(
/* [in] */ IContext* context,
/* [out] */ IQSTileView** result)
{
VALIDATE_NOT_NULL(result);
*result = new SignalTileView(context);
REFCOUNT_ADD(*result);
return NOERROR;
}
void CellularTile::HandleClick()
{
Boolean tmp = FALSE;
if (mController->IsMobileDataSupported(&tmp), tmp) {
ShowDetail(TRUE);
}
else {
mHost->StartSettingsActivity(CELLULAR_SETTINGS);
}
}
void CellularTile::HandleUpdateState(
/* [in] */ State* state,
/* [in] */ IInterface* arg)
{
mController->HasMobileDataFeature(&state->mVisible);
if (!state->mVisible) return;
CallbackInfo* cb = (CallbackInfo*)IObject::Probe(arg);
if (cb == NULL) return;
AutoPtr<IResources> r;
mContext->GetResources((IResources**)&r);
state->mIconId = cb->mNoSim ? R::drawable::ic_qs_no_sim
: !cb->mEnabled || cb->mAirplaneModeEnabled ? R::drawable::ic_qs_signal_disabled
: cb->mMobileSignalIconId > 0 ? cb->mMobileSignalIconId
: R::drawable::ic_qs_signal_no_signal;
((SignalState*)state)->mIsOverlayIconWide = cb->mIsDataTypeIconWide;
state->mAutoMirrorDrawable = !cb->mNoSim;
((SignalState*)state)->mOverlayIconId = cb->mEnabled && (cb->mDataTypeIconId > 0) && !cb->mWifiConnected
? cb->mDataTypeIconId
: 0;
((SignalState*)state)->mFilter = state->mIconId != R::drawable::ic_qs_no_sim;
((SignalState*)state)->mActivityIn = cb->mEnabled && cb->mActivityIn;
((SignalState*)state)->mActivityOut = cb->mEnabled && cb->mActivityOut;
String str;
state->mLabel = cb->mEnabled
? RemoveTrailingPeriod(cb->mEnabledDesc)
: (r->GetString(R::string::quick_settings_rssi_emergency_only, &str), str);
String signalContentDesc = cb->mEnabled && (cb->mMobileSignalIconId > 0)
? cb->mSignalContentDescription
: (r->GetString(R::string::accessibility_no_signal, &str), str);
String dataContentDesc = cb->mEnabled && (cb->mDataTypeIconId > 0) && !cb->mWifiEnabled
? cb->mDataContentDescription
: (r->GetString(R::string::accessibility_no_data, &str), str);
AutoPtr<ArrayOf<IInterface*> > objs = ArrayOf<IInterface*>::Alloc(3);
AutoPtr<ICharSequence> obj;
CString::New(signalContentDesc, (ICharSequence**)&obj);
objs->Set(0, obj);
obj = NULL;
CString::New(dataContentDesc, (ICharSequence**)&obj);
objs->Set(1, obj);
obj = NULL;
CString::New(state->mLabel, (ICharSequence**)&obj);
objs->Set(2, obj);
r->GetString(R::string::accessibility_quick_settings_mobile, objs, &state->mContentDescription);
}
// Remove the period from the network name
String CellularTile::RemoveTrailingPeriod(
/* [in] */ const String& string)
{
if (string == NULL) return String(NULL);
const Int32 length = string.GetLength();
if (string.EndWith(".")) {
return string.Substring(0, length - 1);
}
return string;
}
String CellularTile::GetSimpleName()
{
return String("CellularTile");
}
} // namespace Tiles
} // namespace Qs
} // namespace SystemUI
} // namespace Droid
} // namespace Elastos
|
xichen2020/eventdb
|
values/decoding/default_filtered_fs_value_iterator.gen.go
|
// This file was automatically generated by genny.
// Any changes will be lost if this file is regenerated.
// see https://github.com/mauricelam/genny
package decoding
import (
"fmt"
"github.com/xichen2020/eventdb/document/field"
"github.com/xichen2020/eventdb/filter"
"github.com/xichen2020/eventdb/values/iterator"
iterimpl "github.com/xichen2020/eventdb/values/iterator/impl"
)
// defaultFilteredFsBasedBoolValueIterator creates a default bool value iterator.
func defaultFilteredFsBasedBoolValueIterator(
values *fsBasedBoolValues,
op filter.Op,
filterValue *field.ValueUnion,
) (iterator.PositionIterator, error) {
flt, err := op.BoolFilter(filterValue)
if err != nil {
return nil, fmt.Errorf("invalid bool filter op %v with filter value %v", op, filterValue)
}
valuesIt, err := values.Iter()
if err != nil {
return nil, err
}
return iterimpl.NewFilteredBoolIterator(valuesIt, flt), nil
}
// defaultFilteredFsBasedIntValueIterator creates a default int value iterator.
func defaultFilteredFsBasedIntValueIterator(
values *fsBasedIntValues,
op filter.Op,
filterValue *field.ValueUnion,
) (iterator.PositionIterator, error) {
flt, err := op.IntFilter(filterValue)
if err != nil {
return nil, fmt.Errorf("invalid int filter op %v with filter value %v", op, filterValue)
}
valuesIt, err := values.Iter()
if err != nil {
return nil, err
}
return iterimpl.NewFilteredIntIterator(valuesIt, flt), nil
}
// defaultFilteredFsBasedDoubleValueIterator creates a default double value iterator.
func defaultFilteredFsBasedDoubleValueIterator(
values *fsBasedDoubleValues,
op filter.Op,
filterValue *field.ValueUnion,
) (iterator.PositionIterator, error) {
flt, err := op.DoubleFilter(filterValue)
if err != nil {
return nil, fmt.Errorf("invalid double filter op %v with filter value %v", op, filterValue)
}
valuesIt, err := values.Iter()
if err != nil {
return nil, err
}
return iterimpl.NewFilteredDoubleIterator(valuesIt, flt), nil
}
// defaultFilteredFsBasedBytesValueIterator creates a default bytes value iterator.
func defaultFilteredFsBasedBytesValueIterator(
values *fsBasedBytesValues,
op filter.Op,
filterValue *field.ValueUnion,
) (iterator.PositionIterator, error) {
flt, err := op.BytesFilter(filterValue)
if err != nil {
return nil, fmt.Errorf("invalid bytes filter op %v with filter value %v", op, filterValue)
}
valuesIt, err := values.Iter()
if err != nil {
return nil, err
}
return iterimpl.NewFilteredBytesIterator(valuesIt, flt), nil
}
// defaultFilteredFsBasedTimeValueIterator creates a default time value iterator.
func defaultFilteredFsBasedTimeValueIterator(
values *fsBasedTimeValues,
op filter.Op,
filterValue *field.ValueUnion,
) (iterator.PositionIterator, error) {
flt, err := op.TimeFilter(filterValue)
if err != nil {
return nil, fmt.Errorf("invalid time filter op %v with filter value %v", op, filterValue)
}
valuesIt, err := values.Iter()
if err != nil {
return nil, err
}
return iterimpl.NewFilteredTimeIterator(valuesIt, flt), nil
}
|
d503int/sdl_ios
|
SmartDeviceLink/SDLAlertManeuverResponse.h
|
// SDLAlertManeuverResponse.h
//
#import "SDLRPCResponse.h"
/** SDLAlertManeuverResponse is sent when SDLAlertManeuver has been called.
* @since SmartDeviceLink 1.0
*/
NS_ASSUME_NONNULL_BEGIN
@interface SDLAlertManeuverResponse : SDLRPCResponse
@end
NS_ASSUME_NONNULL_END
|
partials-dev/pulse
|
scripts/src/components/app.js
|
<gh_stars>0
import React from 'react'
import MetronomePage from './work/metronome-page'
import SearchPage from './work/search'
import { SET_SPIRIT_ANIMAL_SRC } from '../reducers/actions'
import gifCache from '../gif-cache'
import path from 'path'
import { connect } from 'react-redux'
function mapStateToProps ({ search }) {
return {
showSearch: search.show
}
}
const preventDefault = e => {
if (e.preventDefault) e.preventDefault()
return false
}
function mapDispatchToProps (dispatch) {
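// onDrop caches the dropped gif (keyed by its basename) and dispatches its path as the new spirit animal source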
const dragAndDrop = {
onDrop: e => {
e.preventDefault()
const src = e.dataTransfer.files[0].path
gifCache.set({
id: path.parse(src).name,
url: src
})
dispatch({ type: SET_SPIRIT_ANIMAL_SRC, src })
return false
},
onDragOver: preventDefault,
onDragLeave: preventDefault,
onDragEnd: preventDefault
}
return { dragAndDrop }
}
function App ({ showSearch, dragAndDrop }) {
const showIfSearching = showSearch ? null : { display: 'none' }
const showUnlessSearching = showSearch ? { display: 'none' } : null
return <div id='app-wrapper' {...dragAndDrop}>
<SearchPage id='search-page' style={showIfSearching} />
<MetronomePage id='metronome' style={showUnlessSearching} />
</div>
}
export default connect(mapStateToProps, mapDispatchToProps)(App)
|
mjrgh/mbed
|
libraries/mbed/targets/hal/TARGET_Freescale/TARGET_K20D50M/us_ticker.c
|
/* mbed Microcontroller Library
* Copyright (c) 2006-2013 ARM Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <stddef.h>
#include "us_ticker_api.h"
#include "PeripheralNames.h"
#include "clk_freqs.h"
#define PIT_TIMER PIT->CHANNEL[0]
#define PIT_TIMER_IRQ PIT0_IRQn
#define PIT_TICKER PIT->CHANNEL[1]
#define PIT_TICKER_IRQ PIT1_IRQn
static void timer_init(void);
static void ticker_init(void);
static int us_ticker_inited = 0;
static uint32_t clk_mhz;
void us_ticker_init(void) {
if (us_ticker_inited)
return;
us_ticker_inited = 1;
SIM->SCGC6 |= SIM_SCGC6_PIT_MASK; // Clock PIT
PIT->MCR = 0; // Enable PIT
clk_mhz = bus_frequency() / 1000000;
timer_init();
ticker_init();
}
/******************************************************************************
* Timer for us timing.
*
* The K20D5M does not have a prescaler on its PIT timer nor the option
* to chain timers, which is why a software timer is required to get 32-bit
* word length.
******************************************************************************/
static volatile uint32_t msb_counter = 0;
static uint32_t timer_ldval = 0;
static void timer_isr(void) {
msb_counter++;
PIT_TIMER.TFLG = 1;
}
static void timer_init(void) {
//CLZ counts the leading zeros, returning number of bits not used by clk_mhz
timer_ldval = clk_mhz << __CLZ(clk_mhz);
PIT_TIMER.LDVAL = timer_ldval; // 1us
PIT_TIMER.TCTRL |= PIT_TCTRL_TIE_MASK;
PIT_TIMER.TCTRL |= PIT_TCTRL_TEN_MASK; // Start timer 0
NVIC_SetVector(PIT_TIMER_IRQ, (uint32_t)timer_isr);
NVIC_EnableIRQ(PIT_TIMER_IRQ);
}
uint32_t us_ticker_read() {
if (!us_ticker_inited)
us_ticker_init();
uint32_t retval;
__disable_irq();
retval = (timer_ldval - PIT_TIMER.CVAL) / clk_mhz; //Hardware bits
retval |= msb_counter << __CLZ(clk_mhz); //Software bits
if (PIT_TIMER.TFLG == 1) { //If overflow bit is set, force it to be handled
timer_isr(); //Handle IRQ, read again to make sure software/hardware bits are synced
NVIC_ClearPendingIRQ(PIT_TIMER_IRQ);
return us_ticker_read();
}
__enable_irq();
return retval;
}
/******************************************************************************
* Timer Event
*
* It schedules interrupts at a given (32-bit) microsecond interval.
* It is implemented using PIT channel 1, since no prescaler is available,
* some bits are implemented in software.
******************************************************************************/
static void ticker_isr(void);
static void ticker_init(void) {
/* Set interrupt handler */
NVIC_SetVector(PIT_TICKER_IRQ, (uint32_t)ticker_isr);
NVIC_EnableIRQ(PIT_TICKER_IRQ);
}
void us_ticker_disable_interrupt(void) {
PIT_TICKER.TCTRL &= ~PIT_TCTRL_TIE_MASK;
}
void us_ticker_clear_interrupt(void) {
// the interrupt flag is already cleared in ticker_isr
}
static uint32_t us_ticker_int_counter = 0;
inline static void ticker_set(uint32_t count) {
PIT_TICKER.TCTRL = 0;
PIT_TICKER.LDVAL = count;
PIT_TICKER.TCTRL = PIT_TCTRL_TIE_MASK | PIT_TCTRL_TEN_MASK;
}
static void ticker_isr(void) {
// Clear IRQ flag
PIT_TICKER.TFLG = 1;
if (us_ticker_int_counter > 0) {
ticker_set(0xFFFFFFFF);
us_ticker_int_counter--;
} else {
// This function is going to disable the interrupts if there are
// no other events in the queue
us_ticker_irq_handler();
}
}
void us_ticker_set_interrupt(unsigned int timestamp) {
int delta = (int)(timestamp - us_ticker_read());
if (delta <= 0) {
// This event was in the past:
us_ticker_irq_handler();
return;
}
//Calculate how much falls outside the 32-bit after multiplying with clk_mhz
//We shift twice 16-bit to keep everything within the 32-bit variable
us_ticker_int_counter = (uint32_t)(delta >> 16);
us_ticker_int_counter *= clk_mhz;
us_ticker_int_counter >>= 16;
uint32_t us_ticker_int_remainder = (uint32_t)delta * clk_mhz;
if (us_ticker_int_remainder == 0) {
ticker_set(0xFFFFFFFF);
us_ticker_int_counter--;
} else {
ticker_set(us_ticker_int_remainder);
}
}
|
tanbinh123/addax
|
addax/src/main/java/com/github/jitwxs/addax/core/mock/mocker/explicit/FloatMocker.java
|
<gh_stars>1-10
package com.github.jitwxs.addax.core.mock.mocker.explicit;
import com.github.jitwxs.addax.common.bean.MockConfig;
import com.github.jitwxs.addax.core.mock.mocker.IMocker;
import org.apache.commons.lang3.RandomUtils;
/**
* Float对象模拟器
*/
public class FloatMocker implements IMocker<Float> {
@Override
public Float mock(MockConfig mockConfig) {
return RandomUtils.nextFloat(mockConfig.getFloatRange()[0], mockConfig.getFloatRange()[1]);
}
}
|
rdunlop/unicycling-registration
|
spec/controllers/preliminary_external_results_controller_spec.rb
|
require 'spec_helper'
describe PreliminaryExternalResultsController do
before do
@admin_user = FactoryBot.create(:super_admin_user)
sign_in @admin_user
@competition = FactoryBot.create(:competition)
@competitor = FactoryBot.create(:event_competitor, competition: @competition)
end
# This should return the minimal set of attributes required to create a valid
# ExternalResult. As you add validations to ExternalResult, be sure to
# update the return value of this method accordingly.
def valid_attributes
{
competitor_id: @competitor.id,
details: "soomething",
status: "active",
points: 1
}
end
describe "GET index" do
it "shows all external_results" do
external_result = FactoryBot.create(:external_result, :preliminary, competitor: @competitor, details: "My details")
get :index, params: { competition_id: @competition.id }
assert_select "h1", "Data Recording Form - Entry Form (External Results)"
assert_select "td", external_result.details
end
end
describe "POST approve" do
it "redirects" do
# external_result = FactoryBot.create(:external_result, :preliminary, competitor: @competitor, details: "My details")
post :approve, params: { competition_id: @competition.id }
expect(response).to redirect_to(competition_preliminary_external_results_path(@competition))
end
end
describe "GET edit" do
it "assigns the requested external_result as @external_result" do
external_result = FactoryBot.create(:external_result, :preliminary)
get :edit, params: { id: external_result.to_param }
assert_select "h1", "Editing points result"
end
end
describe "POST create" do
describe "with valid params" do
it "creates a new ExternalResult" do
expect do
post :create, params: { external_result: valid_attributes, competition_id: @competition.id }
end.to change(ExternalResult, :count).by(1)
end
it "redirects to the created external_result" do
post :create, params: { external_result: valid_attributes, competition_id: @competition.id }
expect(response).to redirect_to(competition_preliminary_external_results_path(@competition))
end
end
describe "with invalid params" do
it "does not create external_result" do
# Trigger the behavior that occurs when invalid params are submitted
allow_any_instance_of(ExternalResult).to receive(:save).and_return(false)
expect do
post :create, params: { external_result: { competitor_id: "invalid value" }, competition_id: @competition.id }
end.not_to change(ExternalResult, :count)
end
it "re-renders the 'index' template" do
# Trigger the behavior that occurs when invalid params are submitted
allow_any_instance_of(ExternalResult).to receive(:save).and_return(false)
post :create, params: { external_result: { "competitor_id" => "invalid value" }, competition_id: @competition.id }
assert_select "h1", "Data Recording Form - Entry Form (External Results)"
end
end
end
describe "PUT update" do
describe "with valid params" do
it "updates the external_result" do
external_result = FactoryBot.create(:external_result, :preliminary)
expect do
put :update, params: { id: external_result.to_param, external_result: valid_attributes }
end.to change { external_result.reload.details }
end
it "redirects to the external_result index" do
external_result = FactoryBot.create(:external_result, :preliminary)
put :update, params: { id: external_result.to_param, external_result: valid_attributes }
expect(response).to redirect_to(competition_preliminary_external_results_path(@competition))
end
end
describe "with invalid params" do
it "does not update the external_result" do
external_result = FactoryBot.create(:external_result, :preliminary)
# Trigger the behavior that occurs when invalid params are submitted
allow_any_instance_of(ExternalResult).to receive(:save).and_return(false)
expect do
put :update, params: { id: external_result.to_param, external_result: { competitor_id: "invalid value" } }
end.not_to change { external_result.reload.details }
end
it "re-renders the 'edit' template" do
external_result = FactoryBot.create(:external_result, :preliminary)
# Trigger the behavior that occurs when invalid params are submitted
allow_any_instance_of(ExternalResult).to receive(:save).and_return(false)
put :update, params: { id: external_result.to_param, external_result: { "competitor_id" => "invalid value" } }
assert_select "h1", "Editing points result"
end
end
end
describe "DELETE destroy" do
it "destroys the requested external_result" do
external_result = FactoryBot.create(:external_result, :preliminary)
expect do
delete :destroy, params: { id: external_result.to_param }
end.to change(ExternalResult, :count).by(-1)
end
it "redirects to the external_results list" do
external_result = FactoryBot.create(:external_result, :preliminary, competitor: @competitor)
delete :destroy, params: { id: external_result.to_param }
expect(response).to redirect_to(competition_preliminary_external_results_path(@competition))
end
end
describe "import_csv" do
let(:test_file_name) { fixture_path + '/external_results.csv' }
let(:test_file) { Rack::Test::UploadedFile.new(test_file_name, "text/plain") }
before do
registrant1 = FactoryBot.create(:competitor, bib_number: 101)
registrant2 = FactoryBot.create(:competitor, bib_number: 102)
registrant3 = FactoryBot.create(:competitor, bib_number: 103)
registrant4 = FactoryBot.create(:competitor, bib_number: 104)
comp1 = FactoryBot.create(:event_competitor, competition: @competition)
comp2 = FactoryBot.create(:event_competitor, competition: @competition)
comp3 = FactoryBot.create(:event_competitor, competition: @competition)
comp4 = FactoryBot.create(:event_competitor, competition: @competition)
comp1.members.first.update_attribute(:registrant, registrant1)
comp2.members.first.update_attribute(:registrant, registrant2)
comp3.members.first.update_attribute(:registrant, registrant3)
comp4.members.first.update_attribute(:registrant, registrant4)
end
it "creates entries" do
expect do
post :import_csv, params: { competition_id: @competition.id, file: test_file }
end.to change(ExternalResult, :count).by 4
expect(ExternalResult.preliminary.count).to eq(4)
end
end
end
|
chenkanqin/uplus
|
src/config/index.js
|
const env = require('./env');
const setStorageSyncField = require('./setStorageSync-field');
// Global configuration
const config = {
title: 'QQ:2052021114',
currentService: env.currentService,
serverUrl: env.EVENTHOST,
pages: [],
routes: {},
// Fields persisted to local storage
setStorageSyncField: {
...setStorageSyncField
},
// Local storage expiry, in days
storageExpired: 365,
// Static resources
staticUrl: '',
// Error logging; leave empty to disable: 'production' | ['production', 'development']
errorLog: 'development',
// Footer tab bar configuration
footerTabConfig: {
max: 5, // maximum number of tabs
min: 2, // minimum number of tabs
}
}
export default config;
|
mattstyles/ecs-benchmark
|
src/cases/javelin-ecs/packed_1.js
|
import { createComponentType, createWorld, number, query } from "@javelin/ecs";
const A = createComponentType({
type: 0,
schema: {
value: number,
},
});
const B = createComponentType({
type: 1,
schema: {
value: number,
},
});
const C = createComponentType({
type: 2,
schema: {
value: number,
},
});
const D = createComponentType({
type: 3,
schema: {
value: number,
},
});
const E = createComponentType({
type: 4,
schema: {
value: number,
},
});
export default (count) => {
const world = createWorld();
const qa = query(A);
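// system: each tick, double the `value` field of every entity that has component A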
world.addSystem(() => {
for (const [entities, [a]] of qa) {
for (let i = 0; i < entities.length; i++) {
a[i].value *= 2;
}
}
});
for (let i = 0; i < count; i++) {
world.spawn(
world.component(A),
world.component(B),
world.component(C),
world.component(D),
world.component(E)
);
}
return world.tick;
};
|
w99427/eNotes-Android-SDK
|
core/src/main/java/io/enotes/sdk/repository/api/entity/response/btc/chainso/SpendTxForChainSo.java
|
package io.enotes.sdk.repository.api.entity.response.btc.chainso;
import java.util.List;
public class SpendTxForChainSo {
private String status;
private Data data;
public String getStatus() {
return status;
}
public void setStatus(String status) {
this.status = status;
}
public Data getData() {
return data;
}
public void setData(Data data) {
this.data = data;
}
public class Data {
private String network;
private String address;
private List<Tx> txs;
public String getNetwork() {
return network;
}
public void setNetwork(String network) {
this.network = network;
}
public String getAddress() {
return address;
}
public void setAddress(String address) {
this.address = address;
}
public List<Tx> getTxs() {
return txs;
}
public void setTxs(List<Tx> txs) {
this.txs = txs;
}
}
public static class Tx {
private String txid;
private String value;
private String time;
public String getTxid() {
return txid;
}
public void setTxid(String txid) {
this.txid = txid;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
public String getTime() {
return time;
}
public void setTime(String time) {
this.time = time;
}
}
}
|
Ignite-Discord/Ignite
|
src/main/java/com/general_hello/commands/RPG/Commands/JourneyCommand.java
|
package com.general_hello.commands.RPG.Commands;
import com.general_hello.commands.RPG.RpgUser.RPGUser;
import com.general_hello.commands.commands.CommandContext;
import com.general_hello.commands.commands.CommandType;
import com.general_hello.commands.commands.ICommand;
import net.dv8tion.jda.api.EmbedBuilder;
import net.dv8tion.jda.api.entities.Emoji;
import net.dv8tion.jda.api.interactions.components.Button;
import net.dv8tion.jda.api.interactions.components.ButtonStyle;
import java.io.IOException;
import java.sql.SQLException;
import java.time.OffsetDateTime;
public class JourneyCommand implements ICommand {
@Override
public void handle(CommandContext ctx) throws InterruptedException, IOException, SQLException {
EmbedBuilder embedBuilder = new EmbedBuilder();
embedBuilder.setTitle(ctx.getAuthor().getName() + "'s Journey");
embedBuilder.setDescription("Welcome " + ctx.getAuthor().getAsMention() + "\n" +
"\n" +
"**RPG** - Role-playing video game, electronic game genre in which players advance through a story quest, and often many side quests, for which their character or party of characters gain experience that improves various attributes and abilities.\n" +
"\n" +
"For the **RPG** of " + ctx.getSelfUser().getAsMention() + ", you will be a soldier of France who got stranded and lost after a fierce battle!\n" +
"Your goal will be to go back to your home country, France. Good luck soldier!\n" +
"\n" +
"**Start your journey** <a:crabby:900170344202113096> - Creates a new data for RPG\n" +
"**End your journey** <:dinosaur:905241832550699099> - Deletes all your data on RPG");
embedBuilder.setFooter("Start your journey now!").setTimestamp(OffsetDateTime.now());
boolean start = true;
boolean end = true;
int shekels = RPGUser.getShekels(ctx.getAuthor().getIdLong());
if (shekels == -1) {
start = false;
} else {
end = false;
}
ctx.getChannel().sendMessageEmbeds(embedBuilder.build()).setActionRow(
Button.of(ButtonStyle.PRIMARY, ctx.getAuthor().getId() + ":start", "Start your journey").withEmoji(Emoji.fromEmote("crabby", 900170344202113096L, true)).withDisabled(start),
Button.of(ButtonStyle.DANGER, ctx.getAuthor().getId() + ":end", "End your journey").withEmoji(Emoji.fromEmote("dinosaur", 905241832550699099L, false)).withDisabled(end)
).queue();
}
@Override
public String getName() {
return "journey";
}
@Override
public String getHelp(String prefix) {
return "Starts or restarts your journey of the land of RPG";
}
@Override
public CommandType getCategory() {
return CommandType.SPECIAL;
}
}
|
Rubentxu/GDX-Logic-Bricks
|
gdx-logic-bricks/src/main/java/com/indignado/logicbricks/components/LightComponent.java
|
<gh_stars>1-10
package com.indignado.logicbricks.components;
import box2dLight.Light;
import com.badlogic.ashley.core.Component;
/**
* @author Rubentxu.
*/
public class LightComponent implements Component {
public Light light;
}
|
rockeet/terark-zip
|
src/terark/zbs/simple_zip_blob_store.cpp
|
#include "blob_store_file_header.hpp"
#include "simple_zip_blob_store.hpp"
#include <terark/io/FileStream.hpp>
#include <terark/io/MemStream.hpp>
#include <terark/io/DataIO.hpp>
#include <terark/util/mmap.hpp>
#include <terark/fsa/nest_louds_trie.hpp>
#include <assert.h>
#include <algorithm>
#if defined(_WIN32) || defined(_WIN64)
#define WIN32_LEAN_AND_MEAN
#define NOMINMAX
#include <Windows.h>
#else
#include <unistd.h> // for usleep
#endif
namespace terark {
SimpleZipBlobStore::SimpleZipBlobStore() {
m_mmapBase = nullptr;
m_unzipSize = 0;
m_lenBits = 0;
m_get_record_append = static_cast<get_record_append_func_t>
(&SimpleZipBlobStore::get_record_append_imp);
// binary compatible:
m_get_record_append_CacheOffsets =
reinterpret_cast<get_record_append_CacheOffsets_func_t>
(m_get_record_append);
}
SimpleZipBlobStore::~SimpleZipBlobStore() {
if (m_isUserMem) {
if (m_isMmapData) {
mmap_close((void*)m_mmapBase, m_mmapBase->fileSize);
}
m_mmapBase = nullptr;
m_isMmapData = false;
m_isUserMem = false;
m_strpool.risk_release_ownership();
m_off_len.risk_release_ownership();
m_records.risk_release_ownership();
}
}
void
SimpleZipBlobStore::get_record_append_imp(size_t recId, valvec<byte_t>* recData)
const {
assert(recId + 1 < m_records.size());
size_t beg = m_records.get(recId + 0);
size_t end = m_records.get(recId + 1);
auto strpool = m_strpool.data();
auto ol_data = m_off_len.data();
auto ol_bits = m_off_len.uintbits();
auto ol_mask = m_off_len.uintmask();
auto ol_minVal = m_off_len.min_val();
auto lenBits = m_lenBits;
auto lenMask = (size_t(1) << m_lenBits) - 1;
for (size_t i = beg; i < end; ++i) {
uint64_t off_len = m_off_len.fast_get(ol_data, ol_bits, ol_mask, ol_minVal, i);
size_t offset = size_t(off_len >> lenBits);
size_t length = size_t(off_len & lenMask);
recData->append(strpool + offset, length);
}
}
void
SimpleZipBlobStore::build_from(SortableStrVec& strVec, const NestLoudsTrieConfig& conf) {
m_unzipSize = strVec.str_size();
valvec<size_t> records(strVec.size(), valvec_reserve());
valvec<SortableStrVec::SEntry> subStrNode;
auto strbase = strVec.m_strpool.data();
records.push_back(0);
size_t maxLen = 0;
size_t minFragLen = std::max(conf.minFragLen, 1);
size_t maxFragLen = conf.maxFragLen;
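// Split each record into fragments of length [minFragLen, maxFragLen], preferring to
// cut at bytes flagged in conf.bestDelimBits; every fragment becomes one (offset, length)
// entry and records[] stores the fragment range of each original record.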
for (size_t i = 0; i < strVec.size(); ++i) {
fstring row = strVec[i];
for(auto p = row.udata(), end = p + row.size(); p < end; ) {
auto t = std::min(p + maxFragLen, end);
auto q = std::min(p + minFragLen, end);
while (q < t && !conf.bestDelimBits[*q]) ++q;
SortableStrVec::SEntry ent;
ent.seq_id = subStrNode.size();
ent.offset = p - strbase;
ent.length = q - p;
maxLen = std::max(size_t(q - p), maxLen);
subStrNode.push_back(ent);
p = q;
}
records.push_back(subStrNode.size());
}
strVec.build_subkeys(conf.speedupNestTrieBuild, subStrNode);
strVec.compress_strpool(1);
strVec.sort_by_seq_id();
int lenBits = terark_bsr_u32(uint32_t(maxLen)) + 1;
valvec<uint64_t> off_len(strVec.size(), valvec_no_init());
auto ents = strVec.m_index.data();
for (size_t i = 0; i < strVec.size(); ++i) {
off_len[i] = ents[i].offset << lenBits | ents[i].length;
}
m_off_len.build_from(off_len);
m_records.build_from(records);
m_strpool.swap(strVec.m_strpool);
m_lenBits = lenBits;
m_numRecords = records.size() - 1;
}
#pragma pack(push,8)
struct SimpleZipBlobStore::FileHeader : public FileHeaderBase {
uint64_t pading21;
uint64_t off_len_number;
uint64_t off_len_minVal;
uint32_t strpoolsize; // div 16
uint32_t pading22;
uint08_t lenBits;
uint08_t offBits;
uint08_t pad1[2];
uint32_t pad2[3];
};
#pragma pack(pop)
void SimpleZipBlobStore::load_mmap(fstring fpath, const void* mmapBase, size_t mmapSize) {
BOOST_STATIC_ASSERT(sizeof(FileHeader) == 128);
m_mmapBase = (FileHeader*)mmapBase;
auto header = (FileHeader*)mmapBase;
if (fstring(m_mmapBase->magic, MagicStrLen) != MagicString) {
throw std::invalid_argument("magic = "
+ fstring(m_mmapBase->magic, MagicStrLen)
+ " is not a SimpleZipBlobStore");
}
m_lenBits = header->lenBits;
m_unzipSize = header->unzipSize;
m_numRecords = header->records;
size_t realpoolsize = header->strpoolsize * 16;
m_strpool.risk_set_data((byte*)(header + 1) , realpoolsize);
m_records.risk_set_data((byte*)(header + 1) + realpoolsize
, header->records + 1
, My_bsr_size_t(header->off_len_number) + 1 );
m_off_len.risk_set_data((byte*)m_records.data() + m_records.mem_size()
, header->off_len_number
, header->off_len_minVal
, header->offBits + header->lenBits);
}
void SimpleZipBlobStore::save_mmap(function<void(const void*, size_t)> write) const {
FunctionAdaptBuffer adaptBuffer(write);
OutputBuffer buffer(&adaptBuffer);
FileHeader h;
memset(&h, 0, sizeof(h));
h.magic_len = MagicStrLen;
strcpy(h.magic, MagicString);
strcpy(h.className, "SimpleZipBlobStore");
h.unzipSize = m_unzipSize;
h.records = m_records.size() - 1;
h.off_len_minVal = m_off_len.min_val();
h.off_len_number = m_off_len.size();
h.lenBits = (uint08_t)(m_lenBits);
h.offBits = (uint08_t)(m_off_len.uintbits() - m_lenBits);
h.strpoolsize = (m_strpool.size() + 15) / 16;
buffer.ensureWrite(&h, sizeof(h));
buffer.ensureWrite(m_strpool.data(), m_strpool.size());
PadzeroForAlign<16>(buffer, m_strpool.size());
buffer.ensureWrite(m_records.data(), m_records.mem_size());
buffer.ensureWrite(m_off_len.data(), m_off_len.mem_size());
}
void SimpleZipBlobStore::get_meta_blocks(valvec<Block>* blocks) const {
blocks->erase_all();
blocks->push_back({"L0-Index", {m_records.data(), (ptrdiff_t)m_records.mem_size()}});
blocks->push_back({"L1-Index", {m_off_len.data(), (ptrdiff_t)m_off_len.mem_size()}});
}
void SimpleZipBlobStore::get_data_blocks(valvec<Block>* blocks) const {
blocks->erase_all();
blocks->push_back({"data", {m_strpool.data(), (ptrdiff_t)m_strpool.used_mem_size()}});
}
void SimpleZipBlobStore::detach_meta_blocks(const valvec<Block>& blocks) {
THROW_STD(invalid_argument
, "SimpleZipBlobStore detach_meta_blocks unsupported !");
}
size_t SimpleZipBlobStore::mem_size() const {
return m_strpool.used_mem_size() + m_off_len.mem_size() + m_records.mem_size();
}
fstring SimpleZipBlobStore::get_mmap() const {
return fstring((const char*)m_mmapBase, m_mmapBase->fileSize);
}
void SimpleZipBlobStore::reorder_zip_data(ZReorderMap& newToOld,
function<void(const void* data, size_t size)> writeAppend,
fstring tmpFile)
const {
THROW_STD(invalid_argument, "Not implemented");
}
void SimpleZipBlobStore::init_from_memory(fstring dataMem, Dictionary/*dict*/) {
THROW_STD(invalid_argument, "Not implemented");
}
} // namespace terark
|
xraycat123/spaCy
|
spacy/tests/regression/test_issue1945.py
|
<reponame>xraycat123/spaCy
'''Test regression in Matcher introduced in v2.0.6.'''
from __future__ import unicode_literals
import pytest
from ...vocab import Vocab
from ...tokens import Doc
from ...matcher import Matcher
def test_issue1945():
text = "a a a"
matcher = Matcher(Vocab())
matcher.add('MWE', None, [{'orth': 'a'}, {'orth': 'a'}])
doc = Doc(matcher.vocab, words=['a', 'a', 'a'])
matches = matcher(doc)
# We should see two overlapping matches here
assert len(matches) == 2
assert matches[0][1:] == (0, 2)
assert matches[1][1:] == (1, 3)
|
pythonpeixun/algo-1
|
stack/postfix/postfix2.c
|
#include "postfix2.h"
/* The algorithm used is called "Shunting-yard" algorithm
* see more: https://en.wikipedia.org/wiki/Shunting-yard_algorithm
* Operator precedence assumed (from highest to lowest):
* ()
* ^
* *, /
* +, -
*/
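/* Worked example (single-character operands, as the code below assumes):
 * infix: a + b * (c - d)
 * read a -> output "a"; '+' -> push; b -> output "b"; '*' -> push ('+' has lower
 * precedence); '(' -> push; c, '-', d -> output "cd" and push '-';
 * ')' -> pop until '(' giving "-"; end of input -> pop remaining "*+"
 * postfix output: abcd-*+
 */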
void
infixTopostfix(char* filename)
{
int maxElements = 20;
Stack s = createStack(maxElements);
FILE *fp;
fp = fopen(filename, "r");
if (fp == NULL)
{
fprintf(stderr, "can't open %s\n", filename);
exit(1);
}
int c;
while((c=getc(fp)) != EOF)
{
/* printf("character c read: "); */
/* putc(c, stdout); */
/* printf("\n"); */
if(isalpha(c) || isdigit(c))
{
putchar(c);
}
else
{
switch(c)
{
case ' ':
break;
case '-':
case '+':
while(!isEmpty(s) && top(s) != '(')
{
putchar(topAndPop(s));
}
push(c, s);
break;
            case '/':
            case '*':
                /* check isEmpty() before peeking top(), and only pop operators of
                 * equal or higher precedence ('*', '/', '^'); lower-precedence
                 * '+' and '-' must stay on the stack */
                if (isEmpty(s) ||
                    top(s) == '+' ||
                    top(s) == '-' ||
                    top(s) == '(')
                {
                    push(c, s);
                }
                else
                {
                    while (!isEmpty(s) && top(s) != '(' &&
                           top(s) != '+' && top(s) != '-')
                    {
                        putchar(topAndPop(s));
                    }
                    push(c, s);
                }
break;
case '(':
push('(', s);
break;
case ')':
while(top(s) != '(')
{
putchar(topAndPop(s));
}
pop(s); //remove '('
break;
case '^':
push('^', s); // by assumption, there is no operator has higher precedence than '^', so we push directly
break;
default:
fprintf(stderr, "Invalid operator %c in expression\n", c);
exit(1);
}
}
}
while(!isEmpty(s))
{
putchar(topAndPop(s));
}
disposeStack(s);
printf("\n");
}
|
kurpicz/pwm
|
external/wavelet_construction/onlineWT/src/strstream.hpp
|
<gh_stars>10-100
#ifndef STRSTREAM_H
#define STRSTREAM_H
#include <stddef.h>
#include "cocadautil.hpp"
/**
* @file strstream.h
* @author <NAME>
*
* @brief String stream.
*/
/**
* String stream type
*/
typedef struct _strstream strstream;
/**
 * @brief Opens a stream for an in-memory source string.
* @param str The source string.
* @param slen The source string length.
*/
strstream *strstream_open_str(char *str, size_t slen);
/**
* @brief Opens a stream for a source text file.
*/
strstream *strstream_open_file(char *filename);
/**
* @brief Resets the stream, i.e. moves cursor to initial position.
*/
void strstream_reset(strstream *sst);
/**
* @brief Tests whether a stream has reached its end.
*/
bool strstream_end(strstream *sst);
/**
* @brief Reads the next char from a stream.
* @returns The next character as an int, or EOF if the stream has
* reached its end.
*
* Example of usage:
* @code
* strstream *fsst = strstream_open_file(filename);
* for (int c; (c=strstream_getc(fsst)) != EOF;)
* printf ("Read c=%c\n", (char)c);
* strstream_close(fsst);
* @endcode
*/
int strstream_getc(strstream *sst);
/**
* @brief Attempts to read the next @p n chars into the string *dest.
 * Fewer than @p n characters may be read if the stream reaches its end.
* @returns The number of chars actually read.
*/
size_t strstream_reads(strstream *sst, char *dest, size_t n);
/**
* @brief Closes the stream and disposes the stream object.
*/
void strstream_close(strstream *sst);
#endif
|
smegurus/smegurus-django
|
tenant_configuration/tests/test_entrepreneur_view.py
|
from django.core.signing import Signer
from django.db import transaction
from django.contrib.auth.models import User, Group
from django.utils import translation
from django.core.urlresolvers import resolve, reverse
from rest_framework.authtoken.models import Token
from rest_framework.test import APITestCase
from rest_framework import status
from django_tenants.test.cases import TenantTestCase
from django_tenants.test.client import TenantClient
from foundation_public.models.organization import PublicOrganization
from foundation_tenant.models.base.me import Me
from foundation_tenant.models.base.postaladdress import PostalAddress
from foundation_tenant.models.base.contactpoint import ContactPoint
from foundation_tenant.models.base.businessidea import BusinessIdea
from foundation_tenant.models.base.tag import Tag
from smegurus import constants
TEST_USER_EMAIL = "<EMAIL>"
TEST_USER_USERNAME = "ledo"
TEST_USER_PASSWORD = "<PASSWORD>"
class FoundationConfigurationEntrepreneurViewsWithTenatSchemaTestCases(APITestCase, TenantTestCase):
fixtures = []
def setup_tenant(self, tenant):
"""Tenant Schema"""
tenant.schema_name = 'galacticalliance'
tenant.name = "Galactic Alliance of Humankind"
tenant.has_perks=True
tenant.has_mentors=True
tenant.how_discovered = "Command HQ"
tenant.how_many_served = 1
@classmethod
def setUpTestData(cls):
Group.objects.bulk_create([
Group(id=constants.ENTREPRENEUR_GROUP_ID, name="Entreprenuer",),
Group(id=constants.MENTOR_GROUP_ID, name="Mentor",),
Group(id=constants.ADVISOR_GROUP_ID, name="Advisor",),
Group(id=constants.ORGANIZATION_MANAGER_GROUP_ID, name="Org Manager",),
Group(id=constants.ORGANIZATION_ADMIN_GROUP_ID, name="Org Admin",),
Group(id=constants.CLIENT_MANAGER_GROUP_ID, name="Client Manager",),
Group(id=constants.SYSTEM_ADMIN_GROUP_ID, name="System Admin",),
])
user = User.objects.create_user( # Create our User.
email=TEST_USER_EMAIL,
username=TEST_USER_USERNAME,
password=<PASSWORD>
)
user.is_active = True
user.save()
@transaction.atomic
def setUp(self):
translation.activate('en') # Set English
super(FoundationConfigurationEntrepreneurViewsWithTenatSchemaTestCases, self).setUp()
# Initialize our test data.
self.user = User.objects.get()
token = Token.objects.get(user__username=TEST_USER_USERNAME)
# Setup.
self.unauthorized_client = TenantClient(self.tenant)
self.authorized_client = TenantClient(self.tenant, HTTP_AUTHORIZATION='Token ' + token.key)
self.authorized_client.login(
username=TEST_USER_USERNAME,
password=<PASSWORD>
)
# Update Organization.
self.tenant.users.add(self.user)
self.tenant.save()
@transaction.atomic
def tearDown(self):
BusinessIdea.objects.delete_all()
Tag.objects.delete_all()
PostalAddress.objects.delete_all()
ContactPoint.objects.delete_all()
Me.objects.delete_all()
users = User.objects.all()
for user in users.all():
user.delete()
groups = Group.objects.all()
for group in groups.all():
group.delete()
# super(FoundationConfigurationEntrepreneurViewsWithTenatSchemaTestCases, self).tearDown()
@transaction.atomic
def test_config_entr_step_one_page_view_with_success(self):
# Setup our User.
entrepreneur_group = Group.objects.get(id=constants.ENTREPRENEUR_GROUP_ID)
self.user.groups.add(entrepreneur_group)
self.user.save()
# Test & verify.
url = reverse('foundation_auth_config_entr_step_one')
response = self.authorized_client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
@transaction.atomic
def test_config_entr_step_one_page_view_with_failure(self):
url = reverse('foundation_auth_config_entr_step_one')
response = self.authorized_client.get(url)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
@transaction.atomic
def test_config_entr_step_two_page_view_with_success(self):
# Setup our User.
entrepreneur_group = Group.objects.get(id=constants.ENTREPRENEUR_GROUP_ID)
self.user.groups.add(entrepreneur_group)
self.user.save()
# Test & verify.
url = reverse('foundation_auth_config_entr_step_two')
response = self.authorized_client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
@transaction.atomic
def test_config_entr_step_two_page_view_with_failure(self):
url = reverse('foundation_auth_config_entr_step_two')
response = self.authorized_client.get(url)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
@transaction.atomic
def test_config_entr_step_three_page_view_with_success(self):
# Setup our User.
entrepreneur_group = Group.objects.get(id=constants.ENTREPRENEUR_GROUP_ID)
self.user.groups.add(entrepreneur_group)
self.user.save()
# Test & verify.
url = reverse('foundation_auth_config_entr_step_three')
response = self.authorized_client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
@transaction.atomic
def test_config_entr_step_three_page_view_with_failure(self):
url = reverse('foundation_auth_config_entr_step_three')
response = self.authorized_client.get(url)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
@transaction.atomic
def test_config_entr_step_four_page_view_with_success(self):
# Setup our User.
entrepreneur_group = Group.objects.get(id=constants.ENTREPRENEUR_GROUP_ID)
self.user.groups.add(entrepreneur_group)
self.user.save()
# Test & verify.
url = reverse('foundation_auth_config_entr_step_four')
response = self.authorized_client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
@transaction.atomic
def test_config_entr_step_four_page_view_with_failure(self):
url = reverse('foundation_auth_config_entr_step_four')
response = self.authorized_client.get(url)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
@transaction.atomic
def test_config_entr_step_five_page_view_with_success(self):
# Setup our User.
entrepreneur_group = Group.objects.get(id=constants.ENTREPRENEUR_GROUP_ID)
self.user.groups.add(entrepreneur_group)
self.user.save()
# Test & verify.
url = reverse('foundation_auth_config_entr_step_five')
response = self.authorized_client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
@transaction.atomic
def test_config_entr_step_five_page_view_with_failure(self):
url = reverse('foundation_auth_config_entr_step_five')
response = self.authorized_client.get(url)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
|
pmac1965/proteus
|
source/display/prSpriteAnimation.cpp
|
<reponame>pmac1965/proteus
/**
* prSpriteAnimation.cpp
*
* Copyright 2014 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "../prConfig.h"
#include "prSpriteAnimation.h"
#include "prSpriteAnimationSequence.h"
#include "prSprite.h"
#include "../core/prMacros.h"
#include "../core/prStringUtil.h"
#include "../debug/prAssert.h"
#include "../debug/prTrace.h"
//using namespace Proteus::Core;
/// ---------------------------------------------------------------------------
/// Ctor
/// ---------------------------------------------------------------------------
prSpriteAnimation::prSpriteAnimation(prSprite *sprite) : m_sprite(*sprite)
{
m_currSequence = nullptr;
}
/// ---------------------------------------------------------------------------
/// Dtor
/// ---------------------------------------------------------------------------
prSpriteAnimation::~prSpriteAnimation()
{
std::list<prSpriteAnimationSequence*>::iterator it = m_sequences.begin();
std::list<prSpriteAnimationSequence*>::iterator end = m_sequences.end();
for (; it != end; ++it)
{
delete *it;
}
}
/// ---------------------------------------------------------------------------
/// Updates the sprites animation.
/// ---------------------------------------------------------------------------
void prSpriteAnimation::Update(float dt)
{
if (m_currSequence)
{
m_currSequence->Animate(dt);
// if (m_sprite.GetCurrentFrame() != m_currSequence->frameIndex)
// {
m_sprite.SetFrame(m_currSequence->GetFrame());
// }
}
}
/// ---------------------------------------------------------------------------
/// Adds an animation sequence.
/// ---------------------------------------------------------------------------
void prSpriteAnimation::AddSequence(prSpriteAnimationSequence* sequence)
{
PRASSERT(sequence);
m_sequences.push_back(sequence);
}
/// ---------------------------------------------------------------------------
/// Plays a named animation sequence.
/// ---------------------------------------------------------------------------
void prSpriteAnimation::PlaySequence(const char *name)
{
PRASSERT(name && *name);
u32 hash = prStringHash(name);
std::list<prSpriteAnimationSequence*>::iterator it = m_sequences.begin();
std::list<prSpriteAnimationSequence*>::iterator end = m_sequences.end();
for (; it != end; ++it)
{
if ((*it)->GetHash() == hash)
{
m_currSequence = *it;
m_currSequence->Begin();
m_sprite.SetFrame(m_currSequence->GetFrame());
return;
}
}
PRWARN("Failed to play animation sequence '%s'", name);
}
/// ---------------------------------------------------------------------------
/// Gets the user data for the current frame.
/// ---------------------------------------------------------------------------
s32 prSpriteAnimation::GetUserDataForCurrentFrame(s32 index) const
{
PRASSERT(PRBETWEEN(index, 0, 3));
if (m_currSequence)
{
return m_currSequence->GetUserDataForCurrentFrame(index);
}
return -1;
}
/// ---------------------------------------------------------------------------
/// Has the current animation stopped.
/// ---------------------------------------------------------------------------
bool prSpriteAnimation::HasAnimationStopped() const
{
bool result = false;
if (m_currSequence)
{
if (m_currSequence->GetAnimState() == ANIM_STATE_STOPPED)
{
result = true;
}
}
return result;
}
/// ---------------------------------------------------------------------------
/// Has any animation been played and animation is not in its default state.
/// ---------------------------------------------------------------------------
bool prSpriteAnimation::HasAnimationStarted() const
{
bool result = false;
if (m_currSequence)
{
if (m_currSequence->GetAnimState() != ANIM_STATE_NONE)
{
result = true;
}
}
return result;
}
|
Yechan0815/cubrid
|
src/method/method_query_util.cpp
|
/*
*
* Copyright 2016 CUBRID Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#include "method_query_util.hpp"
#include <algorithm>
#include <cstring>
#include "dbtype.h"
#if !defined(SERVER_MODE)
#include "dbi.h"
#include "object_domain.h"
#include "object_primitive.h"
#endif
namespace cubmethod
{
#define STK_SIZE 100
void
str_trim (std::string &str)
{
str.erase (0, str.find_first_not_of ("\t\n\r "));
str.erase (str.find_last_not_of ("\t\n\r ") + 1);
}
char *
get_backslash_escape_string (void)
{
if (prm_get_bool_value (PRM_ID_NO_BACKSLASH_ESCAPES))
{
return (char *) "\\";
}
else
{
return (char *) "\\\\";
}
}
#define B_ERROR -1
#define B_TRUE 1
#define B_FALSE 0
int
is_korean (unsigned char ch)
{
return (ch >= 0xb0 && ch <= 0xc8) || (ch >= 0xa1 && ch <= 0xfe);
}
int
str_eval_like (const unsigned char *tar, const unsigned char *expr, unsigned char escape)
{
const int IN_CHECK = 0;
const int IN_PERCENT = 1;
const int IN_PERCENT_UNDERSCORE = 2;
int status = IN_CHECK;
const unsigned char *tarstack[STK_SIZE], *exprstack[STK_SIZE];
int stackp = -1;
int inescape = 0;
if (escape == 0)
{
escape = 2;
}
while (1)
{
if (status == IN_CHECK)
{
if (*expr == escape)
{
expr++;
if (*expr == '%' || *expr == '_')
{
inescape = 1;
continue;
}
else if (*tar
&& ((!is_korean (*tar) && *tar == *expr)
|| (is_korean (*tar) && *tar == *expr && * (tar + 1) == * (expr + 1))))
{
if (is_korean (*tar))
{
tar += 2;
}
else
{
tar++;
}
if (is_korean (*expr))
{
expr += 2;
}
else
{
expr++;
}
continue;
}
}
if (inescape)
{
if (*tar == *expr)
{
tar++;
expr++;
}
else
{
if (stackp >= 0 && stackp < STK_SIZE)
{
tar = tarstack[stackp];
if (is_korean (*tar))
{
tar += 2;
}
else
{
tar++;
}
expr = exprstack[stackp--];
}
else
{
return B_FALSE;
}
}
inescape = 0;
continue;
}
/* goto check */
if (*expr == 0)
{
while (*tar == ' ')
{
tar++;
}
if (*tar == 0)
{
return B_TRUE;
}
else
{
if (stackp >= 0 && stackp < STK_SIZE)
{
tar = tarstack[stackp];
if (is_korean (*tar))
{
tar += 2;
}
else
{
tar++;
}
expr = exprstack[stackp--];
}
else
{
return B_FALSE;
}
}
}
else if (*expr == '%')
{
status = IN_PERCENT;
while (* (expr + 1) == '%')
{
expr++;
}
}
else if ((*expr == '_') || (!is_korean (*tar) && *tar == *expr)
|| (is_korean (*tar) && *tar == *expr && * (tar + 1) == * (expr + 1)))
{
if (is_korean (*tar))
{
tar += 2;
}
else
{
tar++;
}
if (is_korean (*expr))
{
expr += 2;
}
else
{
expr++;
}
}
else if (stackp >= 0 && stackp < STK_SIZE)
{
tar = tarstack[stackp];
if (is_korean (*tar))
{
tar += 2;
}
else
{
tar++;
}
expr = exprstack[stackp--];
}
else if (stackp >= STK_SIZE)
{
return B_ERROR;
}
else
{
return B_FALSE;
}
}
else if (status == IN_PERCENT)
{
if (* (expr + 1) == '_')
{
if (stackp >= STK_SIZE - 1)
{
return B_ERROR;
}
tarstack[++stackp] = tar;
exprstack[stackp] = expr;
expr++;
inescape = 0;
status = IN_PERCENT_UNDERSCORE;
continue;
}
if (* (expr + 1) == escape)
{
expr++;
inescape = 1;
if (* (expr + 1) != '%' && * (expr + 1) != '_')
{
return B_ERROR;
}
}
while (*tar && *tar != * (expr + 1))
{
if (is_korean (*tar))
{
tar += 2;
}
else
{
tar++;
}
}
if (*tar == * (expr + 1))
{
if (stackp >= STK_SIZE - 1)
{
return B_ERROR;
}
tarstack[++stackp] = tar;
exprstack[stackp] = expr;
if (is_korean (*expr))
{
expr += 2;
}
else
{
expr++;
}
inescape = 0;
status = IN_CHECK;
}
}
if (status == IN_PERCENT_UNDERSCORE)
{
if (*expr == escape)
{
expr++;
inescape = 1;
if (*expr != '%' && *expr != '_')
{
return B_ERROR;
}
continue;
}
if (inescape)
{
if (*tar == *expr)
{
tar++;
expr++;
}
else
{
if (stackp >= 0 && stackp < STK_SIZE)
{
tar = tarstack[stackp];
if (is_korean (*tar))
{
tar += 2;
}
else
{
tar++;
}
expr = exprstack[stackp--];
}
else
{
return B_FALSE;
}
}
inescape = 0;
continue;
}
/* goto check */
if (*expr == 0)
{
while (*tar == ' ')
{
tar++;
}
if (*tar == 0)
{
return B_TRUE;
}
else
{
if (stackp >= 0 && stackp < STK_SIZE)
{
tar = tarstack[stackp];
if (is_korean (*tar))
{
tar += 2;
}
else
{
tar++;
}
expr = exprstack[stackp--];
}
else
{
return B_FALSE;
}
}
}
else if (*expr == '%')
{
status = IN_PERCENT;
while (* (expr + 1) == '%')
{
expr++;
}
}
else if ((*expr == '_') || (!is_korean (*tar) && *tar == *expr)
|| (is_korean (*tar) && *tar == *expr && * (tar + 1) == * (expr + 1)))
{
if (is_korean (*tar))
{
tar += 2;
}
else
{
tar++;
}
if (is_korean (*expr))
{
expr += 2;
}
else
{
expr++;
}
}
else if (stackp >= 0 && stackp < STK_SIZE)
{
tar = tarstack[stackp];
if (is_korean (*tar))
{
tar += 2;
}
else
{
tar++;
}
expr = exprstack[stackp--];
}
else if (stackp >= STK_SIZE)
{
return B_ERROR;
}
else
{
return B_FALSE;
}
}
if (*tar == 0)
{
if (*expr)
{
while (*expr == '%')
{
expr++;
}
}
if (*expr == 0)
{
return B_TRUE;
}
else
{
return B_FALSE;
}
}
}
}
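/* Usage sketch (assumed semantics, mirroring SQL LIKE with an escape character):
 *   str_like ("CUBRID", "cu%", '\\')  -> B_TRUE  (comparison is case-insensitive)
 *   str_like ("abc",    "a_c", '\\')  -> B_TRUE  ('_' matches exactly one char)
 *   str_like ("abc",    "a",   '\\')  -> B_FALSE
 */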
int
str_like (std::string src, std::string pattern, char esc_char)
{
int result;
std::transform (src.begin(), src.end(), src.begin(), ::tolower);
std::transform (pattern.begin(), pattern.end(), pattern.begin(), ::tolower);
result =
str_eval_like ((const unsigned char *) src.c_str(), (const unsigned char *) pattern.c_str (), (unsigned char) esc_char);
return result;
}
std::string convert_db_value_to_string (DB_VALUE *value, DB_VALUE *value_string)
{
const char *val_str = NULL;
int err, len;
DB_TYPE val_type = db_value_type (value);
if (val_type == DB_TYPE_NCHAR || val_type == DB_TYPE_VARNCHAR)
{
err = db_value_coerce (value, value_string, db_type_to_db_domain (DB_TYPE_VARNCHAR));
if (err >= 0)
{
val_str = db_get_nchar (value_string, &len);
}
}
else
{
err = db_value_coerce (value, value_string, db_type_to_db_domain (DB_TYPE_VARCHAR));
if (err >= 0)
{
val_str = db_get_char (value_string, &len);
}
}
  /* db_value_coerce () may fail and leave val_str as NULL; avoid constructing
     std::string from a null pointer */
  return (val_str != NULL) ? std::string (val_str) : std::string ();
}
#if !defined(SERVER_MODE)
int
get_stmt_type (std::string sql)
{
char *stmt = sql.data ();
if (strncasecmp (stmt, "insert", 6) == 0)
{
return CUBRID_STMT_INSERT;
}
else if (strncasecmp (stmt, "update", 6) == 0)
{
return CUBRID_STMT_UPDATE;
}
else if (strncasecmp (stmt, "delete", 6) == 0)
{
return CUBRID_STMT_DELETE;
}
else if (strncasecmp (stmt, "call", 4) == 0)
{
return CUBRID_STMT_CALL;
}
else if (strncasecmp (stmt, "evaluate", 8) == 0)
{
return CUBRID_STMT_EVALUATE;
}
else
{
return CUBRID_MAX_STMT_TYPE;
}
}
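/* Counts '?' bind markers in the SQL text, skipping markers inside comments
 * ("--", "//", C-style) and quoted strings. Illustrative examples only:
 *   calculate_num_markers ("SELECT * FROM t WHERE a = ? -- ? ignored")  returns 1
 *   calculate_num_markers ("INSERT INTO t VALUES (?, '?', ?)")          returns 2
 */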
int
calculate_num_markers (const std::string &sql)
{
if (sql.empty())
{
return -1;
}
int num_markers = 0;
int sql_len = sql.size ();
for (int i = 0; i < sql_len; i++)
{
if (sql[i] == '?')
{
num_markers++;
}
else if (sql[i] == '-' && sql[i + 1] == '-')
{
i = consume_tokens (sql, i + 2, SQL_STYLE_COMMENT);
}
else if (sql[i] == '/' && sql[i + 1] == '*')
{
i = consume_tokens (sql, i + 2, C_STYLE_COMMENT);
}
else if (sql[i] == '/' && sql[i + 1] == '/')
{
i = consume_tokens (sql, i + 2, CPP_STYLE_COMMENT);
}
else if (sql[i] == '\'')
{
i = consume_tokens (sql, i + 1, SINGLE_QUOTED_STRING);
}
else if (/* cas_default_ansi_quotes == false && */ sql[i] == '\"')
{
i = consume_tokens (sql, i + 1, DOUBLE_QUOTED_STRING);
}
}
return num_markers;
}
int
consume_tokens (std::string sql, int index, STATEMENT_STATUS stmt_status)
{
int sql_len = sql.size ();
if (stmt_status == SQL_STYLE_COMMENT || stmt_status == CPP_STYLE_COMMENT)
{
for (; index < sql_len; index++)
{
if (sql[index] == '\n')
{
break;
}
}
}
else if (stmt_status == C_STYLE_COMMENT)
{
for (; index < sql_len; index++)
{
if (sql[index] == '*' && sql[index + 1] == '/')
{
index++;
break;
}
}
}
else if (stmt_status == SINGLE_QUOTED_STRING)
{
for (; index < sql_len; index++)
{
if (sql[index] == '\'' && sql[index + 1] == '\'')
{
index++;
}
else if (/* cas_default_no_backslash_escapes == false && */ sql[index] == '\\')
{
index++;
}
else if (sql[index] == '\'')
{
break;
}
}
}
else if (stmt_status == DOUBLE_QUOTED_STRING)
{
for (; index < sql_len; index++)
{
if (sql[index] == '\"' && sql[index + 1] == '\"')
{
index++;
}
else if (/* cas_default_no_backslash_escapes == false && */ sql[index] == '\\')
{
index++;
}
else if (sql[index] == '\"')
{
break;
}
}
}
return index;
}
std::string
get_column_default_as_string (DB_ATTRIBUTE *attr)
{
int error = NO_ERROR;
std::string result_default_value_string;
char *default_value_string = NULL;
/* Get default value string */
DB_VALUE *def = db_attribute_default (attr);
if (def == NULL)
{
return "";
}
const char *default_value_expr_type_string = NULL, *default_expr_format = NULL;
const char *default_value_expr_op_string = NULL;
default_value_expr_type_string = db_default_expression_string (attr->default_value.default_expr.default_expr_type);
if (default_value_expr_type_string != NULL)
{
/* default expression case */
int len;
if (attr->default_value.default_expr.default_expr_op != NULL_DEFAULT_EXPRESSION_OPERATOR)
{
/* We now accept only T_TO_CHAR for attr->default_value.default_expr.default_expr_op */
default_value_expr_op_string = "TO_CHAR"; /* FIXME - remove this hard code */
}
default_expr_format = attr->default_value.default_expr.default_expr_format;
if (default_value_expr_op_string != NULL)
{
result_default_value_string.assign (default_value_expr_op_string);
result_default_value_string.append ("(");
result_default_value_string.append (default_value_expr_type_string);
if (default_expr_format)
{
result_default_value_string.append (", \'");
result_default_value_string.append (default_expr_format);
result_default_value_string.append ("\'");
}
result_default_value_string.append (")");
}
else
{
result_default_value_string.assign (default_value_expr_type_string);
}
return result_default_value_string;
}
if (db_value_is_null (def))
{
return "NULL";
}
/* default value case */
switch (db_value_type (def))
{
case DB_TYPE_UNKNOWN:
break;
case DB_TYPE_SET:
case DB_TYPE_MULTISET:
case DB_TYPE_SEQUENCE: /* DB_TYPE_LIST */
serialize_collection_as_string (def, result_default_value_string);
break;
case DB_TYPE_CHAR:
case DB_TYPE_NCHAR:
case DB_TYPE_VARCHAR:
case DB_TYPE_VARNCHAR:
{
int def_size = db_get_string_size (def);
const char *def_str_p = db_get_string (def);
if (def_str_p)
{
result_default_value_string.push_back ('\'');
result_default_value_string.append (def_str_p);
result_default_value_string.push_back ('\'');
result_default_value_string.push_back ('\0');
}
}
break;
default:
{
DB_VALUE tmp_val;
error = db_value_coerce (def, &tmp_val, db_type_to_db_domain (DB_TYPE_VARCHAR));
if (error == NO_ERROR)
{
int def_size = db_get_string_size (&tmp_val);
const char *def_str_p = db_get_string (&tmp_val);
result_default_value_string.assign (def_str_p);
}
db_value_clear (&tmp_val);
}
break;
}
return result_default_value_string;
}
void
serialize_collection_as_string (DB_VALUE *col, std::string &out)
{
out.clear ();
if (!TP_IS_SET_TYPE (db_value_type (col)))
{
return;
}
DB_COLLECTION *db_set = db_get_collection (col);
int size = db_set_size (db_set);
/* first compute the size of the result */
const char *single_value = NULL;
DB_VALUE value, value_string;
out.push_back ('{');
for (int i = 0; i < size; i++)
{
if (db_set_get (db_set, i, &value) != NO_ERROR)
{
out.clear ();
return;
}
std::string single_value = convert_db_value_to_string (&value, &value_string);
out.append (single_value);
if (i != size - 1)
{
out.append (", ");
}
db_value_clear (&value_string);
db_value_clear (&value);
}
out.push_back ('}');
}
char
get_set_domain (DB_DOMAIN *set_domain, int &precision, short &scale, char &charset)
{
int set_domain_count = 0;
int set_type = DB_TYPE_NULL;
precision = 0;
scale = 0;
charset = lang_charset ();
DB_DOMAIN *ele_domain = db_domain_set (set_domain);
for (; ele_domain; ele_domain = db_domain_next (ele_domain))
{
set_domain_count++;
set_type = TP_DOMAIN_TYPE (ele_domain);
precision = db_domain_precision (ele_domain);
scale = (short) db_domain_scale (ele_domain);
charset = db_domain_codeset (ele_domain);
}
return (set_domain_count != 1) ? DB_TYPE_NULL : set_type;
}
#endif
}
|
oxygen1997/SpringCloud-OrderSys
|
order/src/main/java/com/czy/order/service/impl/OrderServiceImpl.java
|
package com.czy.order.service.impl;
import com.czy.order.client.ProductClient;
import com.czy.order.dao.OrderDetailRepository;
import com.czy.order.dao.OrderMasterRepository;
import com.czy.order.dto.CarDTO;
import com.czy.order.dto.OrderDTO;
import com.czy.order.enums.OrderStatusEnum;
import com.czy.order.enums.PayStatusEnum;
import com.czy.order.pojo.OrderDetail;
import com.czy.order.pojo.OrderMaster;
import com.czy.order.pojo.ProductInfo;
import com.czy.order.service.OrderService;
import com.czy.order.util.GenUniqueKeyUtil;
import org.springframework.amqp.rabbit.annotation.Exchange;
import org.springframework.amqp.rabbit.annotation.Queue;
import org.springframework.amqp.rabbit.annotation.QueueBinding;
import org.springframework.amqp.rabbit.annotation.RabbitListener;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.transaction.Transactional;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;
@Service
public class OrderServiceImpl implements OrderService {
@Autowired
OrderMasterRepository orderMasterRepository;
@Autowired
OrderDetailRepository orderDetailRepository;
@Autowired
ProductClient productClient;
@Override
@Transactional
public OrderDTO create(OrderDTO orderDTO) {
        // Generate a unique id and use it as the order id
String orderId = GenUniqueKeyUtil.getUniqueKey();
        // Look up product info (calls the product service)
        // Collect the product ids from the order's items into productIdList
List<OrderDetail> orderDetailList = orderDTO.getOrderDetailList();
List<String> productIdList = new ArrayList<>();
for (OrderDetail orderDetail :orderDetailList){
productIdList.add(orderDetail.getProductId());
}
List<ProductInfo> productInfoList = productClient.listForOrder(productIdList);
        // Compute the total price: unit price * quantity
BigDecimal orderAmount = new BigDecimal("0");
for (OrderDetail orderDetail : orderDetailList){
for (ProductInfo productInfo : productInfoList){
if(productInfo.getProductId().equals(orderDetail.getProductId())){
orderAmount = productInfo.getProductPrice()
.multiply(new BigDecimal(orderDetail.getProductQuantity()))
.add(orderAmount);
                    // Persist the order detail
orderDetail.setProductId(productInfo.getProductId());
orderDetail.setProductName(productInfo.getProductName());
orderDetail.setProductPrice(productInfo.getProductPrice());
orderDetail.setDetailId(GenUniqueKeyUtil.getUniqueKey());
orderDetail.setOrderId(orderId);
orderDetailRepository.save(orderDetail);
}
}
}
        // Decrease stock (calls the product service)
        List<CarDTO> carDTOs = new ArrayList<>();
        for (OrderDetail orderDetail : orderDetailList){
            // create a fresh CarDTO per item; reusing a single instance would make
            // every list entry point to the same (last) product
            CarDTO carDTO = new CarDTO();
            carDTO.setProductId(orderDetail.getProductId());
            carDTO.setProductQuantity(orderDetail.getProductQuantity());
            carDTOs.add(carDTO);
        }
productClient.decreaseProduct(carDTOs);
        // Persist the order master record
OrderMaster orderMaster = new OrderMaster();
orderDTO.setOrderId(orderId);
BeanUtils.copyProperties(orderDTO,orderMaster);
orderMaster.setOrderAmount(orderAmount);
orderMaster.setOrderStatus(OrderStatusEnum.NEW.getCode());
orderMaster.setPayStatus(PayStatusEnum.WAIT.getCode());
orderMasterRepository.save(orderMaster);
return orderDTO;
}
}
|
crici/gradle-native
|
subprojects/internal-testing/src/main/java/dev/nokee/internal/testing/util/ProjectTestUtils.java
|
/*
* Copyright 2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dev.nokee.internal.testing.util;
import dev.nokee.internal.testing.file.TestNameTestDirectoryProvider;
import lombok.val;
import org.gradle.api.Action;
import org.gradle.api.Project;
import org.gradle.api.artifacts.Dependency;
import org.gradle.api.internal.project.ProjectInternal;
import org.gradle.api.model.ObjectFactory;
import org.gradle.api.provider.ProviderFactory;
import org.gradle.testfixtures.ProjectBuilder;
import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Path;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* Test utilities to access various Gradle services useful during testing as well as Project instances.
*/
public final class ProjectTestUtils {
private static final Object lock = new Object();
private static final String CLEANUP_THREAD_NAME = "project-test-utils-cleanup";
private static final AtomicBoolean SHUTDOWN_REGISTERED = new AtomicBoolean();
private static final List<TestNameTestDirectoryProvider> PROJECT_DIRECTORIES_TO_CLEANUP = Collections.synchronizedList(new LinkedList<>());
private static Project _use_project_method = null;
private ProjectTestUtils() {}
private static void maybeRegisterCleanup() {
if (SHUTDOWN_REGISTERED.compareAndSet(false, true)) {
Runtime.getRuntime().addShutdownHook(new Thread(ProjectTestUtils::cleanup, CLEANUP_THREAD_NAME));
}
}
private static Project project() {
if (_use_project_method == null) {
_use_project_method = rootProject();
}
return _use_project_method;
}
/**
	 * Returns a functional {@link ObjectFactory} instance.
	 *
	 * The project associated with the returned ObjectFactory is unspecified, so tests should not depend on it.
	 * In particular, any file-related operation performed through this ObjectFactory resolves against an unspecified, but valid, file system location.
	 * If the file resolution needs to be controlled, create a {@link Project} instance using {@link #createRootProject(File)}.
*
* @return a {@link ObjectFactory} instance, never null
*/
public static ObjectFactory objectFactory() {
return project().getObjects();
}
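	// Usage sketch (assumes Gradle's standard ObjectFactory API, e.g. property(Class)):
	//   ObjectFactory objects = ProjectTestUtils.objectFactory();
	//   org.gradle.api.provider.Property<String> name = objects.property(String.class);
	//   name.set("example");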
/**
* Returns a functional {@link ProviderFactory} instance.
*
* @return a {@link ProviderFactory} instance, never null
*/
public static ProviderFactory providerFactory() {
return project().getProviders();
}
/**
* Creates a {@link Dependency} instance for the specified notation.
*
* @param notation dependency notation, must not be null
* @return a {@link Dependency} instance for the notation, never null
*/
public static Dependency createDependency(Object notation) {
return project().getDependencies().create(notation);
}
/**
* Creates a new root project instance.
*
* @return a new {@link Project} instance, never null
*/
public static Project rootProject() {
maybeRegisterCleanup();
val testDirectory = new TestNameTestDirectoryProvider(ProjectTestUtils.class);
PROJECT_DIRECTORIES_TO_CLEANUP.add(testDirectory);
synchronized (lock) {
return ProjectBuilder.builder().withProjectDir(testDirectory.getTestDirectory().toFile()).build();
}
}
/**
* Creates a new root project instance for the given project directory.
*
* @param rootDirectory a project directory for the root project, must not be null
* @return a new {@link Project} instance, never null
*/
public static Project createRootProject(File rootDirectory) {
synchronized (lock) {
return ProjectBuilder
.builder()
.withProjectDir(rootDirectory)
.build();
}
}
/**
* Creates a new root project instance for the given project directory.
*
* @param rootDirectory a project directory for the root project, must not be null
* @return a new {@link Project} instance, never null
*/
public static Project createRootProject(Path rootDirectory) {
return createRootProject(rootDirectory.toFile());
}
/**
* Creates a child project instance with the specified parent project.
* The child project name and directory defaults to {@literal test} and <pre>${parent.projectDir}/test</pre> respectively.
*
* @param parent the parent project for the child project, must not be null
* @return a new child {@link Project} instance of the specified parent project, never null
*/
public static Project createChildProject(Project parent) {
return createChildProject(parent, "test");
}
/**
* Creates a child project instance with the specified parent project and name.
* The child project directory defaults to <pre>${parent.projectDir}/${name}</pre>.
*
* @param parent the parent project for the child project, must not be null
* @param name the child project name, must not be null
* @return a new child {@link Project} instance of the specified parent project, never null
*/
public static Project createChildProject(Project parent, String name) {
return createChildProject(parent, name, new File(parent.getProjectDir(), name));
}
/**
* Creates a child project instance with the specified parent project, name and directory.
*
* @param parent the parent project for the child project, must not be null
* @param name the child project name, must not be null
* @param projectDirectory the child project directory, must not be null
* @return a new child {@link Project} instance of the specified parent project, never null
*/
public static Project createChildProject(Project parent, String name, File projectDirectory) {
synchronized (lock) {
return ProjectBuilder
.builder()
.withName(name)
.withParent(parent)
.withProjectDir(toCanonicalFile(projectDirectory))
.build();
}
}
/**
* Creates a child project instance with the specified parent project, name and directory.
*
* @param parent the parent project for the child project, must not be null
* @param name the child project name, must not be null
* @param projectDirectory the child project directory, must not be null
* @return a new child {@link Project} instance of the specified parent project, never null
*/
public static Project createChildProject(Project parent, String name, Path projectDirectory) {
return createChildProject(parent, name, projectDirectory.toFile());
}
/**
* Force the evaluation of the specified Gradle project.
	 * It will execute all {@link Project#afterEvaluate(Action)} configuration actions.
	 *
	 * Note: It is generally preferable to write functional tests using the Gradle Runner Kit to test afterEvaluate behavior.
*
* Implementation Note: It uses a call to an internal Gradle API, e.g. {@link ProjectInternal#evaluate()}.
*
* @param project the project to evaluate, must not be null
* @return the specified project, never null
*/
public static Project evaluate(Project project) {
return ((ProjectInternal) project).evaluate();
}
private static File toCanonicalFile(File file) {
try {
return file.getCanonicalFile();
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
/**
* Force cleanup of the temporary project directories.
*/
public static void cleanup() {
try {
synchronized (PROJECT_DIRECTORIES_TO_CLEANUP) {
try {
for (TestNameTestDirectoryProvider testDirectory : PROJECT_DIRECTORIES_TO_CLEANUP) {
testDirectory.cleanup();
}
} finally {
PROJECT_DIRECTORIES_TO_CLEANUP.clear();
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
|
EbiDa/incubator-streampipes
|
streampipes-connect-container-worker/src/main/java/org/apache/streampipes/connect/container/worker/rest/RuntimeResolvableResource.java
|
<reponame>EbiDa/incubator-streampipes<filename>streampipes-connect-container-worker/src/main/java/org/apache/streampipes/connect/container/worker/rest/RuntimeResolvableResource.java
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.streampipes.connect.container.worker.rest;
import org.apache.streampipes.connect.management.RuntimeResovable;
import org.apache.streampipes.connect.rest.AbstractContainerResource;
import org.apache.streampipes.container.api.ResolvesContainerProvidedOptions;
import org.apache.streampipes.model.runtime.RuntimeOptionsRequest;
import org.apache.streampipes.model.runtime.RuntimeOptionsResponse;
import org.apache.streampipes.model.staticproperty.Option;
import org.apache.streampipes.rest.shared.annotation.JsonLdSerialized;
import org.apache.streampipes.rest.shared.util.SpMediaType;
import org.apache.streampipes.sdk.extractor.StaticPropertyExtractor;
import org.apache.streampipes.serializers.jsonld.JsonLdTransformer;
import java.io.IOException;
import java.util.List;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Response;
@Path("/api/v1/{username}/worker/resolvable")
public class RuntimeResolvableResource extends AbstractContainerResource {
@POST
@Path("{id}/configurations")
@JsonLdSerialized
@Produces(SpMediaType.JSONLD)
public Response fetchConfigurations(@PathParam("id") String elementId,
String payload) {
try {
RuntimeOptionsRequest runtimeOptionsRequest = new JsonLdTransformer().fromJsonLd(payload,
RuntimeOptionsRequest.class);
ResolvesContainerProvidedOptions adapterClass =
RuntimeResovable.getRuntimeResolvableAdapter(elementId);
List<Option> availableOptions =
adapterClass.resolveOptions(runtimeOptionsRequest.getRequestId(),
StaticPropertyExtractor.from(runtimeOptionsRequest.getStaticProperties(),
runtimeOptionsRequest.getInputStreams(),
runtimeOptionsRequest.getAppId()));
return ok(new RuntimeOptionsResponse(runtimeOptionsRequest,
availableOptions));
} catch (IOException e) {
e.printStackTrace();
return fail();
}
}
}
|
ggml1/Competitive-Programming
|
UVa/11727.cpp
|
#include <bits/stdc++.h>
#define fast_io ios::sync_with_stdio(0);cin.tie(0)
using namespace std;
int main(){
fast_io;
int t; cin >> t;
int kse = 0;
while(t--){
++kse;
int a, b, c; cin >> a >> b >> c;
cout << "Case " << kse << ": " << a + b + c - min(a, min(b, c)) - max(a, max(b, c)) << endl;
}
return 0;
}
|
monaka/buck
|
test/com/facebook/buck/jvm/java/ExternalJavacTest.java
|
<gh_stars>1-10
/*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.jvm.java;
import static org.junit.Assert.assertEquals;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.DefaultTargetNodeToBuildRuleTransformer;
import com.facebook.buck.rules.FakeBuildRuleParamsBuilder;
import com.facebook.buck.rules.NoopBuildRule;
import com.facebook.buck.rules.RuleKey;
import com.facebook.buck.rules.RuleKeyBuilder;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.UncachedRuleKeyBuilder;
import com.facebook.buck.rules.keys.DefaultRuleKeyBuilderFactory;
import com.facebook.buck.testutil.FakeFileHashCache;
import com.facebook.buck.testutil.integration.DebuggableTemporaryFolder;
import com.facebook.buck.util.FakeProcess;
import com.facebook.buck.util.FakeProcessExecutor;
import com.facebook.buck.util.ProcessExecutor;
import com.facebook.buck.util.ProcessExecutorParams;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.hash.HashCode;
import com.google.common.hash.Hashing;
import org.easymock.EasyMockSupport;
import org.junit.Rule;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Map;
public class ExternalJavacTest extends EasyMockSupport {
private static final Path PATH_TO_SRCS_LIST = Paths.get("srcs_list");
public static final ImmutableSortedSet<Path> SOURCE_PATHS =
ImmutableSortedSet.of(Paths.get("foobar.java"));
@Rule
public DebuggableTemporaryFolder root = new DebuggableTemporaryFolder();
@Rule
public DebuggableTemporaryFolder tmpFolder = new DebuggableTemporaryFolder();
@Test
public void testJavacCommand() {
ExternalJavac firstOrder = createTestStep();
ExternalJavac warn = createTestStep();
ExternalJavac transitive = createTestStep();
assertEquals("fakeJavac -source 6 -target 6 -g -d . -classpath foo.jar @" + PATH_TO_SRCS_LIST,
firstOrder.getDescription(
getArgs().add("foo.jar").build(),
SOURCE_PATHS,
PATH_TO_SRCS_LIST));
assertEquals("fakeJavac -source 6 -target 6 -g -d . -classpath foo.jar @" + PATH_TO_SRCS_LIST,
warn.getDescription(
getArgs().add("foo.jar").build(),
SOURCE_PATHS,
PATH_TO_SRCS_LIST));
assertEquals("fakeJavac -source 6 -target 6 -g -d . -classpath bar.jar" + File.pathSeparator +
"foo.jar @" + PATH_TO_SRCS_LIST,
transitive.getDescription(
getArgs().add("bar.jar" + File.pathSeparator + "foo.jar").build(),
SOURCE_PATHS,
PATH_TO_SRCS_LIST));
}
@Test
public void externalJavacWillHashTheExternalIfNoVersionInformationIsReturned()
throws IOException {
Path javac = Files.createTempFile("fake", "javac");
javac.toFile().deleteOnExit();
Map<Path, HashCode> hashCodes = ImmutableMap.of(javac, Hashing.sha1().hashInt(42));
FakeFileHashCache fileHashCache = new FakeFileHashCache(hashCodes);
SourcePathResolver pathResolver = new SourcePathResolver(
new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer())
);
BuildRuleParams params = new FakeBuildRuleParamsBuilder("//example:target").build();
BuildRule buildRule = new NoopBuildRule(params, pathResolver);
DefaultRuleKeyBuilderFactory fakeRuleKeyBuilderFactory =
new DefaultRuleKeyBuilderFactory(0, fileHashCache, pathResolver);
RuleKey javacKey = new UncachedRuleKeyBuilder(
pathResolver,
fileHashCache,
fakeRuleKeyBuilderFactory)
.setReflectively("javac", javac.toString())
.build();
RuleKeyBuilder<RuleKey> builder = fakeRuleKeyBuilderFactory.newInstance(buildRule);
builder.setReflectively("key.appendableSubKey", javacKey);
RuleKey expected = builder.build();
ProcessExecutorParams javacExe = ProcessExecutorParams.builder().addCommand(
javac.toAbsolutePath().toString(),
"-version").build();
FakeProcess javacProc = new FakeProcess(0, "", "");
final FakeProcessExecutor executor = new FakeProcessExecutor(
ImmutableMap.of(javacExe, javacProc));
builder = fakeRuleKeyBuilderFactory.newInstance(buildRule);
ExternalJavac compiler = new ExternalJavac(javac) {
@Override
ProcessExecutor createProcessExecutor(
PrintStream stdout, PrintStream stderr) {
return executor;
}
};
builder.setReflectively("key", compiler);
RuleKey seen = builder.build();
assertEquals(expected, seen);
}
@Test
public void externalJavacWillHashTheJavacVersionIfPresent()
throws IOException {
Path javac = Files.createTempFile("fake", "javac");
javac.toFile().deleteOnExit();
String reportedJavacVersion = "mozzarella";
JavacVersion javacVersion = JavacVersion.of(reportedJavacVersion);
Map<Path, HashCode> hashCodes = ImmutableMap.of(javac, Hashing.sha1().hashInt(42));
FakeFileHashCache fileHashCache = new FakeFileHashCache(hashCodes);
SourcePathResolver pathResolver = new SourcePathResolver(
new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer())
);
BuildRuleParams params = new FakeBuildRuleParamsBuilder("//example:target").build();
BuildRule buildRule = new NoopBuildRule(params, pathResolver);
DefaultRuleKeyBuilderFactory fakeRuleKeyBuilderFactory =
new DefaultRuleKeyBuilderFactory(0, fileHashCache, pathResolver);
RuleKey javacKey = new UncachedRuleKeyBuilder(
pathResolver,
fileHashCache,
fakeRuleKeyBuilderFactory)
.setReflectively("javac.version", javacVersion.toString())
.build();
RuleKeyBuilder<RuleKey> builder = fakeRuleKeyBuilderFactory.newInstance(buildRule);
builder.setReflectively("key.appendableSubKey", javacKey);
RuleKey expected = builder.build();
ProcessExecutorParams javacExe = ProcessExecutorParams.builder().addCommand(
javac.toAbsolutePath().toString(),
"-version").build();
FakeProcess javacProc = new FakeProcess(0, "", reportedJavacVersion);
final FakeProcessExecutor executor = new FakeProcessExecutor(
ImmutableMap.of(javacExe, javacProc));
builder = fakeRuleKeyBuilderFactory.newInstance(buildRule);
ExternalJavac compiler = new ExternalJavac(javac) {
@Override
ProcessExecutor createProcessExecutor(PrintStream stdout, PrintStream stderr) {
return executor;
}
};
builder.setReflectively("key", compiler);
RuleKey seen = builder.build();
assertEquals(expected, seen);
}
private ImmutableList.Builder<String> getArgs() {
return ImmutableList.<String>builder().add(
"-source", "6",
"-target", "6",
"-g",
"-d", ".",
"-classpath");
}
private ExternalJavac createTestStep() {
Path fakeJavac = Paths.get("fakeJavac");
return new ExternalJavac(fakeJavac);
}
}
|
yupengKenny/baw-server
|
spec/lib/modules/alphabetical_paginator_spec.rb
|
<gh_stars>1-10
# frozen_string_literal: true
describe 'alphabetical paginator activerecord extension' do
before :each do
FactoryBot.build(:user, user_name: '汉字 user').save(validate: false)
FactoryBot.create(:user, user_name: 'aauser')
FactoryBot.create(:user, user_name: 'anuser')
FactoryBot.create(:user, user_name: 'amuser')
FactoryBot.create(:user, user_name: 'azuser')
FactoryBot.create(:user, user_name: 'buser')
FactoryBot.create(:user, user_name: 'zzzzzuser')
FactoryBot.create(:user, user_name: '_user')
FactoryBot.create(:user, user_name: '123user')
end
it 'returns users in the other range' do
users = User.alphabetical_page(:user_name, "\u{1F30F}")
expect(users.count).to eq(2)
expect(users[0].user_name).to eq('_user')
expect(users[1].user_name).to eq('汉字 user')
end
it 'returns users in the number range' do
users = User.alphabetical_page(:user_name, '0-9')
expect(users.count).to eq(1)
expect(users[0].user_name).to eq('123user')
end
it 'returns users in the a-a range' do
users = User.alphabetical_page(:user_name, 'a-a')
expect(users.count).to eq(5)
expect(users[0].user_name).to eq('aauser')
expect(users[1].user_name).to eq('Admin')
expect(users[2].user_name).to eq('amuser')
expect(users[3].user_name).to eq('anuser')
expect(users[4].user_name).to eq('azuser')
end
it 'returns users in the a-b range' do
users = User.alphabetical_page(:user_name, 'a-b')
expect(users.count).to eq(6)
expect(users[0].user_name).to eq('aauser')
expect(users[1].user_name).to eq('Admin')
expect(users[2].user_name).to eq('amuser')
expect(users[3].user_name).to eq('anuser')
expect(users[4].user_name).to eq('azuser')
expect(users[5].user_name).to eq('buser')
end
it 'returns users in the z-z range' do
users = User.alphabetical_page(:user_name, 'z-z')
expect(users.count).to eq(1)
expect(users[0].user_name).to eq('zzzzzuser')
end
it 'returns users in the yzz-zzz range' do
users = User.alphabetical_page(:user_name, 'yzz-zzz')
expect(users.count).to eq(1)
expect(users[0].user_name).to eq('zzzzzuser')
end
  it 'returns users in the am-an range' do
users = User.alphabetical_page(:user_name, 'am-an')
expect(users.count).to eq(2)
expect(users[0].user_name).to eq('amuser')
expect(users[1].user_name).to eq('anuser')
end
context 'optimization for matching arguments' do
    it 'uses a simpler query format for ranges with identical left and right bounds' do
query = User.alphabetical_page(:user_name, 'aaa-aaa').to_sql
expect(query).to include('LOWER(LEFT("user_name", 3)) = \'aaa\'')
expect(query).to match(/WHERE..LOWER(?!.*LOWER)/)
end
end
context 'validating arguments' do
cases = [
'1-2',
"\u{1F30D}",
"\u{1F30F}-\u{1F30F}",
'汉-字',
'a1-a2',
'---',
'\';-- SELECT * FROM users',
'A-Z',
'aA-ab',
'aa-zZ'
]
cases.each do |bad_case|
it "fails to process an invalid case ('#{bad_case}')" do
expect {
User.alphabetical_page(:user_name, bad_case)
}.to raise_error(ArgumentError, 'Alphabetical paginator range invalid')
end
end
end
end
|
graehl/carmel
|
graehl/shared/normalize.hpp
|
<reponame>graehl/carmel
// Copyright 2014 <NAME> - http://graehl.org/
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// normalization to 1 for each group in a disjoint partition of indices - on-disk normalization groups (memmappable) for large scale
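// For example (with add_k_smoothing == 0), a normalization group holding parameter
// indices {0,1,2} with source counts {1,3,6} (sum 10) is rewritten to destination
// weights {0.1, 0.3, 0.6}; groups whose counts sum to zero follow the `zerocounts` policy.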
#ifndef GRAEHL_SHARED__NORMALIZE_HPP
#define GRAEHL_SHARED__NORMALIZE_HPP
#include <graehl/shared/dynamic_array.hpp>
#ifdef GRAEHL_TEST
#include <graehl/shared/test.hpp>
#endif
#include <graehl/shared/container.hpp>
#include <graehl/shared/byref.hpp>
#include <graehl/shared/genio.h>
#include <graehl/shared/threadlocal.hpp>
#include <graehl/shared/funcs.hpp>
#include <algorithm>
#include <graehl/shared/debugprint.hpp>
#include <graehl/shared/swapbatch.hpp>
#include <graehl/shared/statistics.hpp>
namespace graehl {
//FIXME: leave rules that don't occur in normalization groups alone (use some original/default value)
template <class Wsource, class Wdest = Wsource>
struct NormalizeGroups {
typedef NormalizeGroups<Wsource, Wdest> self_type;
typedef Wsource source_t;
typedef Wdest dest_t;
// typedef PointerOffset<W> index_type; // pointer offsets
typedef size_t index_type;
typedef index_type offset_type;
typedef array<offset_type> Group;
typedef SwapBatch<Group> Groups;
max_in_accum<offset_type> max_offset;
size_accum<size_t> total_size;
Groups norm_groups;
NormalizeGroups(std::string basename, unsigned batchsize, source_t add_k_smoothing_ = 0) : norm_groups(basename, batchsize), add_k_smoothing(add_k_smoothing_)
{
//,index_type watch_value
// if (watch_value.get_offset())
}
template <class charT, class Traits>
void
read(std::basic_istream<charT, Traits>& in)
{
char c;
EXPECTCH_SPACE('('); //FIXME: rationalize w/ dynarray input w/ optional '('->eof? not possible?
norm_groups.read_all_enumerate(in, make_both_functors_byref(max_offset, total_size),')');
}
unsigned num_groups() const {
return norm_groups.size();
}
//FIXME: accumulate on read
size_t num_params() const {
return total_size;
}
size_t max_params() const
{
return total_size.maximum();
}
typename Groups::iterator find_group_holding(offset_type v) {
typename Groups::iterator i = norm_groups.begin(), e = norm_groups.end();
DBPC3("find group", v, norm_groups);
for (; i!=e; ++i) {
DBP_ADD_VERBOSE(2);
DBP2(*i, v);
Group &gi=*i;
typename Group::iterator e2 = gi.end();
if (std::find(gi.begin(), e2, v) != e2)
return i;
}
return e;
}
static size_t get_index(offset_type i) {
return i;
}
size_t max_index() const {
return get_index(max_offset);
}
size_t required_size() const {
return max_index()+1;
}
source_t *base;
dest_t *dest;
dest_t maxdiff;
source_t add_k_smoothing;
std::ostream *log;
enum make_not_anon_18 { ZERO_ZEROCOUNTS = 0, SKIP_ZEROCOUNTS = 1, UNIFORM_ZEROCOUNTS = 2};
int zerocounts; // use enum vals
size_t maxdiff_index;
typedef typename Group::iterator GIt;
typedef typename Group::const_iterator GItc;
void print_stats(std::ostream &out = std::cerr) const {
unsigned npar = num_params();
unsigned ng = num_groups();
out << ng << " normalization groups, " << npar<<" parameters, "<<(float)npar/ng<<" average parameters/group, "<<max_params()<< " max.";
}
source_t &source(offset_type index) const {
return base[index];
}
dest_t &sink(offset_type index) const {
return dest[index];
}
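  // Normalize one group in place: divide each source weight by the group sum
  // (plus add-k smoothing), writing results through dest and tracking the
  // largest per-parameter change in maxdiff. Groups whose total count is zero
  // are handled according to the zerocounts policy.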
void operator ()(Group &i) {
GIt end = i.end(), beg = i.begin();
source_t sum = 0;
for (GIt j = beg; j!=end; ++j) {
source_t &w = source(*j);
sum += w;
}
#define DODIFF(d, w) do {dest_t diff = absdiff(d, w); if (maxdiff<diff) {maxdiff_index = get_index(*j); DBP5(d, w, maxdiff, diff, maxdiff_index); maxdiff = diff; } } while (0)
if (sum > 0) {
sum += add_k_smoothing; // add to denominator
DBPC2("Normalization group with", sum);
for (GIt j = beg; j!=end; ++j) {
source_t &w = source(*j);
dest_t &d = sink(*j);
DBP4(get_index(*j), d, w, w/sum);
dest_t prev = d;
d = w/sum;
DODIFF(d, prev);
}
} else {
if (log)
*log << "Zero counts for normalization group #" << 1+(&i-norm_groups.begin()) << " with first parameter " << get_index(*beg) << " (one of " << i.size() << " parameters)";
if (zerocounts!=SKIP_ZEROCOUNTS) {
dest_t setto;
if (zerocounts == UNIFORM_ZEROCOUNTS) {
setto = 1. / (end-beg);
if (log)
*log << " - setting to uniform probability " << setto << std::endl;
} else {
setto = 0;
if (log)
*log << " - setting to zero probability." << std::endl;
}
for (GIt j = beg; j!=end; ++j) {
dest_t &d = sink(*j);
DODIFF(d, setto);
d = setto;
}
}
}
#undef DODIFF
}
#ifdef NORMALIZE_SEEN
void copy_unseen(W *src, W *to) {
for (unsigned i = 0, e = seen_index.size(); i!=e; ++i) {
if (!seen_index[i])
to[i] = src[i];
}
}
void set_unseen_to(W src, W *to) {
for (unsigned i = 0, e = seen_index.size(); i!=e; ++i) {
if (!seen_index[i])
to[i] = src;
}
}
template <class F>
void enumerate_seen(F f) {
for (unsigned i = 0, e = seen_index.size(); i!=e; ++i) {
if (seen_index[i])
f(i);
}
}
template <class F>
void enumerate_unseen(F f) {
for (unsigned i = 0, e = seen_index.size(); i!=e; ++i) {
if (!seen_index[i])
f(i);
}
}
#endif
template <class V> // v(normindex,paramid,normsize) where paramid is in normindex. if end_index>0, v(i) for any unseen i<end_index
void visit_norm_param(V &v, index_type end_index = 0) {
fixed_array<bool> seen(end_index);
unsigned normi = 0;
    for (typename Groups::iterator g = norm_groups.begin(), ge = norm_groups.end(); g!=ge; ++g) {
++normi;
Group const& group=*g;
unsigned gsz = group.size();
for (GItc p = group.begin(), pe = group.end(); p!=pe; ++p) {
index_type i=*p;
v(normi, i, gsz);
if (i<end_index) seen[i] = true;
}
}
for (index_type i = 0; i<end_index; ++i)
if (!seen[i])
v(i);
}
template <class T> // enumerate:
void visit(Group &group, T tag) {
GIt beg = group.begin(), end = group.end();
dest_t sum = 0;
for (GIt i = beg; i!=end; ++i) {
dest_t &w = sink(*i);
tag(w);
sum += w;
}
if (sum > 0)
for (GIt i = beg; i!=end; ++i) {
dest_t &w = sink(*i);
w /= sum;
}
}
template <class T>
void init(dest_t *w, T tag) {
dest = w;
enumerate(norm_groups, *this, tag); // calls visit(group,tag)
}
void init_uniform(dest_t *w) {
init(w, set_one());
}
void init_random(dest_t *w) {
base = w;
init(w, set_random_pos_fraction());
}
// array must have values for all max_index()+1 rules
//FIXME: only works if source_t = dest_t
void normalize(source_t *array_base) {
normalize(array_base, array_base);
}
void normalize(source_t *array_base, dest_t* _dest, int _zerocounts = UNIFORM_ZEROCOUNTS, std::ostream *_log = NULL) {
base = array_base;
dest = _dest;
maxdiff.setZero();
// DBP(maxdiff);
DBP_INC_VERBOSE;
#ifdef DEBUG
unsigned size = required_size();
#endif
DBPC2("Before normalize from base->dest", array<source_t>(base, base+size));
zerocounts = _zerocounts;
log = _log;
enumerate(norm_groups, boost::ref(*this));
DBPC2("After normalize:", array<dest_t>(dest, dest+size));
}
template <class O> void print(O&o) const
{
norm_groups.print(o);
}
TO_OSTREAM_PRINT
FROM_ISTREAM_READ
};
/*
template <class charT, class Traits,class W1,class W2>
std::basic_istream<charT,Traits>&
operator >>
(std::basic_istream<charT,Traits>& is, NormalizeGroups<W1,W2> &arg)
{
arg.read(is);
return is;
}
template <class charT, class Traits,class W1,class W2>
std::basic_ostream<charT,Traits>&
operator <<
(std::basic_ostream<charT,Traits>& o, const NormalizeGroups<W1,W2> &arg)
{
arg.print(o);
return o;
}
*/
#ifdef GRAEHL_TEST
BOOST_AUTO_TEST_CASE( TEST_NORMALIZE )
{
using namespace std;
using namespace graehl;
typedef Weight W;
fixed_array<W> w(10u);
w[0] = 1;
w[1] = 2;
w[2] = 3;
w[3] = 4;
w[4] = 1;
w[5] = 2;
w[6] = 3;
w[7] = 4;
w[8] = 1;
w[9] = 2;
NormalizeGroups<W> ng("tmp.test.normalize", 32);
string s="((0 1) (2 3) (4 5 6) (7 8 9))";
istringstream is(s);
BOOST_CHECK(is >> ng);
BOOST_CHECK(ng.max_index() == 9);
// cerr << Weight::out_always_real;
// cout << Weight::out_variable;
// DBP(w);
// DBP(ng);
ng.normalize(w.begin());
// BOOST_CHECK_CLOSE(w[2].getReal()+w[3].getReal(),1,1e-6);
// BOOST_CHECK_CLOSE(w[2].getReal()*4,w[3].getReal()*3,1e-6);
// DBP(w);
}
#endif
}
#endif
|
jandppw/ppwcode
|
javascript/_obsolete/WaveMaker/common/packages/lib_merge.js
|
/*
Add custom widgets to projects made with WaveMaker Studio
Include a dojo require statement for each custom widget
you want to include in every WaveMaker project.
For project specific widgets, add the dojo.require statement to the project javascript file.
*/
// Example:
// dojo.require("wm.packages.example.Button");
dojo.require("common.packages.ppwcode.NewLineTextArea");
|
lmarvaud/django-invite
|
invite/migrations/0004_family_host.py
|
<reponame>lmarvaud/django-invite
"""
Generated by Django 2.1.2 on 2018-11-01 10:41
"""
#pylint: disable=invalid-name
from django.db import migrations, models
class Migration(migrations.Migration):
"""Add the host to the family"""
dependencies = [
('invite', '0003_auto_20181101_1013'),
]
operations = [
migrations.AddField(
model_name='family',
name='host',
field=models.CharField(default='', max_length=32),
preserve_default=False,
),
]
|
isabella232/ion-1
|
ion/port/logging_cerr.cc
|
/**
Copyright 2017 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS-IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#include "ion/port/logging.h"
#include <iostream> // NOLINT
namespace {
class CerrLogEntryWriter : public ion::port::LogEntryWriter {
public:
CerrLogEntryWriter() {}
~CerrLogEntryWriter() override {}
// LogEntryWriter impl.
void Write(ion::port::LogSeverity severity,
const std::string& message) override {
std::cerr << GetSeverityName(severity) << " " << message << "\n";
}
};
} // namespace
ion::port::LogEntryWriter* ion::port::CreateDefaultLogEntryWriter() {
return new CerrLogEntryWriter();
}
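// Logging tags are not used by the std::cerr-based writer; SetLoggingTag is a
// no-op here.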
void ion::port::SetLoggingTag(const char* tag) {}
|
rhencke/engine
|
src/third_party/angle/src/libANGLE/gen_overlay_fonts.py
|
#!/usr/bin/env vpython
#
# [VPYTHON:BEGIN]
# wheel: <
# name: "infra/python/wheels/freetype-py/${vpython_platform}"
# version: "version:2.1.0.post1"
# >
# [VPYTHON:END]
# Copyright 2019 The ANGLE Project Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# gen_vk_overlay_fonts.py:
# Code generation for overlay fonts. Should be run if the font file under overlay/ is changed,
# or the font sizes declared in this file are modified. The font is assumed to be monospace.
# The output will contain ASCII characters in order from ' ' to '~'. The output will be images
# with 3 rows of 32 characters each.
# NOTE: don't run this script directly. Run scripts/run_code_generation.py.
from datetime import date
import sys
if len(sys.argv) < 2:
from freetype import *
out_file_cpp = 'Overlay_font_autogen.cpp'
out_file_h = 'Overlay_font_autogen.h'
font_file = 'overlay/DejaVuSansMono-Bold.ttf'
template_out_file_h = u"""// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using {font_file}.
//
// Copyright {copyright_year} The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// {out_file_name}:
// Autogenerated overlay font data.
#include "libANGLE/Overlay.h"
namespace gl
{{
namespace overlay
{{
constexpr int kFontCount = {font_count};
constexpr int kFontGlyphWidths[kFontCount] = {{ {font_glyph_widths} }};
constexpr int kFontGlyphHeights[kFontCount] = {{ {font_glyph_heights} }};
constexpr int kFontCharactersPerRow = 32;
constexpr int kFontCharactersPerCol = 3;
constexpr int kFontCharacters = kFontCharactersPerRow * kFontCharactersPerCol;
constexpr int kFontImageWidth = {max_font_width} * kFontCharactersPerRow;
constexpr int kFontImageHeight = {max_font_height} * kFontCharactersPerCol;
{font_layers}
}} // namespace overlay
}} // namespace gl
"""
template_out_file_cpp = u"""// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using images from {font_file}.
//
// Copyright {copyright_year} The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// {out_file_name}:
// Autogenerated overlay font data.
#include "libANGLE/Overlay.h"
#include "libANGLE/Overlay_font_autogen.h"
#include <numeric>
namespace gl
{{
using namespace overlay;
// Save binary size if the font images are never to be used.
#if ANGLE_ENABLE_OVERLAY
namespace
{{
constexpr int kFontWidths[kFontCount] = {{ {font_layer_widths} }};
constexpr int kFontHeights[kFontCount] = {{ {font_layer_heights} }};
{font_data}
// Returns a bit with the value of the pixel.
template<int kFontWidth, int kFontHeight>
uint32_t GetFontLayerPixel(const uint32_t fontImage[kFontHeight][kFontWidth / 32], int x, int y)
{{
ASSERT(x >= 0 && x < kFontWidth && y >= 0 && y < kFontHeight);
return fontImage[y][x / 32] >> (x % 32) & 1;
}}
inline uint32_t GetFontPixel(int layer, int x, int y)
{{
switch (layer)
{{
{get_font_layer_pixel}
default:
UNREACHABLE();
return 0;
}}
}}
}} // anonymous namespace
void OverlayState::initFontData(uint8_t *fontData) const
{{
constexpr int kFontDataLayerSize = kFontImageWidth * kFontImageHeight;
// Unpack the font bitmap into R8_UNORM format. Border pixels are given a 0.5 value for better
// font visibility.
for (int layer = 0; layer < kFontCount; ++layer)
{{
memset(fontData, 0, kFontDataLayerSize);
for (int y = 0; y < kFontHeights[layer]; ++y)
{{
for (int x = 0; x < kFontWidths[layer]; ++x)
{{
uint32_t src = GetFontPixel(layer, x, y);
uint8_t dstValue = src ? 255 : 0;
fontData[y * kFontImageWidth + x] = dstValue;
}}
}}
fontData += kFontDataLayerSize;
}}
}}
#else
void OverlayState::initFontData(uint8_t *fontData) const
{{
memset(fontData, 0, kFontCount * kFontImageWidth * kFontImageHeight * sizeof(*fontData));
}}
#endif
}} // namespace gl
"""
template_get_font_layer_pixel = u"""case {layer}:
return GetFontLayerPixel<kFontWidths[{layer}], kFontHeights[{layer}]>({font_image}, x, y);
"""
def main():
if len(sys.argv) == 2 and sys.argv[1] == 'inputs':
# disabled because of issues on Windows. http://anglebug.com/3892
# print(font_file)
return
if len(sys.argv) == 2 and sys.argv[1] == 'outputs':
print(','.join([out_file_cpp, out_file_h]))
return
font_defs = [('large', 36), ('medium', 23), ('small', 14)]
chars = ' !"#$%&\'()*+,-./0123456789:;<=>?' + \
'@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_' + \
'`abcdefghijklmnopqrstuvwxyz{|}~ '
output_rows = 3
output_cols = 32
assert (len(chars) == output_rows * output_cols)
font_glyph_widths = []
font_glyph_heights = []
font_layers = []
font_data = []
get_font_layer_pixel = []
current_font_layer = 0
# Load the font file.
face = Face(font_file)
assert (face.is_fixed_width)
for font_name, font_size in font_defs:
# Since the font is fixed width, we can retrieve its size right away.
face.set_char_size(font_size << 6)
glyph_width = face.size.max_advance >> 6
glyph_ascender = face.size.ascender >> 6
glyph_descender = face.size.descender >> 6
glyph_height = glyph_ascender - glyph_descender
font_tag = font_name.capitalize()
font_layer = str(current_font_layer)
font_layer_symbol = 'kFontLayer' + font_tag
font_array_name = 'kFontImage' + font_tag
font_width = 'kFontWidths[' + font_layer_symbol + ']'
font_height = 'kFontHeights[' + font_layer_symbol + ']'
# Font pixels are packed in 32-bit values.
font_array_width = output_cols * glyph_width / 32
font_array_height = output_rows * glyph_height
font_array = [[0] * font_array_width for i in range(font_array_height)]
for charIndex in range(len(chars)):
char = chars[charIndex]
base_x = (charIndex % output_cols) * glyph_width
base_y = (charIndex / output_cols) * glyph_height
# Render the character.
face.load_char(char)
bitmap = face.glyph.bitmap
left = face.glyph.bitmap_left
top = face.glyph.bitmap_top
width = bitmap.width
rows = bitmap.rows
pitch = bitmap.pitch
offset_x = left
offset_y = glyph_height - (top - glyph_descender)
# '#' in the smallest font generates a larger glyph than the "fixed" font width.
if offset_x + width > glyph_width:
offset_x = glyph_width - width
if offset_x < 0:
width += offset_x
offset_x = 0
base_x += offset_x
base_y += offset_y
assert (offset_x + width <= glyph_width)
assert (offset_y + rows <= glyph_height)
# Write the character bitmap in the font image.
for y in range(rows):
for x in range(width):
pixel_value = bitmap.buffer[y * pitch + x]
output_bit = 1 if pixel_value >= 122 else 0
font_array_row = base_y + y
font_array_col = (base_x + x) / 32
font_array_bit = (base_x + x) % 32
font_array[font_array_row][font_array_col] |= output_bit << font_array_bit
# Output the image to a C array.
data = 'constexpr uint32_t ' + font_array_name + '[' + font_height + '][' + font_width + '/32] = {\n'
for y in range(font_array_height):
data += '{'
for x in range(font_array_width):
data += '0x{:08X}, '.format(font_array[y][x])
data += '},\n'
data += '};\n'
font_glyph_widths.append(glyph_width)
font_glyph_heights.append(glyph_height)
font_layers.append('constexpr int ' + font_layer_symbol + ' = ' + font_layer + ';')
font_data.append(data)
get_font_layer_pixel.append(
template_get_font_layer_pixel.format(
layer=font_layer_symbol, font_image=font_array_name))
current_font_layer += 1
with open(out_file_h, 'w') as outfile:
outfile.write(
template_out_file_h.format(
script_name=__file__,
font_file=font_file,
copyright_year=date.today().year,
out_file_name=out_file_h,
font_count=len(font_data),
font_glyph_widths=','.join(map(str, font_glyph_widths)),
font_glyph_heights=','.join(map(str, font_glyph_heights)),
max_font_width=max(font_glyph_widths),
max_font_height=max(font_glyph_heights),
font_layers='\n'.join(font_layers)))
outfile.close()
font_layer_widths = [
'kFontGlyphWidths[' + str(layer) + '] * kFontCharactersPerRow'
for layer in range(len(font_data))
]
font_layer_heights = [
'kFontGlyphHeights[' + str(layer) + '] * kFontCharactersPerCol'
for layer in range(len(font_data))
]
with open(out_file_cpp, 'w') as outfile:
outfile.write(
template_out_file_cpp.format(
script_name=__file__,
font_file=font_file,
copyright_year=date.today().year,
out_file_name=out_file_cpp,
font_layer_widths=','.join(font_layer_widths),
font_layer_heights=','.join(font_layer_heights),
font_data='\n'.join(font_data),
get_font_layer_pixel=''.join(get_font_layer_pixel)))
outfile.close()
if __name__ == '__main__':
sys.exit(main())
|
yamacir-kit/meevax
|
src/kernel/syntactic_continuation.cpp
|
<filename>src/kernel/syntactic_continuation.cpp
/*
Copyright 2018-2021 <NAME>.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#include <meevax/kernel/syntactic_continuation.hpp>
#include <meevax/posix/vt10x.hpp>
namespace meevax
{
inline namespace kernel
{
auto syntactic_continuation::operator [](const_reference name) -> const_reference
{
return cdr(machine::locate(name));
}
auto syntactic_continuation::operator [](std::string const& name) -> const_reference
{
return (*this)[intern(name)];
}
auto syntactic_continuation::build() -> void
{
/* ---- NOTE -------------------------------------------------------------
*
* If this class was instantiated by the FORK instruction, the instance
* will have received the compilation continuation as a constructor
* argument.
*
* The car part contains the registers of the virtual Lisp machine
* (s e c . d). The cdr part is set to the global environment at the
* time the FORK instruction was executed.
*
* Here, the value in the c register is the operand of the FORK
* instruction. The operand of the FORK instruction is a pair of a
* lambda expression form passed to the syntax fork/csc and a lexical
* environment.
*
* -------------------------------------------------------------------- */
if (std::get<0>(*this).is<continuation>())
{
/* ---- NOTE -----------------------------------------------------------
*
       * If this object was constructed via make<syntactic_continuation>(...),
       * note that it is not yet registered with the GC until this
       * constructor has completed.
*
* ------------------------------------------------------------------ */
auto const& k = std::get<0>(*this).as<continuation>();
s = k.s();
e = k.e();
c = compile(syntactic_context::outermost, *this, car(k.c()), cdr(k.c()));
d = k.d();
form() = execute();
assert(form().is<closure>());
}
else
{
throw error(make<string>(__func__, " was called by something other than the FORK instruction"), unit);
}
}
auto syntactic_continuation::current_expression() const -> const_reference
{
return car(form());
}
auto syntactic_continuation::define(const_reference name, const_reference value) -> const_reference
{
assert(name.is<symbol>());
return global_environment() = make<identifier>(name, value) | global_environment();
}
auto syntactic_continuation::define(std::string const& name, const_reference value) -> const_reference
{
return define(intern(name), value);
}
auto syntactic_continuation::dynamic_environment() const -> const_reference
{
return cdr(form());
}
auto syntactic_continuation::evaluate(const_reference expression) -> value_type
{
c = compile(syntactic_context::none, *this, expression);
if (is_debug_mode())
{
disassemble(debug_port().as<std::ostream>(), c);
}
return execute();
}
auto syntactic_continuation::execute() -> value_type
{
if (is_trace_mode())
{
return machine::execute<declaration::trace>();
}
else
{
return machine::execute();
}
}
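  // Create a child syntactic_continuation sharing this one's global
  // environment, import the built-in procedures into it, and compile its
  // captured form via build().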
auto syntactic_continuation::fork() const -> value_type
{
let const module = make<syntactic_continuation>(current_continuation(), global_environment());
module.as<syntactic_continuation>().import();
module.as<syntactic_continuation>().build();
return module;
}
auto syntactic_continuation::form() const noexcept -> const_reference
{
return std::get<0>(*this);
}
auto syntactic_continuation::form() noexcept -> reference
{
return const_cast<reference>(std::as_const(*this).form());
}
auto syntactic_continuation::global_environment() const noexcept -> const_reference
{
return std::get<1>(*this);
}
auto syntactic_continuation::global_environment() noexcept -> reference
{
return const_cast<reference>(std::as_const(*this).global_environment());
}
auto syntactic_continuation::import() -> void
{
define<procedure>("free-identifier=?", [this](let const& xs)
{
if (let const& a = car(xs); a.is<symbol>() or a.is<identifier>())
{
if (let const& b = cadr(xs); b.is<symbol>() or b.is<identifier>())
{
if (let const& id1 = a.is<identifier>() ? a.as<identifier>().symbol() : a)
{
if (let const& id2 = b.is<identifier>() ? b.as<identifier>().symbol() : b)
{
return id1 == id2 ? t : f;
}
}
}
}
// if (let const& a = car(xs); a.is<symbol>() or a.is<identifier>())
// {
// if (let const& b = cadr(xs); b.is<symbol>() or b.is<identifier>())
// {
// if (auto const& id1 = a.is<identifier>() ? a.as<identifier>() : locate(a).as<identifier>(); id1.is_free())
// {
// if (auto const& id2 = b.is<identifier>() ? b.as<identifier>() : locate(b).as<identifier>(); id2.is_free())
// {
// return id1 == id2 ? t : f;
// }
// }
// }
// }
return f;
});
define<procedure>("set-batch!", [this](let const& xs) { return batch = car(xs); });
define<procedure>("set-debug!", [this](let const& xs) { return debug = car(xs); });
define<procedure>("set-interactive!", [this](let const& xs) { return interactive = car(xs); });
define<procedure>("set-prompt!", [this](let const& xs) { return prompt = car(xs); });
define<procedure>("set-trace!", [this](let const& xs) { return trace = car(xs); });
define<procedure>("set-verbose!", [this](let const& xs) { return verbose = car(xs); });
}
auto syntactic_continuation::load(std::string const& s) -> value_type
{
write(debug_port(), header(__func__), "open ", s, " => ");
if (let port = make<input_file_port>(s); port and port.as<input_file_port>().is_open())
{
write(debug_port(), t, "\n");
for (let e = read(port); e != eof_object; e = read(port))
{
write(debug_port(), header(__func__), e, "\n");
evaluate(e);
}
return unspecified;
}
else
{
write(debug_port(), f, "\n");
throw file_error(make<string>("failed to open file: " + s), unit);
}
}
auto syntactic_continuation::load(const_reference x) -> value_type
{
if (x.is<symbol>())
{
return load(x.as<symbol>());
}
else if (x.is<string>())
{
return load(x.as<string>());
}
else
{
throw file_error(make<string>(string_append(__FILE__, ":", __LINE__, ":", __func__)), unit);
}
}
auto syntactic_continuation::locate(const_reference variable) -> const_reference
{
if (let const& binding = assq(variable, global_environment()); eq(binding, f))
{
/* -----------------------------------------------------------------------
*
* At the outermost level of a program, a definition
*
* (define <variable> <expression>)
*
* has essentially the same effect as the assignment expression
*
* (set! <variable> <expression>)
*
* if <variable> is bound to a non-syntax value. However, if <variable>
* is not bound, or is a syntactic keyword, then the definition will
* bind <variable> to a new location before performing the assignment,
* whereas it would be an error to perform a set! on an unbound variable.
*
* -------------------------------------------------------------------- */
let const id = make<identifier>(variable);
      cdr(id) = id; // NOTE: an unbound identifier evaluates to itself.
global_environment() = cons(id, global_environment());
return car(global_environment());
}
else
{
return binding;
}
}
auto syntactic_continuation::lookup(const_reference variable) const -> const_reference
{
if (let const& x = assq(variable, global_environment()); eq(x, f))
{
return variable.is<identifier>() ? variable.as<identifier>().symbol() : variable;
}
else
{
return cdr(x);
}
}
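  // Expand a macro use: save the current machine registers on the dump,
  // bind the keyword to the operands of the form, and run the transformer
  // held in current_expression(), returning its result.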
auto syntactic_continuation::macroexpand(const_reference keyword, const_reference form) -> value_type
{
push(d, s, e, cons(make<instruction>(mnemonic::STOP), c)); // XXX ???
s = unit;
e = cons(cons(keyword, cdr(form)), dynamic_environment());
c = current_expression();
return execute();
}
auto operator >>(std::istream & is, syntactic_continuation & datum) -> std::istream &
{
datum.print("syntactic_continuation::operator >>(std::istream &, syntactic_continuation &)");
datum.print("read new expression => ", datum.read(is));
// sk.print("program == ", sk.program(), "current_expression is ", sk.current_expression());
return is;
}
auto operator <<(std::ostream & os, syntactic_continuation & datum) -> std::ostream &
{
// TODO
// Evaluate current_expression, and write the evaluation to ostream.
return datum.write(os, "syntactic_continuation::operator <<(std::ostream &, syntactic_continuation &)\n");
}
auto operator <<(std::ostream & os, syntactic_continuation const& datum) -> std::ostream &
{
return os << magenta << "#,("
<< green << "syntactic-continuation" << reset
<< faint << " #;" << &datum << reset
<< magenta << ")" << reset;
}
template class configurator<syntactic_continuation>;
template class machine<syntactic_continuation>;
template class reader<syntactic_continuation>;
template class writer<syntactic_continuation>;
} // namespace kernel
} // namespace meevax
|
alexey-anufriev/intellij-community
|
platform/platform-api/src/com/intellij/ide/SearchTopHitProvider.java
|
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.NlsSafe;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.function.Consumer;
/**
* @author <NAME>
*/
public interface SearchTopHitProvider {
ExtensionPointName<SearchTopHitProvider> EP_NAME = ExtensionPointName.create("com.intellij.search.topHitProvider");
void consumeTopHits(@NotNull String pattern, @NotNull Consumer<Object> collector, @Nullable Project project);
@NlsSafe
static String getTopHitAccelerator() {
return "/";
}
}
|
nasa/gunns
|
sims/SIM_mass_overflow/RUN_test/input.py
|
# @copyright Copyright 2019 United States Government as represented by the Administrator of the
# National Aeronautics and Space Administration. All Rights Reserved. */
#
#trick setup
trick.sim_services.exec_set_trap_sigfpe(1)
#simControlPanel = trick.SimControlPanel()
#trick.add_external_application(simControlPanel)
#trickView = trick.TrickView()
#trick.add_external_application(trickView)
#trick.real_time_enable()
trick.sim_services.exec_set_terminate_time(12)
trick.exec_set_software_frame(0.0125)
trick.TMM_reduced_checkpoint(False)
trick_mm.mm.set_expanded_arrays(True)
trick_sys.sched.set_enable_freeze(True)
#trick_sys.sched.set_freeze_command(True)
#---------------------------------------------
# Initial setup
#---------------------------------------------
# Override fluid14err volumes and pressures to be small to create a loop where all nodes are
# overflowing. Circulation is created by turning off the potential sources of all but the pot01
# link.
massOverflow.fluid14err.netInput.vol0.mInitialVolume = 1.0e-4
massOverflow.fluid14err.netInput.vol1.mInitialVolume = 1.0e-4
massOverflow.fluid14err.netInput.vol2.mInitialVolume = 1.0e-4
massOverflow.fluid14err.netInput.vol3.mInitialVolume = 1.0e-4
massOverflow.fluid14err.netInput.fluid0.mPressure = 100.0
massOverflow.fluid14err.netInput.fluid1.mPressure = 100.0
massOverflow.fluid14err.netInput.fluid2.mPressure = 100.0
massOverflow.fluid14err.netInput.fluid3.mPressure = 100.0
massOverflow.fluid14err.netInput.fluid0.mTemperature = 294.261
massOverflow.fluid14err.netInput.fluid1.mTemperature = 294.261
massOverflow.fluid14err.netInput.fluid2.mTemperature = 294.261
massOverflow.fluid14err.netInput.fluid3.mTemperature = 294.261
massOverflow.fluid14err.netInput.pot02.mSourcePressure = 0.0
massOverflow.fluid14err.netInput.pot13.mSourcePressure = 0.0
massOverflow.fluid14err.netInput.pot23.mSourcePressure = 0.0
massOverflow.fluid14err.netConfig.pot01.mMaxConductivity = 0.001
massOverflow.fluid14err.netConfig.pot02.mMaxConductivity = 0.001
massOverflow.fluid14err.netConfig.pot13.mMaxConductivity = 0.001
massOverflow.fluid14err.netConfig.pot23.mMaxConductivity = 0.001
massOverflow.fluid12err.netInput.vol0.mInitialVolume = 1.0
massOverflow.fluid12err.netInput.vol1.mInitialVolume = 1.0e-6
massOverflow.fluid12err.netInput.vol2.mInitialVolume = 1.0e-4
massOverflow.fluid12err.netInput.fluid0.mPressure = 100.0
massOverflow.fluid12err.netInput.fluid1.mPressure = 100.0
massOverflow.fluid12err.netInput.fluid2.mPressure = 100.0
massOverflow.fluid12err.netInput.fluid0.mTemperature = 275.0
massOverflow.fluid12err.netInput.fluid1.mTemperature = 275.0
massOverflow.fluid12err.netInput.fluid2.mTemperature = 275.0
massOverflow.fluid35.netConfig.msorb13des.addCompound(trick.ChemicalCompound.CO2,0.1,0.85,0.0,0.05,0.01,-1.0,True,trick.ChemicalCompound.NO_COMPOUND,True,0.0,0.1,1.0)
massOverflow.fluid35.netConfig.msorb13ad.addCompound(trick.ChemicalCompound.H2O,0.11,0.75,0.0,0.05,0.01,-1.0,True,trick.ChemicalCompound.NO_COMPOUND,False,1.0,0.0,1.0)
massOverflow.fluid48.netConfig.msorb45de.addCompound(trick.ChemicalCompound.CO2,0.1,0.85,0.0,0.05,0.01,-1.0,True,trick.ChemicalCompound.NO_COMPOUND,True,0.0,0.1,1.0)
massOverflow.fluid48.netConfig.msorb12ad.addCompound(trick.ChemicalCompound.H2O,0.11,0.75,0.0,0.05,0.01,-1.0,True,trick.ChemicalCompound.NO_COMPOUND,False,1.0,0.0,1.0)
massOverflow.fluid37.sub14.setHeatBalance(-10000.0)
# Configure sorbant segments in the GunnsFluidCdraAdsorber links
massOverflow.fluid41.cdra13.addSegment(0, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra13.addSegment(1, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra13.addSegment(2, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra13.addSegment(3, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra13.addSegment(4, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra13.addSegment(5, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra13.addSegment(6, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra13.addSegment(7, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra13.addSegment(8, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra13.addSegment(9, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra23.addSegment(0, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra23.addSegment(1, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra23.addSegment(2, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra23.addSegment(3, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra23.addSegment(4, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra23.addSegment(5, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra23.addSegment(6, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra23.addSegment(7, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra23.addSegment(8, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra23.addSegment(9, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
# Override fluid45over node 1 volume to be small so it will overflow.
massOverflow.fluid45over.netInput.vol1.mInitialVolume = 1.0e-4
#---------------------------------------------
# T-0 setup (block all flows)
#---------------------------------------------
massOverflow.fluid.netInput.cond01.mMalfBlockageFlag = True
massOverflow.fluid.netInput.cond13.mMalfBlockageFlag = True
massOverflow.fluid.netInput.cond02.mMalfBlockageFlag = True
massOverflow.fluid.netInput.cond23.mMalfBlockageFlag = True
massOverflow.fluid37.netInput.sub14.mMalfBlockageFlag = True
massOverflow.fluid37.netInput.sub14.mMalfBlockageValue = 1.0
trick.add_read(0.0, """massOverflow.fluid6.vlv1.mPathA.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid6.vlv1.mPathA.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid6.vlv1.mPathB.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid6.vlv1.mPathB.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid6.vlv2.mPathA.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid6.vlv2.mPathA.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid6.vlv2.mPathB.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid6.vlv2.mPathB.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid6.vlv3.mPathA.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid6.vlv3.mPathA.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid6.vlv3.mPathB.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid6.vlv3.mPathB.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid7.vlv1.mPathA.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid7.vlv1.mPathA.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid7.vlv1.mPathB.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid7.vlv1.mPathB.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid7.vlv2.mPathA.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid7.vlv2.mPathA.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid7.vlv2.mPathB.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid7.vlv2.mPathB.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid8.chk01.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid8.chk01.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid8.chk02.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid8.chk02.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid8.htch13.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid8.htch13.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid8.htch23.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid8.htch23.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid9.hxc01.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid9.hxc01.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid9.hxc02.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid9.hxc02.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid9.hxs13.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid9.hxs13.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid9.hxs23.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid9.hxs23.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid33.liqMembrane.mMalfMembraneDegradeFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid33.liqMembrane.mMalfMembraneDegradeValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid33.gasMembrane.mMalfMembraneDegradeFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid33.gasMembrane.mMalfMembraneDegradeValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid33.liqSource.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid33.liqSource.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid33.gasSource.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid33.gasSource.mMalfBlockageValue = 1.0""" )
#---------------------------------------------
# T+1 events (start all flows)
#---------------------------------------------
trick.add_read(1.0, """massOverflow.fluid.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid.cond13.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid.cond02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid.cond23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid2.source13.setFlowDemand(0.1)""" )
trick.add_read(1.0, """massOverflow.fluid3.vlv01.setPosition(1.0)""" )
trick.add_read(1.0, """massOverflow.fluid3.vlv02.setPosition(1.0)""" )
trick.add_read(1.0, """massOverflow.fluid3.pipe13.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid3.pipe23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid4.hx01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid4.hx02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid4.sensor13.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid4.sensor23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid5.qd01.setState(trick.GunnsFluidSimpleQd.CONNECTED)""" )
trick.add_read(1.0, """massOverflow.fluid5.qd02.setState(trick.GunnsFluidSimpleQd.CONNECTED)""" )
trick.add_read(1.0, """massOverflow.fluid5.leak13.setMalfLeakHole(True, 1.0)""" )
trick.add_read(1.0, """massOverflow.fluid5.leak23.setMalfLeakHole(True, 1.0)""" )
trick.add_read(1.0, """massOverflow.fluid6.vlv1.mPathA.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid6.vlv1.mPathB.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid6.vlv2.mPathA.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid6.vlv2.mPathB.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid6.vlv3.mPathA.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid6.vlv3.mPathB.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid7.vlv1.mPathA.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid7.vlv1.mPathB.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid7.vlv2.mPathA.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid7.vlv2.mPathB.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid8.chk01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid8.chk02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid8.chk01.setMalfFailTo(True,1.0)""" )
#trick.add_read(1.0, """massOverflow.fluid8.htch13.mMalfBlockageFlag = False""" )
#trick.add_read(1.0, """massOverflow.fluid8.htch23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid9.hxc01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid9.hxc02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid9.hxs13.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid9.hxs23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid10.pchg02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid10.pchg01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid11.prv01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid11.prv02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid11.ls13.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid11.ls23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid12.htch01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid12.htch12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid12err.htch01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid12err.htch12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid13.reg01.setMalfFailTo(True, 1.0)""" )
trick.add_read(1.0, """massOverflow.fluid13.reg02.setMalfFailTo(True, 1.0)""" )
trick.add_read(1.0, """massOverflow.fluid13.rel13.setMalfFailTo(True, 1.0)""" )
trick.add_read(1.0, """massOverflow.fluid13.rel23.setMalfFailTo(True, 1.0)""" )
trick.add_read(1.0, """massOverflow.fluid14.pot01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid14.pot02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid14.pot13.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid14.pot23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid14err.pot01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid14err.pot02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid14err.pot13.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid14err.pot23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid15.vlv32.setPosition(1.0)""" )
trick.add_read(1.0, """massOverflow.fluid16.src10.setFlowDemand(0.01)""" )
trick.add_read(1.0, """massOverflow.fluid17.pot20.setSourcePressure(0.01)""" )
trick.add_read(1.0, """massOverflow.fluid17.tank1.editPartialPressureRate(trick.FluidProperties.GUNNS_H2O, True, 200.0, 10.0)""" )
trick.add_read(1.0, """massOverflow.fluid18.pot20.setSourcePressure(0.01)""" )
trick.add_read(1.0, """massOverflow.fluid18.bln1.editPartialPressureRate(trick.FluidProperties.GUNNS_H2O, True, 200.0, 10.0)""" )
trick.add_read(1.0, """massOverflow.fluid19.srck01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid19.srck13.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid19.turb02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid19.turb23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid20.hfor01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid20.hfor02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid20.hfval13.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid20.hfval23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid21.cont01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid21.cont02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid21.cont13.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid21.cont23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid22.jump01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid22.sock02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid22.jump13.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid22.sock23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid24.gfan01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid24.gfan12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid24.lcp34.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid24.lcp45.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid23.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid23.cond23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid25.pot30.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid25.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid25.cond02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid25.sorb13des.mMalfEfficiencyFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid25.sorb23des.mMalfEfficiencyFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid26.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid26.cond34.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid26.cond45.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid26.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid26.evap14.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid26.pot20.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid27.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid27.cond34.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid27.react45.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid27.hreact45.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid27.hreact12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid27.react12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid28.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid28.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid28.met0.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid28.netInput.met0.mNNominal = 2.0""")
trick.add_read(1.0, """massOverflow.fluid29.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid29.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid29.heat10.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid29.heat21.mMalfBlockageFlag = False""")
trick.add_read(1.0, """massOverflow.fluid30.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid30.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid30.v2met1.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid30.netInput.v2met1.mNNominal = 2.0""")
trick.add_read(1.0, """massOverflow.fluid31.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid31.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid31.fire1.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid32.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid32.cond02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid32.gsep13for.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid32.gsep13back.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid32.gpump23for.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid32.gpump23back.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid33.liqMembrane.mMalfMembraneDegradeFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid33.gasMembrane.mMalfMembraneDegradeFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid33.liqSource.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid33.gasSource.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid34.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid34.lpump12for.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid34.lpump12back.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid35.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid35.cond02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid35.cond23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid35.msorb13ad.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid35.msorb13des.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid35.msorb13des.mCompounds[0].mMalfEfficiencyFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid35.msorb13des.mCompounds[0].mMalfEfficiencyValue = 1.0""" )
trick.add_read(1.0, """massOverflow.fluid36.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid36.cond34.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid36.cond45.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid36.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid36.pchg14.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid37.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid37.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid37.sub14.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid38.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid38.cond23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid38.cond02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid38.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid38.sbound1.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid38.sbound2.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid39.cond02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid39.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid39.reactor.setCurrent(10.0)""" )
trick.add_read(1.0, """massOverflow.fluid40.src01.setFlowDemand(0.0039)""" )
trick.add_read(1.0, """massOverflow.fluid40.cond23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid40.cond56.mMalfBlockageFlag = False""" )
#trick.add_read(0.9, """massOverflow.fluid40.rca12.mDesorbFlag = False """)
trick.add_read(1.0, """massOverflow.fluid40.rca45.mCompounds[0].mAdsorbedMass = 0.02""" )
trick.add_read(1.0, """massOverflow.fluid40.rca45.mCompounds[1].mAdsorbedMass = 0.01""" )
trick.add_read(1.0, """massOverflow.fluid41.src01.setFlowDemand(0.01)""" )
trick.add_read(1.0, """massOverflow.fluid41.src02.setFlowDemand(0.01)""" )
trick.add_read(1.0, """massOverflow.fluid42.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid42.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid42.lsep1.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid43.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid43.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid43.v4meta1.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid43.v4meta1.mRespiration.mBreathsPerMinute = 12.0""" )
trick.add_read(1.0, """massOverflow.fluid44.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid44.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid44.Dhtc13.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid44.Dhtc23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid45.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid45.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid45.pot20.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid45over.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid45over.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid45over.pot20.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid46.src01.setFlowDemand(0.0039)""" )
trick.add_read(1.0, """massOverflow.fluid46.cond23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid46.cond56.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid46.sorb45de.mMalfEfficiencyFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid46.sorb45de.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid46.sorb45de.mMalfBlockageValue = 0.0""" )
trick.add_read(1.0, """massOverflow.fluid47.src01.setFlowDemand(0.0039)""" )
trick.add_read(1.0, """massOverflow.fluid47.cond23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid47.cond56.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid47.hsorb45de.mMalfEfficiencyFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid47.hsorb45de.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid47.hsorb45de.mMalfBlockageValue = 0.0""" )
trick.add_read(1.0, """massOverflow.fluid48.src01.setFlowDemand(0.0039)""" )
trick.add_read(1.0, """massOverflow.fluid48.src01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid48.src01.mMalfBlockageValue = 0.0""" )
trick.add_read(1.0, """massOverflow.fluid48.cond23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid48.cond56.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid48.msorb45de.mCompounds[0].mMalfEfficiencyFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid48.msorb45de.mCompounds[0].mMalfEfficiencyValue = 1.0""" )
trick.add_read(1.0, """massOverflow.fluid48.msorb45de.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid48.msorb45de.mMalfBlockageValue = 0.0""" )
#---------------------------------------------
# T+5 events (mid-test changes)
#---------------------------------------------
trick.add_read(5.0, """massOverflow.fluid15.vlv32.setPosition(0.0)""" )
trick.add_read(6.0, """massOverflow.fluid15.vlv24.setPosition(1.0)""" )
#---------------------------------------------
# T+10 events (stop some flows to allow settling before comparison)
#---------------------------------------------
trick.add_read(10.0, """massOverflow.fluid15.vlv24.setPosition(0.0)""" )
trick.add_read(10.0, """massOverflow.fluid16.src10.setFlowDemand(0.0)""" )
trick.add_read(10.0, """massOverflow.fluid17.pot20.setSourcePressure(0.0)""" )
trick.add_read(10.0, """massOverflow.fluid17.tank1.editPartialPressureRate(trick.FluidProperties.GUNNS_H2O)""" )
trick.add_read(10.0, """massOverflow.fluid18.pot20.setSourcePressure(0.0)""" )
trick.add_read(10.0, """massOverflow.fluid18.bln1.editPartialPressureRate(trick.FluidProperties.GUNNS_H2O)""" )
trick.add_read(10.0, """massOverflow.fluid33.liqMembrane.mMalfMembraneDegradeFlag = True""" )
trick.add_read(10.0, """massOverflow.fluid33.gasMembrane.mMalfMembraneDegradeFlag = True""" )
trick.add_read(10.0, """massOverflow.fluid33.liqSource.mMalfBlockageFlag = True""" )
trick.add_read(10.0, """massOverflow.fluid33.gasSource.mMalfBlockageFlag = True""" )
#---------------------------------------------
# Setup Data Logging
#---------------------------------------------
execfile("Log_setup/Log_setup.py")
log_setup(0.1)
#---------------------------------------------
# Call integration tests
#---------------------------------------------
trick_utest.unit_tests.enable()
trick_utest.unit_tests.set_file_name( "RUN_test/results/SIM_mass_overflow_int_test_results.xml" )
execfile("int_tests/SimTestSuite.py")
|
Kaleem2255/Cloud-Gaming-Windows-Sample
|
deps.win64/include/gpa/utility/range-spec-parser.h
|
/******************************************************************************
Copyright 2019 Intel Corporation.
This software and the related documents are Intel copyrighted materials,
and your use of them is governed by the express license under which they
were provided to you ("License"). Unless the License provides otherwise,
you may not use, modify, copy, publish, distribute, disclose or transmit
this software or the related documents without Intel's prior written
permission.
This software and the related documents are provided as is, with no express
or implied warranties, other than those that are expressly stated in the
License.
******************************************************************************/
#pragma once
#include "igpa-config.h"
namespace gpa {
namespace utility {
class RangeSpec;
/**
* @brief The RangeSpecParser class provides a convenient means to parse range specifications
* described in string data. @see RangeSpecParser::RangeSpecParser.
*/
class RangeSpecParser
{
public:
/**
* @brief RangeSpecParser constructor.
* @param string String containing range specification(s) to parse.
* @details Frame numbers begin at 1. All values must be positive.
Interval capture can be specified using standard 'range'
notation, where ( and ) indicate 'open' range endpoints,
and [ and ] indicate 'closed' range endpoints. Ranges may
include an optional step value, appended to the end of the
range, for example: (10..100]:10, to indicate every tenth
frame in the range 10 to 100 (not including 10). The default
step value is 1. Individual frames are specified as integers
separated by semicolons, for example: 1;10;100. Individual
frames and range specifications can be mixed in any order,
for example: 1;(50..70]:5;10;100.
*/
RangeSpecParser(TCHAR const* string);
~RangeSpecParser();
/**
* @brief Parse the next range (if any) into @a destSpec.
* @param destSpec Pointer to valid RangeSpec instance. If this pointer is null or otherwise
* invalid, behavior is undefined.
* @return True if (a) next range exists, and (b) could be parsed; false otherwise.
*/
bool ParseNextRange(RangeSpec* destSpec);
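    // Illustrative usage sketch (not part of this header's contract); the
    // RangeSpec accessors are assumed and defined elsewhere:
    //
    //     RangeSpecParser parser(TEXT("1;(50..70]:5;10;100"));
    //     RangeSpec range;
    //     while (parser.ParseNextRange(&range)) {
    //         // consume the parsed range
    //     }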
private:
TCHAR const* mCurrent;
};
} // namespace utility
} // namespace gpa
|
tierklinik-dobersberg/userhub
|
internal/api/identityapi/manage_permissions.go
|
<reponame>tierklinik-dobersberg/userhub<filename>internal/api/identityapi/manage_permissions.go
package identityapi
import (
"context"
"net/http"
"github.com/labstack/echo/v4"
"github.com/tierklinik-dobersberg/cis/internal/app"
"github.com/tierklinik-dobersberg/cis/internal/identity"
"github.com/tierklinik-dobersberg/cis/internal/permission"
"github.com/tierklinik-dobersberg/cis/pkg/httperr"
"github.com/tierklinik-dobersberg/cis/pkg/models/identity/v1alpha"
)
type Action struct {
Scope string `json:"scope"`
Description string `json:"description"`
ValidatesResourcePath bool `json:"validatesResourcePath"`
}
type ListActionsResult struct {
Actions []Action `json:"actions"`
}
func ListActionsEndpoint(r *app.Router) {
r.GET(
"v1/actions",
permission.OneOf{ManageUserAction},
func(ctx context.Context, app *app.App, c echo.Context) error {
allActions := permission.AllActions()
var result ListActionsResult
for _, action := range allActions {
result.Actions = append(result.Actions, Action{
Scope: action.Name,
Description: action.Description,
ValidatesResourcePath: action.ResourceName != nil,
})
}
return c.JSON(http.StatusOK, result)
},
)
}
func CreatePermissionEndpoint(r *app.Router) {
r.POST(
"v1/permissions/:scope/:owner",
permission.OneOf{ManageUserAction},
func(ctx context.Context, app *app.App, c echo.Context) error {
manager, err := getManager(app)
if err != nil {
return err
}
scope := c.Param("scope")
owner := c.Param("owner")
var req v1alpha.Permission
if err := c.Bind(&req); err != nil {
return httperr.BadRequest().SetInternal(err)
}
permID, err := manager.CreatePermission(ctx, scope, owner, identity.Permission{
Permission: req,
})
if err != nil {
return err
}
return c.JSON(http.StatusOK, echo.Map{
"id": permID,
})
},
)
}
func DeletePermissionEndpoint(r *app.Router) {
r.DELETE(
"v1/permissions/:scope/:owner/:id",
permission.OneOf{ManageUserAction},
func(ctx context.Context, app *app.App, c echo.Context) error {
manager, err := getManager(app)
if err != nil {
return err
}
scope := c.Param("scope")
owner := c.Param("owner")
permID := c.Param("id")
if err := manager.DeletePermission(ctx, scope, owner, permID); err != nil {
return err
}
return c.NoContent(http.StatusNoContent)
},
)
}
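// Illustrative requests against the routes registered above (paths come from the
// r.GET/r.POST/r.DELETE calls; host, port and authentication handling are assumptions):
//
//   GET    /v1/actions                             -> ListActionsResult JSON
//   POST   /v1/permissions/{scope}/{owner}         body: v1alpha.Permission JSON, returns {"id": ...}
//   DELETE /v1/permissions/{scope}/{owner}/{id}    -> 204 No Content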
|
RichardLitt/go-filecoin
|
tools/faucet/main.go
|
package main
import (
"encoding/json"
"flag"
"fmt"
"io/ioutil"
"net/http"
"time"
"gx/ipfs/QmR8BauakNcBa3RbE4nbQu76PDiJgoQgz8AJdhJuiU4TAw/go-cid"
logging "gx/ipfs/QmcuXC5cxs79ro2cUuHs4HQ2bkDLJUYokwL8aivcX6HW3C/go-log"
"github.com/filecoin-project/go-filecoin/address"
"github.com/filecoin-project/go-filecoin/tools/faucet/limiter"
)
var log = logging.Logger("faucet")
// Tick interval to clean up wallet addrs that have passed the expiry time
var limiterCleanTick = time.Minute * 15
// Default timeout between wallet fund requests
var defaultLimiterExpiry = time.Hour * 24
func init() {
// Info level
logging.SetAllLoggers(4)
}
type timeImpl struct{}
// Until returns the time.Duration until time.Time t
func (mt *timeImpl) Until(t time.Time) time.Duration {
return time.Until(t)
}
func main() {
filapi := flag.String("fil-api", "localhost:3453", "set the api address of the filecoin node to use")
filwal := flag.String("fil-wallet", "", "(required) set the wallet address for the controlled filecoin node to send funds from")
expiry := flag.Duration("limiter-expiry", defaultLimiterExpiry, "minimum time duration between faucet request to the same wallet addr")
faucetval := flag.Int64("faucet-val", 500, "set the amount of fil to pay to each requester")
flag.Parse()
if *filwal == "" {
fmt.Println("ERROR: must provide wallet address to send funds from")
flag.Usage()
return
}
addrLimiter := limiter.NewLimiter(&timeImpl{})
// Clean the limiter every limiterCleanTick
go func() {
c := time.Tick(limiterCleanTick)
for range c {
addrLimiter.Clean()
}
}()
http.HandleFunc("/", displayForm)
http.HandleFunc("/tap", func(w http.ResponseWriter, r *http.Request) {
target := r.FormValue("target")
if target == "" {
http.Error(w, "must specify a target address to send FIL to", 400)
return
}
log.Infof("Request to send funds to: %s", target)
addr, err := address.NewFromString(target)
if err != nil {
log.Errorf("failed to parse target address: %s %s", target, err)
http.Error(w, fmt.Sprintf("Failed to parse target address %s %s", target, err.Error()), 400)
return
}
if readyIn, ok := addrLimiter.Ready(target); !ok {
log.Errorf("limit hit for target address %s", target)
w.Header().Add("Retry-After", fmt.Sprintf("%d", int64(readyIn/time.Second)))
http.Error(w, fmt.Sprintf("Too Many Requests, please wait %s", readyIn), http.StatusTooManyRequests)
return
}
reqStr := fmt.Sprintf("http://%s/api/message/send?arg=%s&value=%d&from=%s&price=0&limit=0", *filapi, addr.String(), *faucetval, *filwal)
log.Infof("Request URL: %s", reqStr)
resp, err := http.Post(reqStr, "application/json", nil)
if err != nil {
log.Errorf("failed to Post request. Status: %s Error: %s", resp.Status, err)
http.Error(w, err.Error(), 500)
return
}
out, err := ioutil.ReadAll(resp.Body)
if err != nil {
log.Errorf("failed to read response body: %s", err)
http.Error(w, "failed to read response", 500)
return
}
if resp.StatusCode != 200 {
log.Errorf("status: %s body: %s", resp.Status, string(out))
http.Error(w, "failed to send funds", 500)
return
}
msgResp := struct{ Cid cid.Cid }{}
// result should be a message cid
if err := json.Unmarshal(out, &msgResp); err != nil {
log.Errorf("json unmarshal from response failed: %s", err)
log.Errorf("response data was: %s", out)
http.Error(w, "faucet unmarshal failed", 500)
return
}
msgcid := msgResp.Cid
addrLimiter.Add(target, time.Now().Add(*expiry))
log.Info("Request successful. Message CID: %s", msgcid.String())
w.Header().Add("Message-Cid", msgcid.String())
w.WriteHeader(200)
fmt.Fprint(w, "Success! Message CID: ") // nolint: errcheck
fmt.Fprintln(w, msgcid.String()) // nolint: errcheck
})
panic(http.ListenAndServe(":9797", nil))
}
const form = `
<html>
<body>
<h1> What is your wallet address </h1>
<p> You can find this by running: </p>
<tt> go-filecoin wallet addrs ls </tt>
<p> Address: </p>
<form action="/tap" method="post">
<input type="text" name="target" size="30" />
<input type="submit" value="Submit" size="30" />
</form>
</body>
</html>
`
func displayForm(w http.ResponseWriter, r *http.Request) {
fmt.Fprint(w, form) // nolint: errcheck
}
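// Illustrative request (sketch): with the faucet listening on :9797 as set up in main,
// a client could request funds for a wallet address like so (the address is a placeholder):
//
//   curl -d "target=<wallet address>" http://localhost:9797/tap
//
// On success the Message-Cid response header carries the CID of the transfer message.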
|
Fusion-Rom/android_external_chromium_org_third_party_WebKit
|
Source/core/animation/animatable/AnimatableValueKeyframe.h
|
<filename>Source/core/animation/animatable/AnimatableValueKeyframe.h
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef AnimatableValueKeyframe_h
#define AnimatableValueKeyframe_h
#include "core/animation/Keyframe.h"
#include "core/animation/animatable/AnimatableValue.h"
namespace blink {
class AnimatableValueKeyframe : public Keyframe {
public:
static PassRefPtrWillBeRawPtr<AnimatableValueKeyframe> create()
{
return adoptRefWillBeNoop(new AnimatableValueKeyframe);
}
void setPropertyValue(CSSPropertyID property, PassRefPtrWillBeRawPtr<AnimatableValue> value)
{
m_propertyValues.add(property, value);
}
void clearPropertyValue(CSSPropertyID property) { m_propertyValues.remove(property); }
AnimatableValue* propertyValue(CSSPropertyID property) const
{
ASSERT(m_propertyValues.contains(property));
return m_propertyValues.get(property);
}
virtual PropertySet properties() const OVERRIDE;
virtual void trace(Visitor*) OVERRIDE;
class PropertySpecificKeyframe : public Keyframe::PropertySpecificKeyframe {
public:
PropertySpecificKeyframe(double offset, PassRefPtr<TimingFunction> easing, const AnimatableValue*, AnimationEffect::CompositeOperation);
AnimatableValue* value() const { return m_value.get(); }
virtual const PassRefPtrWillBeRawPtr<AnimatableValue> getAnimatableValue() const OVERRIDE FINAL { return m_value; }
virtual PassOwnPtrWillBeRawPtr<Keyframe::PropertySpecificKeyframe> neutralKeyframe(double offset, PassRefPtr<TimingFunction> easing) const OVERRIDE FINAL;
virtual PassRefPtrWillBeRawPtr<Interpolation> createInterpolation(CSSPropertyID, blink::Keyframe::PropertySpecificKeyframe* end, Element*) const OVERRIDE FINAL;
virtual void trace(Visitor*) OVERRIDE;
private:
PropertySpecificKeyframe(double offset, PassRefPtr<TimingFunction> easing, PassRefPtrWillBeRawPtr<AnimatableValue>);
virtual PassOwnPtrWillBeRawPtr<Keyframe::PropertySpecificKeyframe> cloneWithOffset(double offset) const OVERRIDE;
virtual bool isAnimatableValuePropertySpecificKeyframe() const OVERRIDE { return true; }
RefPtrWillBeMember<AnimatableValue> m_value;
};
private:
AnimatableValueKeyframe() { }
AnimatableValueKeyframe(const AnimatableValueKeyframe& copyFrom);
virtual PassRefPtrWillBeRawPtr<Keyframe> clone() const OVERRIDE;
virtual PassOwnPtrWillBeRawPtr<Keyframe::PropertySpecificKeyframe> createPropertySpecificKeyframe(CSSPropertyID) const OVERRIDE;
virtual bool isAnimatableValueKeyframe() const OVERRIDE { return true; }
typedef WillBeHeapHashMap<CSSPropertyID, RefPtrWillBeMember<AnimatableValue> > PropertyValueMap;
PropertyValueMap m_propertyValues;
};
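// Illustrative use (sketch, based only on the members declared above; "value" stands in
// for any AnimatableValue instance obtained elsewhere):
//   RefPtrWillBeRawPtr<AnimatableValueKeyframe> keyframe = AnimatableValueKeyframe::create();
//   keyframe->setPropertyValue(CSSPropertyOpacity, value);
//   AnimatableValue* stored = keyframe->propertyValue(CSSPropertyOpacity);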
typedef AnimatableValueKeyframe::PropertySpecificKeyframe AnimatableValuePropertySpecificKeyframe;
DEFINE_TYPE_CASTS(AnimatableValueKeyframe, Keyframe, value, value->isAnimatableValueKeyframe(), value.isAnimatableValueKeyframe());
DEFINE_TYPE_CASTS(AnimatableValuePropertySpecificKeyframe, Keyframe::PropertySpecificKeyframe, value, value->isAnimatableValuePropertySpecificKeyframe(), value.isAnimatableValuePropertySpecificKeyframe());
}
#endif
|
jemiah-labs/SKRLS-COBOL
|
cobol85parser/src/main/java/io/proleap/cobol/asg/metamodel/procedure/read/impl/ReadStatementImpl.java
|
<reponame>jemiah-labs/SKRLS-COBOL<gh_stars>0
/*
* Copyright (C) 2017, <NAME> <<EMAIL>>
* All rights reserved.
*
* This software may be modified and distributed under the terms
* of the MIT license. See the LICENSE file for details.
*/
package io.proleap.cobol.asg.metamodel.procedure.read.impl;
import org.jemiahlabs.skrls.core.Producer;
import io.proleap.cobol.Cobol85Parser.ReadIntoContext;
import io.proleap.cobol.Cobol85Parser.ReadKeyContext;
import io.proleap.cobol.Cobol85Parser.ReadStatementContext;
import io.proleap.cobol.Cobol85Parser.ReadWithContext;
import io.proleap.cobol.asg.metamodel.ProgramUnit;
import io.proleap.cobol.asg.metamodel.Scope;
import io.proleap.cobol.asg.metamodel.call.Call;
import io.proleap.cobol.asg.metamodel.procedure.AtEndPhrase;
import io.proleap.cobol.asg.metamodel.procedure.InvalidKeyPhrase;
import io.proleap.cobol.asg.metamodel.procedure.NotAtEndPhrase;
import io.proleap.cobol.asg.metamodel.procedure.NotInvalidKeyPhrase;
import io.proleap.cobol.asg.metamodel.procedure.StatementType;
import io.proleap.cobol.asg.metamodel.procedure.StatementTypeEnum;
import io.proleap.cobol.asg.metamodel.procedure.impl.StatementImpl;
import io.proleap.cobol.asg.metamodel.procedure.read.Into;
import io.proleap.cobol.asg.metamodel.procedure.read.Key;
import io.proleap.cobol.asg.metamodel.procedure.read.ReadStatement;
import io.proleap.cobol.asg.metamodel.procedure.read.With;
public class ReadStatementImpl extends StatementImpl implements ReadStatement {
protected AtEndPhrase atEnd;
protected final ReadStatementContext ctx;
protected Call fileCall;
protected Into into;
protected InvalidKeyPhrase invalidKeyPhrase;
protected Key key;
protected boolean nextRecord;
protected NotAtEndPhrase notAtEndPhrase;
protected NotInvalidKeyPhrase notInvalidKeyPhrase;
protected final StatementType statementType = StatementTypeEnum.READ;
protected With with;
private final Producer producer;
public ReadStatementImpl(final ProgramUnit programUnit, final Scope scope, final ReadStatementContext ctx, final Producer producer) {
super(programUnit, scope, ctx, producer);
this.producer = producer;
this.ctx = ctx;
}
@Override
public Into addInto(final ReadIntoContext ctx) {
Into result = (Into) getASGElement(ctx);
if (result == null) {
result = new IntoImpl(programUnit, ctx, producer);
if (ctx.identifier() != null) {
final Call intoCall = createCall(ctx.identifier());
result.setIntoCall(intoCall);
}
into = result;
registerASGElement(result);
}
return result;
}
@Override
public Key addKey(final ReadKeyContext ctx) {
Key result = (Key) getASGElement(ctx);
if (result == null) {
result = new KeyImpl(programUnit, ctx, producer);
final Call keyCall = createCall(ctx.qualifiedDataName());
result.setKeyCall(keyCall);
key = result;
registerASGElement(result);
}
return result;
}
@Override
public With addWith(final ReadWithContext ctx) {
With result = (With) getASGElement(ctx);
if (result == null) {
result = new WithImpl(programUnit, ctx, producer);
// type
final With.WithType type;
if (ctx.KEPT() != null) {
type = With.WithType.KEPT_LOCK;
} else if (ctx.NO() != null) {
type = With.WithType.NO_LOCK;
} else if (ctx.WAIT() != null) {
type = With.WithType.WAIT;
} else {
type = null;
}
result.setWithType(type);
with = result;
registerASGElement(result);
}
return result;
}
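// Illustrative mapping (sketch): a COBOL clause such as "READ CUSTOMER-FILE WITH NO LOCK"
// would reach addWith with ctx.NO() non-null and yield With.WithType.NO_LOCK; "WITH KEPT LOCK"
// and "WITH WAIT" map to KEPT_LOCK and WAIT respectively. The file name is a made-up example.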
@Override
public AtEndPhrase getAtEnd() {
return atEnd;
}
@Override
public Call getFileCall() {
return fileCall;
}
@Override
public Into getInto() {
return into;
}
@Override
public InvalidKeyPhrase getInvalidKeyPhrase() {
return invalidKeyPhrase;
}
@Override
public Key getKey() {
return key;
}
@Override
public NotAtEndPhrase getNotAtEndPhrase() {
return notAtEndPhrase;
}
@Override
public NotInvalidKeyPhrase getNotInvalidKeyPhrase() {
return notInvalidKeyPhrase;
}
@Override
public StatementType getStatementType() {
return statementType;
}
@Override
public With getWith() {
return with;
}
@Override
public boolean isNextRecord() {
return nextRecord;
}
@Override
public void setAtEnd(final AtEndPhrase atEnd) {
this.atEnd = atEnd;
}
@Override
public void setFileCall(final Call fileCall) {
this.fileCall = fileCall;
}
@Override
public void setInvalidKeyPhrase(final InvalidKeyPhrase invalidKeyPhrase) {
this.invalidKeyPhrase = invalidKeyPhrase;
}
@Override
public void setNextRecord(final boolean nextRecord) {
this.nextRecord = nextRecord;
}
@Override
public void setNotAtEndPhrase(final NotAtEndPhrase notAtEndPhrase) {
this.notAtEndPhrase = notAtEndPhrase;
}
@Override
public void setNotInvalidKeyPhrase(final NotInvalidKeyPhrase notInvalidKeyPhrase) {
this.notInvalidKeyPhrase = notInvalidKeyPhrase;
}
}
|
wzx54321/LockDemo
|
aliocrlib/src/main/java/com/lib/aliocr/bean/ReqInput.java
|
<gh_stars>100-1000
package com.lib.aliocr.bean;
import java.io.Serializable;
/**
* Author: xin on 2018/7/9 0009 15:03
* <p>
* Email: <EMAIL>
* <P>
* https://github.com/wzx54321/XinFrameworkLib
*/
public class ReqInput implements Serializable {
private static final long serialVersionUID = 7540344538333996814L;
private String image;// base64 of the image
private String configure;// "{\"side\":\"face\"}" ID card front/back side: face/back
public String getImage() {
return image;
}
public void setImage(String image) {
this.image = image;
}
public String getConfigure() {
return configure;
}
public void setConfigure(String configure) {
this.configure = configure;
}
}
|
lagerdata/unit-test_templates
|
cc3235sf/ti_sdk/simplelink_cc32xx_sdk_4_10_00_07/docs/wifi_host_driver_api/html/group___wlan_struct_sl_wlan_rx_filter_pattern_arg__t.js
|
var group___wlan_struct_sl_wlan_rx_filter_pattern_arg__t =
[
[ "Length", "group___wlan.html#aa28b6e5b77cd079809e345d284ce2068", null ],
[ "Offset", "group___wlan.html#a1881fc88f70ed8bdc975d6d055c1c149", null ],
[ "Reserved", "group___wlan.html#a2f1a76867fe7184d36fd5090caec3305", null ],
[ "Value", "group___wlan.html#a23c7bc57a96d5e6c897aceeb742c72c5", null ]
];
|
boostasoft/ecommerce
|
src/main/java/com/commerce/app/web/rest/vm/PaymentRequestVM.java
|
<filename>src/main/java/com/commerce/app/web/rest/vm/PaymentRequestVM.java
package com.commerce.app.web.rest.vm;
import com.adyen.model.BrowserInfo;
import com.adyen.model.checkout.DefaultPaymentMethodDetails;
import javax.validation.constraints.NotNull;
public class PaymentRequestVM {
@NotNull
private DefaultPaymentMethodDetails paymentMethod;
private BrowserInfo browserInfo;
private String origin;
public DefaultPaymentMethodDetails getPaymentMethod() {
return paymentMethod;
}
public PaymentRequestVM setPaymentMethod(final DefaultPaymentMethodDetails paymentMethod) {
this.paymentMethod = paymentMethod;
return this;
}
public BrowserInfo getBrowserInfo() {
return browserInfo;
}
public PaymentRequestVM setBrowserInfo(final BrowserInfo browserInfo) {
this.browserInfo = browserInfo;
return this;
}
public String getOrigin() {
return origin;
}
public PaymentRequestVM setOrigin(final String origin) {
this.origin = origin;
return this;
}
@Override
public String toString() {
return "PaymentRequestDTO{" +
"paymentMethod=" + paymentMethod +
", browserInfo=" + browserInfo +
", origin='" + origin + '\'' +
'}';
}
}
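// Illustrative construction using the fluent setters above (all values are placeholders):
//   PaymentRequestVM vm = new PaymentRequestVM()
//       .setPaymentMethod(paymentMethod)
//       .setBrowserInfo(browserInfo)
//       .setOrigin("https://shop.example.com");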
|
lechium/iOS1351Headers
|
System/Library/PrivateFrameworks/SpringBoard.framework/SBSnapshotSlotIdWrapper.h
|
/*
* This header is generated by classdump-dyld 1.5
* on Wednesday, October 27, 2021 at 3:23:48 PM Mountain Standard Time
* Operating System: Version 13.5.1 (Build 17F80)
* Image Source: /System/Library/PrivateFrameworks/SpringBoard.framework/SpringBoard
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>. Updated by <NAME>.
*/
@class CAContext;
@interface SBSnapshotSlotIdWrapper : NSObject {
unsigned _slotId;
CAContext* _context;
}
@property (nonatomic,readonly) unsigned slotId; //@synthesize slotId=_slotId - In the implementation block
@property (nonatomic,readonly) CAContext * context; //@synthesize context=_context - In the implementation block
-(id)init;
-(void)dealloc;
-(CAContext *)context;
-(unsigned)slotId;
-(id)initWithContext:(id)arg1 slotId:(unsigned)arg2 ;
@end
|
tusharchoudhary0003/Custom-Football-Game
|
sources/p024io/fabric/sdk/android/services/concurrency/C13952m.java
|
package p024io.fabric.sdk.android.services.concurrency;
import java.util.Collection;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import p024io.fabric.sdk.android.services.concurrency.C13940e.C13944d;
/* renamed from: io.fabric.sdk.android.services.concurrency.m */
/* compiled from: PriorityAsyncTask */
public abstract class C13952m<Params, Progress, Result> extends C13940e<Params, Progress, Result> implements C13948h<C13960s>, C13956p, C13960s, C13947g {
/* renamed from: o */
private final C13957q f42305o = new C13957q();
/* renamed from: io.fabric.sdk.android.services.concurrency.m$a */
/* compiled from: PriorityAsyncTask */
private static class C13953a<Result> implements Executor {
/* renamed from: a */
private final Executor f42306a;
/* access modifiers changed from: private */
/* renamed from: b */
public final C13952m f42307b;
public C13953a(Executor ex, C13952m task) {
this.f42306a = ex;
this.f42307b = task;
}
public void execute(Runnable command) {
this.f42306a.execute(new C13951l(this, command, null));
}
}
/* renamed from: a */
public final void mo43406a(ExecutorService exec, Params... params) {
super.mo43372a(new C13953a(exec, this), params);
}
public int compareTo(Object another) {
return C13950k.m44332a(this, another);
}
/* renamed from: a */
public void mo43381a(C13960s task) {
if (mo43375d() == C13944d.PENDING) {
((C13948h) ((C13956p) mo43410g())).mo43381a(task);
return;
}
throw new IllegalStateException("Must not add Dependency after task is running");
}
/* renamed from: c */
public Collection<C13960s> mo43383c() {
return ((C13948h) ((C13956p) mo43410g())).mo43383c();
}
/* renamed from: b */
public boolean mo43382b() {
return ((C13948h) ((C13956p) mo43410g())).mo43382b();
}
/* renamed from: a */
public void mo43407a(boolean finished) {
((C13960s) ((C13956p) mo43410g())).mo43407a(finished);
}
/* renamed from: a */
public boolean mo43408a() {
return ((C13960s) ((C13956p) mo43410g())).mo43408a();
}
/* renamed from: a */
public void mo43405a(Throwable throwable) {
((C13960s) ((C13956p) mo43410g())).mo43405a(throwable);
}
/* renamed from: g */
public <T extends C13948h<C13960s> & C13956p & C13960s> T mo43410g() {
return this.f42305o;
}
}
|
cyjake/material
|
icons/VectorCircleVariantIcon.js
|
<filename>icons/VectorCircleVariantIcon.js
import React from 'react'
const DEFAULT_SIZE = 24
export default ({
fill = 'currentColor',
width = DEFAULT_SIZE,
height = DEFAULT_SIZE,
style = {},
...props
}) => (
<svg
viewBox={ `0 0 ${ DEFAULT_SIZE } ${ DEFAULT_SIZE }` }
style={{ fill, width, height, ...style }}
{ ...props }
>
<path d="M22,9H19.97C18.7,5.41 15.31,3 11.5,3C6.53,3 2.5,7.03 2.5,12C2.5,17 6.53,21 11.5,21C15.31,21 18.7,18.6 20,15H22M20,11V13H18V11M17.82,15C16.66,17.44 14.2,19 11.5,19C7.64,19 4.5,15.87 4.5,12C4.5,8.14 7.64,5 11.5,5C14.2,5 16.66,6.57 17.81,9H16V15" />
</svg>
)
|
garethhk/mldong
|
mldong-generator/src/main/java/com/mldong/Generator.java
|
package com.mldong;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.gson.Gson;
import com.mldong.generator.config.GeneratorConfig;
import com.mldong.generator.config.model.TableConfigModel;
import com.mldong.generator.config.model.TemplateConfigModel;
import com.mldong.generator.core.DataBase;
import com.mldong.generator.core.impl.MysqlDataBase;
import com.mldong.generator.core.model.Table;
import com.mldong.generator.engine.FreeMarkerImpl;
import com.mldong.generator.engine.TemplateEngine;
/**
* Main entry point for code generation
* @author mldong
*
*/
public class Generator {
private static final Logger LOGGER = LoggerFactory.getLogger(Generator.class);
public static void main(String[] args) throws FileNotFoundException {
String path = Generator.class.getResource("/").getPath();
String configPath = path+"config.yml";
String dataTypePath = path+ "dataType.yml";
String templateDir = path + "/templates";
GeneratorConfig config = new GeneratorConfig(configPath, dataTypePath);
// Load the configuration
config.loadConfig();
DataBase dataBase = new MysqlDataBase(config);
// Gson gson = new GsonBuilder().setPrettyPrinting().create();
Gson gson = new Gson();
TemplateEngine templateEngine = new FreeMarkerImpl(templateDir);
// Tables to generate code for
List<TableConfigModel> tableConfigModelList = Arrays.asList(config.getConfigModel().getTables());
// Template collection
List<TemplateConfigModel> templateConfigModelList = Arrays.asList(config.getConfigModel().getTemplates());
tableConfigModelList.forEach(tableConfigModel -> {
// Fetch the table metadata
List<Table> tableList = dataBase.getTables(tableConfigModel.getTableName());
tableList.forEach(table -> {
LOGGER.info("元数据:{}",gson.toJson(table));
Map<String, Object> model = new HashMap<String, Object>();
String targetProject = config.getConfigModel().getTargetProject();
model.put("targetProject", targetProject);
model.put("basePackage", config.getConfigModel().getBasePackage());
model.put("moduleName", config.getConfigModel().getModuleName());
model.put("moduleDesc", config.getConfigModel().getModuleDesc());
model.put("logicDelete", config.getConfigModel().isLogicDelete());
model.put("table", table);
templateConfigModelList.forEach(templateConfigModel -> {
// Only selected templates generate code
if(templateConfigModel.isSelected()) {
if(!templateConfigModel.getTargetPath().contains(targetProject)){
templateConfigModel.setTargetPath(targetProject+File.separator+templateConfigModel.getTargetPath());
}
templateEngine.processToFile(model, templateConfigModel);
}
});
});
});
}
}
|
zemo/naali
|
Interfaces/RenderServiceInterface.h
|
// For conditions of distribution and use, see copyright notice in license.txt
#ifndef incl_Interfaces_RenderServiceInterface_h
#define incl_Interfaces_RenderServiceInterface_h
#include "IService.h"
#include "ResourceInterface.h"
#include "LogListenerInterface.h"
#include "CoreModuleApi.h"
#include "Vector3D.h"
#include "Quaternion.h"
#include <QObject>
#include <QVariant>
#include <set>
class QRect;
namespace Scene
{
class Entity;
}
//! Result of a raycast. Other fields are valid only if entity_ is non-null
class RaycastResult : public QObject
{
Q_OBJECT
public:
Q_PROPERTY(Scene::Entity* entity READ getentity);
Scene::Entity* getentity() const { return entity_; }
Q_PROPERTY(Vector3df pos READ getpos);
Vector3df getpos() const { return pos_; }
Q_PROPERTY(unsigned submesh READ getsubmesh);
unsigned getsubmesh() const { return submesh_; }
Q_PROPERTY(float u READ getu);
float getu() const { return u_; }
Q_PROPERTY(float v READ getv);
float getv() const { return v_; }
//! Entity that was hit, null if none
Scene::Entity* entity_;
//! World coordinates of hit position
Vector3df pos_;
//! Submesh index in entity, starting from 0
unsigned submesh_;
//! U coord in entity. 0 if no texture mapping
float u_;
//! V coord in entity. 0 if no texture mapping
float v_;
};
namespace Foundation
{
//! Render service interface.
/*!
\ingroup Services_group
Manages the rendering window, handles scene rendering, and manages renderer related resources.
Implemented by the \ref OgreRenderingModule.
*/
class MODULE_API RenderServiceInterface : public IService
{
public:
/// Default constructor
RenderServiceInterface() {}
/// Destructor.
virtual ~RenderServiceInterface() {}
//! Renders the scene
virtual void Render() = 0;
//! Do raycast into the world from viewport coordinates.
/*! The coordinates are a position in the render window, not scaled to [0,1].
\param x Horizontal position for the origin of the ray
\param y Vertical position for the origin of the ray
\return Raycast result structure
*/
virtual RaycastResult* Raycast(int x, int y) = 0;
//! Do a frustum query to the world from viewport coordinates.
/*! Returns the found entities as a QVariantList so that
Python and Javascript can get the result directly from here.
\param viewrect The query rectangle in 2d window coords.
*/
virtual QVariantList FrustumQuery(QRect &viewrect) = 0;
/// Returns the backbuffer image that contains the UI layer of the application screen.
/// Used to perform alpha-keying based input.
// virtual QImage &GetBackBuffer() = 0;
//! Returns render window width, or 0 if no window is opened
virtual int GetWindowWidth() const = 0;
//! Returns render window height, or 0 if no window is opened
virtual int GetWindowHeight() const = 0;
//! subscribe a listener to renderer log
virtual void SubscribeLogListener(const LogListenerPtr &listener) = 0;
//! unsubscribe a listener from renderer log
virtual void UnsubscribeLogListener(const LogListenerPtr &listener) = 0;
//! set maximum view distance
virtual void SetViewDistance(float distance) = 0;
//! get maximum view distance
virtual float GetViewDistance() const = 0;
//! force UI repaint
virtual void RepaintUi() = 0;
//! get visible entities last frame
virtual const std::set<entity_id_t>& GetVisibleEntities() = 0;
//! take a screenshot to a location
//! \param filePath File path.
//! \param fileName File name.
virtual void TakeScreenshot(const std::string& filePath, const std::string& fileName) = 0;
//! Render current main window content to texture
virtual QPixmap RenderImage(bool use_main_camera = true) = 0;
//! Render current main window with focus on the avatar
//! @todo make this focus non hard coded but as param
virtual QPixmap RenderAvatar(const Vector3df &avatar_position, const Quaternion &avatar_orientation) = 0;
//! Gets a renderer-specific resource
/*! Does not automatically queue a download request
\param id Resource id
\param type Resource type
\return pointer to resource, or null if not found
*/
virtual ResourcePtr GetResource(const std::string& id, const std::string& type) = 0;
//! Requests a renderer-specific resource to be downloaded from the asset system
/*! A RESOURCE_READY event will be sent when the resource is ready to use
\param id Resource id
\param type Resource type
\return Request tag, or 0 if request could not be queued
*/
virtual request_tag_t RequestResource(const std::string& id, const std::string& type) = 0;
//! Removes a renderer-specific resource
/*! \param id Resource id
\param type Resource type
*/
virtual void RemoveResource(const std::string& id, const std::string& type) = 0;
};
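// Illustrative use of the raycast API (sketch; how the service pointer is obtained is
// framework-specific and assumed here):
//   RenderServiceInterface *renderer = /* acquired from the framework's service registry */;
//   RaycastResult *hit = renderer->Raycast(mouseX, mouseY);
//   if (hit && hit->entity_)
//   {
//       // hit->pos_ holds the world-space intersection, hit->submesh_ the submesh index
//   }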
}
#endif
|
edisonhello/dbux
|
samples/src/testutil/runSample.js
|
import dbuxRunFile from 'dbux-cli/src/dbuxRunFile';
import path from 'path';
const srcFolder = path.join(__dirname, '..');
const samplesFolder = path.join(srcFolder, '..');
const inputFolder = path.join(samplesFolder, '__samplesInput__');
export default function runSample(name, performTests) {
test(name, () => {
const fpath = path.join(inputFolder, name);
dbuxRunFile(fpath);
// NOTE: dbux runtime is injected as a global variable through `registerDbuxAsGlobal`
const dbuxRuntime = global.__dbux;
performTests(dbuxRuntime);
});
}
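// Illustrative usage in a sample test file (sketch; the sample name and the assertion
// are assumptions, not taken from this repository):
//   runSample('helloWorld.js', (dbuxRuntime) => {
//     expect(dbuxRuntime).toBeDefined();
//   });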
|
tszielin/q-lab-editor
|
src/main/java/org/openide/filesystems/Repository.java
|
<gh_stars>1-10
/*
* Sun Public License Notice
*
* The contents of this file are subject to the Sun Public License
* Version 1.0 (the "License"). You may not use this file except in
* compliance with the License. A copy of the License is available at
* http://www.sun.com/
*
* The Original Code is NetBeans. The Initial Developer of the Original
* Code is Sun Microsystems, Inc. Portions Copyright 1997-2003 Sun
* Microsystems, Inc. All Rights Reserved.
*/
package org.openide.filesystems;
import java.beans.*;
import java.io.*;
import java.util.*;
import org.openide.util.NbBundle;
import org.openide.util.io.NbMarshalledObject;
/** This singleton object contains all {@link FileSystem}s in the IDE.
* It corresponds to the <b>Filesystems</b> tab in the Explorer, or more precisely
* to <b>Filesystems Settings</b> in Project Settings.
* <P>
* At any given time, no two filesystems in the pool may share the same {@link FileSystem#getSystemName name}
* (unless all but one are {@link FileSystem#isValid invalid}).
*
* <p>Use {@link #getDefault} to retrieve the default instance.
*
* <p>Note that you may construct additional instances of the Repository if you
* wish. The serialization replacer of this class specifically deals with the
* default instance as stored by the top manager; however you may safely call
* the {@link #readExternal} and {@link #writeExternal} methods directly to implement
* persistence of a non-default instance.
*
* @author <NAME>, <NAME>
*/
public class Repository extends Object implements java.io.Serializable
{
/** list of filesystems (FileSystem) */
private ArrayList fileSystems;
private transient ArrayList fileSystemsClone;
/** the system filesystem */
private FileSystem system;
/** hashtable that maps system names to FileSystems */
private Hashtable names;
private transient FCLSupport fclSupport;
// [PENDING] access to this hashtable is apparently not properly synched
// should use e.g. Collections.synchronizedSet, or just synch methods using it
/** hashtable for listeners on changes in the filesystem.
* Its elements are of type (RepositoryListener, RepositoryListener)
*/
private Hashtable listeners = new Hashtable ();
/** vetoable listener on systemName property of filesystem */
private VetoableChangeListener vetoListener = new VetoableChangeListener () {
/** @param ev event with changes */
public void vetoableChange (PropertyChangeEvent ev)
throws PropertyVetoException {
if (ev.getPropertyName ().equals ("systemName")) {
final String ov = (String)ev.getOldValue ();
final String nv = (String)ev.getNewValue ();
if (names.get (nv) != null) {
throw new PropertyVetoException ("system name already exists", ev) { // NOI18N
public String getLocalizedMessage () {
return NbBundle.getMessage (Repository.class, "EXC_duplicate_system_name", ov, nv);
}
};
}
}
}
};
/** property listener on systemName property of filesystem */
private PropertyChangeListener propListener = new PropertyChangeListener () {
/** @param ev event with changes */
public void propertyChange (PropertyChangeEvent ev) {
if (ev.getPropertyName ().equals ("systemName")) {
// assign the property to new name
String ov = (String)ev.getOldValue ();
String nv = (String)ev.getNewValue ();
FileSystem fs = (FileSystem)ev.getSource ();
if (fs.isValid ()) {
// when a filesystem is valid then it is attached to a name
names.remove (ov);
}
// register name of the filesystem
names.put (nv, fs);
// the filesystem becomes valid
fs.setValid (true);
}
}
};
static final long serialVersionUID =-6344768369160069704L;
/** Creates new instance of filesystem pool and
* registers it as the default one. Also registers the default filesystem.
*
* @param def the default filesystem
*/
public Repository (FileSystem def) {
this.system = def;
init ();
}
/** Access method to get default instance of repository in the system.
* The instance is either taken as a result of
* <CODE>org.openide.util.Lookup.getDefault ().lookup (Repository.class)</CODE>
* or (if the lookup query returns null) a default instance is created.
*
* @return default repository for the system
*/
public static Repository getDefault () {
return ExternalUtil.getRepository ();
}
/** Initializes the pool.
*/
private void init () {
// empties the pool
fileSystems = new ArrayList ();
names = new Hashtable ();
addFileSystem (system);
}
/** Gets the default filesystem of the IDE.
* @return the default filesystem
*/
public final FileSystem getDefaultFileSystem () {
return system;
}
/** Adds new filesystem to the pool.
* <em>Note</em> that a filesystem cannot be assigned to more than one file
* system pool at one time (though currently there is only one pool anyway).
* At any given time, no two filesystems in the pool may share the same {@link FileSystem#getSystemName name}
* (unless all but one are {@link FileSystem#isValid invalid}). To be sure, that
* filesystem was really added in Repository, then test that <code>FileSystem</code>
* is valid.
* @param fs filesystem to add
*/
public final void addFileSystem (FileSystem fs) {
boolean fireIt = false;
synchronized (this) {
// if the filesystem is not assigned yet
if (!fs.assigned && !fileSystems.contains(fs)) {
// new filesystem
fs.setRepository (this);
fileSystems.add(fs);
fileSystemsClone = (ArrayList)fileSystems.clone();
String systemName = fs.getSystemName ();
boolean isReg = names.get (systemName) == null;
if (isReg && !systemName.equals ("")) { // NOI18N
// filesystem with the same name is not there => then it is valid
names.put (systemName, fs);
fs.setValid (true);
} else {
// there is another filesystem with the same name => it is invalid
fs.setValid (false);
}
// mark the filesystem as being assigned
fs.assigned = true;
// mark as a listener on changes in the filesystem
fs.addPropertyChangeListener (propListener);
fs.addVetoableChangeListener (vetoListener);
// notify filesystem itself that it has been added
fs.addNotify();
// fire info about new filesystem
fireIt = true;
}
}
// postponed firing after synchronized block to prevent deadlock
if (fireIt)
fireFileSystem (fs, true);
}
/** Removes a filesystem from the pool.
* @param fs filesystem to remove
*/
public final void removeFileSystem (FileSystem fs) {
boolean fireIt = false;
synchronized (this) {
if (fs.isDefault()) return;
if (fireIt = fileSystems.remove(fs)) {
fs.setRepository (null);
fileSystemsClone = (ArrayList)fileSystems.clone();
// the filesystem really was here
if (fs.isValid ()) {
// if the filesystem is valid then is in names hashtable
names.remove (fs.getSystemName ());
fs.setValid (false);
}
// in all cases remove it from listeners
fs.removePropertyChangeListener (propListener);
fs.removeVetoableChangeListener (vetoListener);
// notify filesystem itself that it has been removed
fs.removeNotify();
}
// unassign the filesystem
fs.assigned = false;
}
// postponed firing after synchronized block to prevent deadlock
if (fireIt)
fireFileSystem (fs, false);
}
/** Reorders {@link FileSystem}s by given permutation.
* For example, if there are three filesystems, <code>new int[] {2, 0, 1}</code> cycles the filesystems forwards.
* @param perm an array of integers
* @throws IllegalArgumentException if the array is not a permutation, or is not the same length as the current number of filesystems in the pool
*/
public final void reorder(int[] perm) {
synchronized (this) {
if (perm == null) {
throw new IllegalArgumentException ("null permutation"); // NOI18N
} else if (perm.length != fileSystems.size ()) {
throw new IllegalArgumentException ("permutation is wrong size: " + perm.length + " elements but should be " + fileSystems.size ()); // NOI18N
} else if (! isPermutation (perm)) {
StringBuffer message = new StringBuffer ("permutation is not really a permutation:"); // NOI18N
for (int i = 0; i < perm.length; i++) {
message.append (' ');
message.append (perm[i]);
}
throw new IllegalArgumentException (message.toString ());
}
ArrayList newList = new ArrayList (fileSystems.size ());
int len = perm.length;
for (int i = 0; i < len; i++) {
newList.add (fileSystems.get (perm[i]));
}
fileSystems = newList;
fileSystemsClone = (ArrayList)fileSystems.clone();
}
fireFileSystemReordered(perm);
}
/** @return true if the parameter describes a permutation */
private static boolean isPermutation(int[] perm) {
final int len = perm.length;
boolean[] bool = new boolean[len];
try {
for (int i = 0; i < len; i++) {
if (bool[perm[i]]) return false;
else bool[perm[i]] = true;
}
return true;
} catch (IndexOutOfBoundsException e) {
return false;
}
}
/** Returns enumeration of all filesystems.
* @return enumeration of type {@link FileSystem}
*/
public final Enumeration getFileSystems () {
ArrayList tempFileSystems = fileSystemsClone;
return java.util.Collections.enumeration (tempFileSystems);
}
/** Returns enumeration of all filesystems.
* @return enumeration of type {@link FileSystem}
*/
public final Enumeration fileSystems () {
return getFileSystems ();
}
/** Returns a sorted array of filesystems. */
public final FileSystem[] toArray() {
ArrayList tempFileSystems = fileSystemsClone;
FileSystem[] fss = new FileSystem[tempFileSystems.size()];
tempFileSystems.toArray(fss);
return fss;
}
/** Finds filesystem when only its system name is known.
* @param systemName {@link FileSystem#getSystemName name} of the filesystem
* @return the filesystem or <CODE>null</CODE> if there is no such
* filesystem
*/
public final FileSystem findFileSystem (String systemName) {
FileSystem fs = (FileSystem)names.get (systemName);
return fs;
}
/** Saves pool to stream by saving all filesystems.
* The default (system) filesystem, or any persistent filesystems, are skipped.
*
* @param oos object output stream
* @exception IOException if an error occurs
* @deprecated Unused.
*/
public final synchronized void writeExternal (ObjectOutput oos) throws IOException {
Iterator iter = fileSystems.iterator();
while (iter.hasNext()) {
FileSystem fs = (FileSystem)iter.next();
if (!fs.isDefault () && !fs.isPersistent ()) {
oos.writeObject (new NbMarshalledObject (fs));
}
}
oos.writeObject (null);
}
/** Reads object from stream.
* Reads all filesystems. Persistent and system filesystems are untouched; all others are removed and possibly reread.
* @param ois object input stream
* @exception IOException if an error occurs
* @exception ClassNotFoundException if read class is not found
* @deprecated Unused.
*/
public final synchronized void readExternal (ObjectInput ois)
throws IOException, ClassNotFoundException {
ArrayList temp = new ArrayList(10);
for (;;) {
Object obj = ois.readObject ();
if (obj == null) {
// all system has been read in
break;
}
FileSystem fs;
if (obj instanceof FileSystem) {
fs = (FileSystem)obj;
} else {
try {
NbMarshalledObject mar = (NbMarshalledObject)obj;
fs = (FileSystem)mar.get ();
} catch (IOException ex) {
ExternalUtil.exception (ex);
fs = null;
} catch (ClassNotFoundException ex) {
ExternalUtil.exception (ex);
fs = null;
}
}
if (fs != null) {
// add the new filesystem
temp.add(fs);
}
}
Enumeration ee = getFileSystems();
FileSystem fs;
while (ee.hasMoreElements()) {
fs = (FileSystem) ee.nextElement();
if (!fs.isPersistent ()) {
removeFileSystem (fs);
}
}
// in init assigned is checked and we force 'system' to be added again
system.assigned = false;
init ();
// all is successfully read
for (Iterator iter = temp.iterator(); iter.hasNext();)
addFileSystem ((FileSystem) iter.next());
}
/** Finds file when its name is provided. It scans in the list of
* filesystems and asks them for the specified file by a call to
* {@link FileSystem#find find}. The first object that is found is returned or <CODE>null</CODE>
* if none of the filesystems contain such a file.
*
* @param aPackage package name where each package is separated by a dot
* @param name name of the file (without dots) or <CODE>null</CODE> if
* one wants to obtain the name of a package and not a file in it
* @param ext extension of the file or <CODE>null</CODE> if one needs
* a package and not a file name
*
* @return {@link FileObject} that represents file with given name or
* <CODE>null</CODE> if the file does not exist
* @deprecated Please use the <a href="@JAVA/API@/org/netbeans/api/java/classpath/api.html">ClassPath API</a> instead.
*/
public final FileObject find (String aPackage, String name, String ext) {
Enumeration en = getFileSystems ();
while (en.hasMoreElements ()) {
FileSystem fs = (FileSystem)en.nextElement ();
FileObject fo = fs.find (aPackage, name, ext);
if (fo != null) {
// object found
return fo;
}
}
return null;
}
/** Searches for the given resource among all filesystems.
* <p><em>Note: Do not use this method for finding classes!
* It is a wrong usage.</em>
* @see FileSystem#findResource
* @param name a name of the resource
* @return file object or <code>null</code> if the resource can not be found
*/
public final FileObject findResource(String name) {
Enumeration en = getFileSystems ();
while (en.hasMoreElements ()) {
FileSystem fs = (FileSystem)en.nextElement ();
FileObject fo = fs.findResource(name);
if (fo != null) {
// object found
return fo;
}
}
return null;
}
/** Searches for the given resource among all filesystems, returning all matches.
* <p><em>Note: Do not use this method for finding classes!
* It is a wrong usage.</em>
* @param name name of the resource
* @return enumeration of {@link FileObject}s
*/
public final Enumeration findAllResources(String name) {
Vector v = new Vector(8);
Enumeration en = getFileSystems ();
while (en.hasMoreElements ()) {
FileSystem fs = (FileSystem)en.nextElement ();
FileObject fo = fs.findResource(name);
if (fo != null) {
v.addElement(fo);
}
}
return v.elements();
}
/** Finds all files among all filesystems matching a given name, returning all matches.
* All filesystems are queried with {@link FileSystem#find}.
*
* @param aPackage package name where each package is separated by a dot
* @param name name of the file (without dots) or <CODE>null</CODE> if
* one wants to obtain the name of a package and not a file in it
* @param ext extension of the file or <CODE>null</CODE> if one needs
* a package and not a file name
*
* @return enumeration of {@link FileObject}s
* @deprecated Please use the <a href="@JAVA/API@/org/netbeans/api/java/classpath/api.html">ClassPath API</a> instead.
*/
public final Enumeration findAll (String aPackage, String name, String ext) {
Enumeration en = getFileSystems ();
Vector ret = new Vector();
while (en.hasMoreElements ()) {
FileSystem fs = (FileSystem)en.nextElement ();
FileObject fo = fs.find (aPackage, name, ext);
if (fo != null) {
ret.addElement(fo);
}
}
return ret.elements();
}
/** Fire info about changes in the filesystem pool.
* @param fs filesystem
* @param add <CODE>true</CODE> if the filesystem is added,
* <CODE>false</CODE> if it is removed
*/
private void fireFileSystem (FileSystem fs, boolean add) {
Enumeration en = ((Hashtable)listeners.clone ()).elements ();
RepositoryEvent ev = new RepositoryEvent (this, fs, add);
while (en.hasMoreElements ()) {
RepositoryListener list = (RepositoryListener)en.nextElement ();
if (add) {
list.fileSystemAdded (ev);
} else {
list.fileSystemRemoved (ev);
}
}
}
/** Fires info about reordering
* @param perm
*/
private void fireFileSystemReordered(int[] perm) {
Enumeration en = ((Hashtable)listeners.clone ()).elements ();
RepositoryReorderedEvent ev = new RepositoryReorderedEvent(this, perm);
while (en.hasMoreElements ()) {
RepositoryListener list = (RepositoryListener)en.nextElement ();
list.fileSystemPoolReordered(ev);
}
}
/** Adds new listener.
* @param list the listener
*/
public final void addRepositoryListener (RepositoryListener list) {
listeners.put (list, list);
}
/** Removes listener.
* @param list the listener
*/
public final void removeRepositoryListener (RepositoryListener list) {
listeners.remove (list);
}
/** Writes the object to the stream.
*/
private Object writeReplace () {
return new Replacer ();
}
final FCLSupport getFCLSupport() {
synchronized (FCLSupport.class) {
if (fclSupport == null)
fclSupport = new FCLSupport ();
}
return fclSupport;
}
/** Add new listener to this object.
* @param fcl the listener
* @since 2.8
*/
public final void addFileChangeListener(FileChangeListener fcl) {
getFCLSupport ().addFileChangeListener(fcl);
}
/** Remove listener from this object.
* @param fcl the listener
* @since 2.8
*/
public final void removeFileChangeListener(FileChangeListener fcl) {
getFCLSupport ().removeFileChangeListener(fcl);
}
private static class Replacer implements java.io.Serializable {
/** serial version UID */
static final long serialVersionUID=-3814531276726840241L;
Replacer() {}
private void writeObject (ObjectOutputStream oos) throws IOException {
ExternalUtil.getRepository ().writeExternal (oos);
}
private void readObject (ObjectInputStream ois)
throws IOException, ClassNotFoundException {
ExternalUtil.getRepository ().readExternal (ois);
}
/** @return the default pool */
public Object readResolve () {
return ExternalUtil.getRepository ();
}
}
}
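// Illustrative usage (sketch; assumes some concrete FileSystem instance "fs" is available):
//   Repository repo = Repository.getDefault();
//   repo.addFileSystem(fs);                 // fs is marked invalid if its system name clashes
//   FileObject fo = repo.findResource("some/resource.txt");
//   repo.removeFileSystem(fs);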
|
anolson/wednesday_worlds
|
app/lib/ride_tweet.rb
|
class RideTweet
include ApplicationHelper
attr_accessor :event
def initialize(event)
@event = event
end
def to_s
%(#{date}. #{location}. #{time}. #{route}.)
end
def date
format_date(event.begins_at)
end
def location
event.ride.location
end
def time
format_time(event.begins_at)
end
def route
event.route.name
end
end
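# Illustrative output (sketch; the event data and the exact date/time formats produced by
# format_date/format_time in ApplicationHelper are assumptions):
#   RideTweet.new(event).to_s
#   # => "Wednesday, June 5. Local park. 6:00 PM. Hill loop."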
|
wlMalk/goms
|
generator/generators/service_main.go
|
package generators
import (
"strings"
"github.com/wlMalk/goms/constants"
"github.com/wlMalk/goms/generator/file"
"github.com/wlMalk/goms/generator/helpers"
"github.com/wlMalk/goms/parser/types"
)
func ServiceMainFunc(file file.File, service types.Service) error {
file.AddImport("", "io")
file.AddImport("", "os")
if helpers.IsServerEnabled(service) {
file.AddImport("", service.ImportPath, "/cmd/start")
}
if service.Generate.Has(constants.ServiceGenerateLoggerFlag) || helpers.IsLoggingEnabled(service) {
file.AddImport("", "github.com/go-kit/kit/log")
}
if helpers.IsMetricsEnabled(service) {
file.AddImport("", "github.com/go-kit/kit/metrics")
}
if helpers.IsTracingEnabled(service) {
file.AddImport("opentracinggo", "github.com/opentracing/opentracing-go")
}
if service.Generate.Has(constants.ServiceGenerateProtoBufFlag) && (helpers.IsGRPCServerEnabled(service) || helpers.IsGRPCClientEnabled(service)) {
file.Pf("//go:generate protoc --go_out=plugins=grpc:%s --proto_path=%s proto/service.goms.proto", strings.TrimSuffix(file.Base(), service.ImportPath), file.Base())
file.P("")
}
file.Pf("func main() {")
if service.Generate.Has(constants.ServiceGenerateLoggerFlag) || helpers.IsLoggingEnabled(service) {
file.Pf("logger := InitLogger(os.Stderr)")
}
if helpers.IsTracingEnabled(service) {
file.Pf("tracer := InitTracer()")
}
if helpers.IsFrequencyMetricEnabled(service) {
file.Pf("frequencyMetric := InitRequestFrequencyMetric()")
}
if helpers.IsLatencyMetricEnabled(service) {
file.Pf("latencyMetric := InitRequestLatencyMetric()")
}
if helpers.IsCounterMetricEnabled(service) {
file.Pf("counterMetric := InitRequestCounterMetric()")
}
if helpers.IsServerEnabled(service) {
file.Pf("start.Start(")
if service.Generate.Has(constants.ServiceGenerateLoggerFlag) || helpers.IsLoggingEnabled(service) {
file.Pf("logger,")
}
if helpers.IsTracingEnabled(service) {
file.Pf("tracer,")
}
if helpers.IsFrequencyMetricEnabled(service) {
file.Pf("frequencyMetric,")
}
if helpers.IsLatencyMetricEnabled(service) {
file.Pf("latencyMetric,")
}
if helpers.IsCounterMetricEnabled(service) {
file.Pf("counterMetric,")
}
file.Pf(")")
}
file.Pf("}")
file.Pf("")
return nil
}
func ServiceMainInitLoggerFunc(file file.File, service types.Service) error {
file.Pf("func InitLogger(writer io.Writer) log.Logger {")
file.Pf("logger := log.NewJSONLogger(writer)")
file.Pf("logger = log.With(logger, \"@timestamp\", log.DefaultTimestampUTC)")
file.Pf("logger = log.With(logger, \"caller\", log.DefaultCaller)")
file.Pf("return logger")
file.Pf("}")
file.Pf("")
return nil
}
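// For reference, the Pf calls above emit the following Go source into the generated
// service (shown as written by the generator, before any gofmt pass):
//
//   func InitLogger(writer io.Writer) log.Logger {
//   logger := log.NewJSONLogger(writer)
//   logger = log.With(logger, "@timestamp", log.DefaultTimestampUTC)
//   logger = log.With(logger, "caller", log.DefaultCaller)
//   return logger
//   }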
func ServiceMainInitTracerFunc(file file.File, service types.Service) error {
file.Pf("func InitTracer() opentracinggo.Tracer {")
file.Pf("// TODO: Initialize tracer")
file.Pf("return nil")
file.Pf("}")
file.Pf("")
return nil
}
func ServiceMainInitCounterFunc(file file.File, service types.Service) error {
file.Pf("func InitRequestCounterMetric() metrics.Counter {")
file.Pf("// TODO: Initialize counterMetric")
file.Pf("return nil")
file.Pf("}")
file.Pf("")
return nil
}
func ServiceMainInitLatencyFunc(file file.File, service types.Service) error {
file.Pf("func InitRequestLatencyMetric() metrics.Histogram {")
file.Pf("// TODO: Initialize latencyMetric")
file.Pf("return nil")
file.Pf("}")
file.Pf("")
return nil
}
func ServiceMainInitFrequencyFunc(file file.File, service types.Service) error {
file.Pf("func InitRequestFrequencyMetric() metrics.Gauge {")
file.Pf("// TODO: Initialize frequencyMetric")
file.Pf("return nil")
file.Pf("}")
file.Pf("")
return nil
}
|
mdiebolt/pixie.strd6.com
|
public/javascripts/jquery.tile_editor.js
|
/* DO NOT MODIFY. This file was compiled Thu, 26 May 2011 22:45:45 GMT from
* /home/daniel/apps/pixie.strd6.com/app/coffeescripts/jquery.tile_editor.coffee
*/
(function() {
$.fn.tileEditor = function(options) {
var addNewLayer, addScreenLayer, clearSelection, clickMode, createNewTile, createPixelEditor, createdTileCount, currentLayer, currentTool, debugMode, deleteTile, dirty, eachEntity, editEntity, entered, filledToken, firstGID, floodFill, generateUuid, getNeighborPositions, grid, harvestSelection, hotkeys, inBounds, isInSelection, layerSelect, loadData, loadEntity, loadExternalEntities, modeDown, nextTile, pixelEditTile, positionElementIndices, prevTile, propEditor, propElement, removeEntity, removeTile, replaceTile, saveData, savedSelectionCount, select, selectNextVisibleLayer, selectTile, selectTool, selectionCache, selectionCopy, selectionCut, selectionDelete, selectionEach, selectionStart, showPropertiesEditor, stamp, templates, tileAt, tileEditor, tileHeight, tileLookup, tilePosition, tileTray, tileWidth, tilesTall, tilesWide;
options = $.extend({
layers: ["Background", "Entities"],
eachEntity: $.noop,
editEntity: $.noop,
loadEntity: $.noop,
removeEntity: $.noop,
tilesWide: 20,
tilesTall: 15,
tileWidth: 32,
tileHeight: 32
}, options);
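// Illustrative call site (sketch; assumes the plugin file and the "#tile_editor_templates"
// markup it relies on are already loaded in the page):
//   $("#editor").tileEditor({
//     tilesWide: 10,
//     tilesTall: 10,
//     tileWidth: 16,
//     tileHeight: 16
//   });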
tileEditor = $(this.get(0)).addClass("editor tile_editor");
templates = $("#tile_editor_templates");
templates.find(".editor.template").tmpl().appendTo(tileEditor);
debugMode = false;
dirty = false;
firstGID = 1;
eachEntity = options.eachEntity, editEntity = options.editEntity, loadEntity = options.loadEntity, removeEntity = options.removeEntity;
tilesWide = parseInt(options.tilesWide, 10);
tilesTall = parseInt(options.tilesTall, 10);
tileWidth = parseInt(options.tileWidth, 10);
tileHeight = parseInt(options.tileHeight, 10);
currentLayer = 0;
modeDown = null;
tileTray = ".module .tiles";
layerSelect = ".module .layer_select";
positionElementIndices = [];
grid = GridGen({
width: tileWidth,
height: tileHeight
});
if ($.fn.pixie) {
createPixelEditor = function(options) {
var pixelEditor, url;
url = options.url;
tileEditor = options.tileEditor;
pixelEditor = $('<div />').pixie({
width: options.width,
height: options.height,
initializer: function(canvas) {
if (url) {
canvas.fromDataURL(url);
}
canvas.addAction({
name: "Save Tile",
icon: "/images/icons/database_save.png",
perform: function(canvas) {
pixelEditor.trigger('save', canvas.toDataURL());
pixelEditor.remove();
return tileEditor.show();
},
undoable: false
});
return canvas.addAction({
name: "Back to Tilemap",
icon: "/images/icons/arrow_left.png",
perform: function(canvas) {
pixelEditor.remove();
return tileEditor.show();
},
undoable: false
});
}
});
tileEditor.hide().after(pixelEditor);
window.currentComponent = pixelEditor;
return pixelEditor;
};
}
pixelEditTile = function(selectedTile) {
var imgSource, pixelEditor;
if (createPixelEditor) {
imgSource = selectedTile.attr('src');
pixelEditor = createPixelEditor({
width: selectedTile.get(0).width,
height: selectedTile.get(0).height,
tileEditor: tileEditor,
url: imgSource.replace('http://images.pixie.strd6.com', '/s3')
});
return pixelEditor.bind('save', function(event, data) {
var img;
img = $("<img/>", {
src: data
});
return tileEditor.find('.component .tiles').append(img);
});
}
};
generateUuid = function() {
return Math.uuid(32, 16);
};
createdTileCount = 0;
createNewTile = function() {
var pixelEditor;
if (createPixelEditor) {
pixelEditor = createPixelEditor({
width: tileWidth,
height: tileHeight,
tileEditor: tileEditor
});
return pixelEditor.bind('save', function(event, data) {
var entity, img, name, src, uuid;
uuid = generateUuid();
name = "New Tile " + (createdTileCount += 1);
src = data;
img = $("<img/>", {
alt: name,
"data-uuid": uuid,
src: src,
title: name
});
entity = {
name: name,
tileSrc: src
};
loadEntity(uuid, {
src: src,
entity: entity
});
return tileEditor.find('.component .tiles').append(img);
});
}
};
deleteTile = function(tile) {
var uuid;
uuid = tile.remove().data('uuid');
removeEntity(uuid);
return tileEditor.find(".screen img[data-uuid=" + uuid + "]").remove();
};
tilePosition = function(element, event) {
var localX, localY, offset;
offset = element.offset();
localY = (event.pageY - offset.top).snap(tileHeight).clamp(0, (tilesTall - 1) * tileHeight);
localX = (event.pageX - offset.left).snap(tileWidth).clamp(0, (tilesWide - 1) * tileWidth);
return {
x: localX,
y: localY
};
};
addScreenLayer = function() {
$("<div />", {
"class": "layer",
width: tilesWide * tileWidth,
height: tilesTall * tileHeight
}).appendTo(tileEditor.find("section .layers"));
tileEditor.find(".screen").find(".cursor, .selection").appendTo(tileEditor.find("section .layers"));
return positionElementIndices.push({});
};
addNewLayer = function(layerName) {
layerName || (layerName = "Layer " + (tileEditor.find(".layer_select .choice").length + 1));
templates.find(".layer_select.template").tmpl({
name: layerName
}).appendTo(tileEditor.find(layerSelect)).find('.name').mousedown();
return addScreenLayer();
};
selectNextVisibleLayer = function() {
var shownLayers;
shownLayers = tileEditor.find(".layer_select .choice .show.on");
if (shownLayers.length) {
return shownLayers.eq(0).parent().find(".name").mousedown();
}
};
prevTile = function(mode) {
var cur, tileCount;
tileCount = $(".tiles img").length;
cur = tileEditor.find(".tiles ." + mode).removeClass(mode).index();
return tileEditor.find(".tiles img").eq((cur - 1).mod(tileCount)).addClass(mode);
};
nextTile = function(mode) {
var cur, tileCount;
tileCount = tileEditor.find(".tiles img").length;
cur = tileEditor.find(".tiles ." + mode).removeClass(mode).index();
return tileEditor.find(".tiles img").eq((cur + 1).mod(tileCount)).addClass(mode);
};
inBounds = function(x, y) {
return ((0 <= x && x < tileWidth * tilesWide)) && ((0 <= y && y < tileHeight * tilesTall));
};
replaceTile = function(x, y, tile) {
var posString, targetLayer;
if (!inBounds(x, y)) {
return;
}
if (!dirty) {
dirty = true;
tileEditor.trigger("dirty");
}
posString = x + "x" + y;
tile = tile.clone().removeClass("primary secondary").css({
position: "absolute",
top: y,
left: x
}).attr("data-pos", posString);
targetLayer = tileEditor.find(".screen .layer").eq(currentLayer);
removeTile(x, y);
targetLayer.append(tile);
positionElementIndices[currentLayer][posString] = tile.get();
return tile;
};
removeTile = function(x, y) {
var posString;
if (!dirty) {
dirty = true;
tileEditor.trigger("dirty");
}
tileAt(x, y).remove();
posString = x + "x" + y;
return positionElementIndices[currentLayer][posString] = void 0;
};
tileAt = function(x, y) {
var posString;
posString = x + "x" + y;
return $(positionElementIndices[currentLayer][posString]);
};
getNeighborPositions = function(position) {
var neighbors;
return neighbors = [[position[0] - tileWidth, position[1]], [position[0] + tileWidth, position[1]], [position[0], position[1] - tileHeight], [position[0], position[1] + tileHeight]].select(function(neighborPos) {
return inBounds(neighborPos[0], neighborPos[1]);
});
};
filledToken = 0;
floodFill = function(x, y, mode) {
var inSelection, neighbors, position, queue, selection, sourceTiles, targetTile, targetUuid, tile;
if ((tile = tileEditor.find(".tiles").find("." + mode)).length) {
sourceTiles = [[tile]];
} else if (selection = tileEditor.find(".saved_selections").find("." + mode).data("selectionData")) {
sourceTiles = selection;
}
filledToken += 1;
inSelection = isInSelection(x, y);
targetTile = tileAt(x, y);
targetUuid = targetTile.data("uuid");
tile = sourceTiles[0][0];
queue = [];
replaceTile(x, y, tile).data("fill", filledToken);
queue.push([x, y]);
while (position = queue.pop()) {
neighbors = getNeighborPositions(position);
neighbors.each(function(neighbor, index) {
var currentUuid;
if (inSelection === isInSelection(neighbor[0], neighbor[1])) {
tile = sourceTiles.wrap((neighbor[1] - y) / tileHeight).wrap((neighbor[0] - x) / tileWidth);
if (neighbor) {
targetTile = tileAt(neighbor[0], neighbor[1]);
currentUuid = targetTile.data("uuid");
if (currentUuid === targetUuid && targetTile.data("fill") !== filledToken) {
replaceTile(neighbor[0], neighbor[1], tile).data("fill", filledToken);
return queue.push(neighbor);
}
}
}
});
}
return;
};
selectionCache = null;
isInSelection = function(x, y) {
if (selectionCache) {
return (selectionCache.top <= y && y < selectionCache.top + selectionCache.height) && (selectionCache.left <= x && x < selectionCache.left + selectionCache.width);
} else {
return false;
}
};
clearSelection = function() {
tileEditor.find(".screen .selection").removeClass("active");
return selectionCache = null;
};
selectionEach = function(callback) {
var $selection, pos, selectionHeight, selectionWidth, x, y;
$selection = tileEditor.find(".screen .selection");
if ($selection.hasClass("active")) {
pos = $selection.position();
selectionWidth = $selection.outerWidth();
selectionHeight = $selection.outerHeight();
y = pos.top;
while (y < pos.top + selectionHeight) {
x = pos.left;
while (x < pos.left + selectionWidth) {
callback(x, y);
x += tileWidth;
}
y += tileHeight;
}
return clearSelection();
}
};
selectionDelete = function() {
return selectionEach(removeTile);
};
savedSelectionCount = 0;
harvestSelection = function(remove) {
var preview, row, rowY, savedSelection, selectionData;
rowY = void 0;
row = void 0;
savedSelection = templates.find(".saved_selection.template").tmpl({
text: "Selection" + (++savedSelectionCount)
}).appendTo(tileEditor.find(".saved_selections"));
preview = savedSelection.find(".preview");
selectionData = [];
selectionEach(function(x, y) {
var tile;
if (y !== rowY) {
rowY = y;
row = [];
selectionData.push(row);
}
tile = tileAt(x, y).clone();
row.push(tile);
tile.css({
position: "absolute",
top: (selectionData.length - 1) * tileHeight,
left: (row.length - 1) * tileWidth
});
preview.append(tile);
if (remove) {
return removeTile(x, y);
}
});
savedSelection.data("selectionData", selectionData);
return selectTile(savedSelection, "primary");
};
selectionCopy = function() {
return harvestSelection();
};
selectionCut = function() {
return harvestSelection(true);
};
selectionStart = null;
select = function(x, y) {
var $selection, deltaX, deltaY, pos, selectionHeight, selectionLeft, selectionTop, selectionWidth;
if (selectionStart) {
$selection = tileEditor.find(".screen .selection");
pos = $selection.position();
deltaX = x - selectionStart.x;
deltaY = y - selectionStart.y;
selectionWidth = deltaX.abs() + tileWidth;
selectionHeight = deltaY.abs() + tileHeight;
selectionLeft = deltaX < 0 ? x : selectionStart.x;
selectionTop = deltaY < 0 ? y : selectionStart.y;
selectionCache = {
height: selectionHeight,
left: selectionLeft,
top: selectionTop,
width: selectionWidth
};
return $selection.css(selectionCache);
} else {
selectionCache = {
height: tileHeight,
left: x,
top: y,
width: tileWidth
};
tileEditor.find(".screen .selection").addClass('active').css(selectionCache);
return selectionStart = {
x: x,
y: y
};
}
};
stamp = function(x, y, mode) {
var selection, tile;
if ((tile = tileEditor.find(".tiles").find("." + mode)).length) {
return replaceTile(x, y, tile);
} else if (selection = tileEditor.find(".saved_selections").find("." + mode).data("selectionData")) {
return selection.each(function(row, tileY) {
return row.each(function(tile, tileX) {
var targetX, targetY;
if (tile) {
targetX = x + tileX * tileWidth;
targetY = y + tileY * tileHeight;
return replaceTile(targetX, targetY, tile);
}
});
});
}
};
currentTool = function(mode) {
return tileEditor.find(".tools .tool." + mode).data("tool");
};
entered = function(x, y) {
var mode;
if (mode = modeDown) {
switch (currentTool(mode)) {
case "stamp":
return stamp(x, y, mode);
case "eraser":
return removeTile(x, y);
case "fill":
return floodFill(x, y, mode);
case "selection":
return select(x, y);
}
}
};
clickMode = function(event) {
if (event.which === 1) {
return "primary";
} else if (event.which === 3) {
return "secondary";
}
};
selectTool = function(name, mode) {
var tool;
tool = tileEditor.find(".tools .tool[data-tool=" + name + "]");
return tool.takeClass(mode);
};
selectTile = function(tile, mode) {
tileEditor.find(".saved_selections .selection").removeClass(mode);
tileEditor.find(".tiles img").removeClass(mode);
return tile.addClass(mode);
};
propElement = null;
showPropertiesEditor = function(element) {
propElement = element;
propEditor.setProps(propElement.data("properties"));
return propEditor.parent().show();
};
tileEditor.bind("contextmenu", function(event) {
if (!debugMode) {
return event.preventDefault();
}
});
$(".tools .tool", tileEditor).live('mousedown', function(event) {
var mode;
event.preventDefault();
if (mode = clickMode(event)) {
return $(this).takeClass(mode);
}
});
$(".tiles img, .saved_selections .selection", tileEditor).live({
mousedown: function(event) {
var mode;
event.preventDefault();
if (mode = clickMode(event)) {
return selectTile($(this), mode);
}
}
});
$(".tiles img, .saved_selections .selection", tileEditor).live('mouseup', function(event) {
if (event.which === 2) {
return $(this).remove();
}
});
$(".tiles img", tileEditor).live("dblclick", function(event) {
return editEntity($(this).data('uuid'));
});
tileEditor.find("button.new_tile").click(function() {
return createNewTile();
});
tileEditor.find("button.delete_tile").click(function() {
return deleteTile(tileEditor.find('.tiles img.primary'));
});
tileEditor.find(".prop_save").click(function(event) {
if (propElement) {
propElement.data("properties", propEditor.getProps());
return propEditor.parent().hide();
}
});
tileEditor.find(".layer_select").parent().find('.new').click(function() {
return addNewLayer();
});
$(".layer_select .choice .name", tileEditor).live('mousedown', function(event) {
var $layer;
$layer = $(this).parent();
$layer.takeClass("active");
return currentLayer = $layer.index();
});
tileEditor.find(".layer_select").delegate(".show", 'mousedown', function(event) {
var $choice, $this;
$this = $(this);
$choice = $this.parent();
if ($this.toggleClass("on").hasClass("on")) {
tileEditor.find(".screen .layers .layer").eq($choice.index()).fadeIn();
return $choice.find(".name").mousedown();
} else {
tileEditor.find(".screen .layers .layer").eq($choice.index()).fadeOut();
return selectNextVisibleLayer();
}
});
tileEditor.find(".screen .layers").bind("mousemove", function(event) {
var oldPos, pos;
pos = tilePosition($(this), event);
oldPos = tileEditor.find(".screen .cursor").position();
if (!(oldPos.left === pos.x && oldPos.top === pos.y)) {
entered(pos.x, pos.y);
return tileEditor.find(".screen .cursor").css({
left: pos.x,
top: pos.y
});
}
});
tileEditor.find(".screen .layers").bind("mousedown", function(event) {
var pos;
if (modeDown = clickMode(event)) {
pos = tilePosition($(this), event);
return entered(pos.x, pos.y);
}
});
$(document).bind("mouseup", function(event) {
selectionStart = null;
return modeDown = null;
});
tileEditor.mousedown(function() {
return window.currentComponent = tileEditor;
});
hotkeys = {
a: function(event) {
return prevTile("primary");
},
z: function(event) {
return nextTile("primary");
},
s: function(event) {
return prevTile("secondary");
},
x: function(event) {
return nextTile("secondary");
},
p: function() {
return showPropertiesEditor(tileEditor.find('.tiles img.primary'));
},
i: function() {
var left, tile, top, _ref;
_ref = tileEditor.find(".screen .cursor").position(), left = _ref.left, top = _ref.top;
if ((tile = tileAt(left, top)).length) {
return showPropertiesEditor(tile);
}
},
backspace: selectionDelete,
del: selectionDelete,
esc: clearSelection,
"ctrl+c": selectionCopy,
"ctrl+x": selectionCut
};
$.each(hotkeys, function(key, fn) {
return $(document).bind("keydown", key, function(event) {
if (window.currentComponent === tileEditor) {
event.preventDefault();
return fn(event);
}
});
});
tileEditor.find(tileTray).sortable();
tileEditor.dropImageReader(function(file, event) {
var entity, img, name, src, uuid;
if (event.target.readyState === FileReader.DONE) {
uuid = generateUuid();
src = event.target.result;
name = file.name.replace(/\.[^\.]*$/, '');
img = $("<img/>", {
alt: name,
src: src,
title: name,
"data-uuid": uuid
});
entity = {
name: name,
tileSrc: src
};
loadEntity(uuid, {
src: src,
entity: entity
});
return $(this).find(".tiles").append(img);
}
});
$('.filename, .layer_select .name, .saved_selections .name', tileEditor).liveEdit();
propEditor = $(".prop_editor", tileEditor).propertyEditor();
tileEditor.find("button.save").click(function() {
return typeof options.save === "function" ? options.save(saveData()) : void 0;
});
saveData = function() {
var entityCache, layers;
entityCache = {};
tileEditor.find(".module .tiles img").each(function() {
var $this, entity, mapTileData, props, src, uuid;
$this = $(this);
uuid = $this.data("uuid");
src = $this.attr("src");
entity = {
tileSrc: src
};
mapTileData = {
entity: entity,
src: src
};
loadEntity(uuid, mapTileData);
if (props = $this.data("properties")) {
mapTileData.properties = props;
}
return entityCache[uuid] = mapTileData;
});
layers = [];
tileEditor.find(".layer_select .choice").each(function(i) {
var $this, entities, entityLayer, layer, name, screenLayer, tileLookup, tiles;
$this = $(this);
name = $this.text().trim();
entityLayer = name.match(/entities/i);
screenLayer = tileEditor.find(".screen .layers .layer").eq(i);
if (entityLayer) {
entities = screenLayer.find("img").map(function() {
var $element, left, top, uuid, _ref;
$element = $(this);
uuid = $element.data("uuid");
_ref = $element.position(), top = _ref.top, left = _ref.left;
return {
x: left,
y: top,
uuid: uuid,
properties: $(this).data("properties")
};
}).get();
layer = {
name: name,
entities: entities
};
} else {
tileLookup = {};
screenLayer.find("img").each(function() {
var pos, uuid;
uuid = this.getAttribute("data-uuid");
pos = this.getAttribute("data-pos");
return tileLookup[pos] = uuid;
});
tiles = [];
tilesTall.times(function(y) {
var row;
row = [];
tiles.push(row);
return tilesWide.times(function(x) {
var posString;
posString = x * tileWidth + "x" + y * tileHeight;
return row.push(tileLookup[posString]);
});
});
layer = {
name: name,
tiles: tiles
};
}
return layers.push(layer);
});
return {
title: tileEditor.find(".filename").text(),
orientation: "orthogonal",
width: tilesWide,
height: tilesTall,
tileWidth: tileWidth,
tileHeight: tileHeight,
entityCache: entityCache,
layers: layers
};
};
loadData = function(data, tileLookup) {
tileWidth = data.tileWidth, tileHeight = data.tileHeight;
tilesWide = data.width;
tilesTall = data.height;
positionElementIndices = [];
tileEditor.find("section .layers .layer").remove();
tileEditor.find(layerSelect).html('');
data.layers.each(function(layer, i) {
var entities, entity, tile, tiles, _i, _len, _results;
currentLayer = i;
addScreenLayer();
templates.find(".layer_select.template").tmpl({
name: layer.name
}).appendTo(tileEditor.find(layerSelect));
if (tiles = layer.tiles) {
tiles.each(function(row, y) {
return row.each(function(uuid, x) {
if (uuid) {
return replaceTile(x * tileWidth, y * tileHeight, tileLookup[uuid]);
}
});
});
}
if (entities = layer.entities) {
_results = [];
for (_i = 0, _len = entities.length; _i < _len; _i++) {
entity = entities[_i];
tile = replaceTile(entity.x, entity.y, tileLookup[entity.uuid]);
_results.push(entity.properties ? tile.data("properties", entity.properties) : void 0);
}
return _results;
}
});
return tileEditor.find(layerSelect).find(".name").last().trigger("mousedown");
};
loadExternalEntities = function(data) {
var entityCache, index, tileData, tileLookup, uuid;
if (entityCache = data != null ? data.entityCache : void 0) {
for (uuid in entityCache) {
tileData = entityCache[uuid];
loadEntity(uuid, tileData);
}
}
tileEditor.find(tileTray).html('');
tileLookup = {};
index = 0;
eachEntity(function(uuid, entity) {
var active, src;
active = index === 0 ? "primary" : index === 1 ? "secondary" : void 0;
src = entity.tileSrc;
tileLookup[uuid] = $("<img />", {
"class": active,
"data-uuid": uuid,
src: src
}).appendTo(tileEditor.find(tileTray));
if (typeof cachedEntity != "undefined" && cachedEntity !== null ? cachedEntity.properties : void 0) {
tileLookup[uuid].data("properties", cachedEntity.properties);
}
return index += 1;
});
return tileLookup;
};
tileLookup = loadExternalEntities(options.data);
if (options.data) {
loadData(options.data, tileLookup);
} else {
if (options.layers.each) {
options.layers.each(function(layerName) {
return addNewLayer(layerName);
});
} else if (options.layers.times) {
options.layers.times(function() {
return addNewLayer();
});
}
}
tileEditor.find(".screen .cursor").css({
width: tileWidth - 1,
height: tileHeight - 1
});
tileEditor.find(".screen .layers").css({
backgroundImage: grid.backgroundImage(),
width: tilesWide * tileWidth,
height: tilesTall * tileHeight
});
tileEditor.bind("clean", function() {
return dirty = false;
});
dirty = false;
return $.extend(tileEditor, {
addAction: function(action) {
var actionButton;
actionButton = $("<button/>", {
text: action.name,
click: action.perform
});
return tileEditor.find(".actions").append(actionButton);
},
mapData: saveData
});
};
}).call(this);
|
banggibima/react_fitnesspro
|
src/screens/Download/Platform/index.js
|
import React from "react";
import cn from "classnames";
import styles from "./Platform.module.sass";
import Icon from "../../../components/Icon";
import ScrollParallax from "../../../components/ScrollParallax";
const items = [
{
title: "Fitness Pro for Mac OS",
description: "We realize ideas from simple.",
color: "#FF592C",
image: "/images/content/apple.svg",
status: "black",
statusContent: "updated",
},
{
title: "Fitness Pro for Windows",
description: "We realize ideas from simple.",
color: "#45B26B",
image: "/images/content/windows.svg",
status: "green",
statusContent: "coming soon",
},
{
title: "Fitness Pro for Mac OS",
description: "We realize ideas from simple.",
color: "#EF466F",
image: "/images/content/mouse.svg",
status: "green",
statusContent: "coming soon",
},
{
title: "Fitness Pro for Mac OS",
description: "We realize ideas from simple.",
color: "#3772FF",
image: "/images/content/apple.svg",
status: "green",
statusContent: "coming soon",
},
{
title: "Fitness Pro for Android",
description: "We realize ideas from simple.",
color: "#9757D7",
image: "/images/content/android.svg",
},
];
const Platform = () => {
return (
<div className={cn("section", styles.section)}>
<div className={cn("container", styles.container)}>
        <h2 className={cn("h2", styles.title)}>Choose the platform</h2>
<div className={styles.list}>
{items.map((x, index) => (
<ScrollParallax className={styles.item} key={index}>
<div
className={styles.preview}
style={{ backgroundColor: x.color }}
>
<img src={x.image} alt="Logo" />
</div>
<div className={styles.details}>
{x.status && (
// <div className={cn(x.status, styles.status)}>{x.statusContent}</div>
<div
className={cn(
{ "status-stroke-black": x.status === "black" },
{ "status-green": x.status === "green" },
styles.status
)}
>
{x.statusContent}
</div>
)}
<div className={styles.subtitle}>{x.title}</div>
<div className={styles.description}>{x.description}</div>
<a
href="/#"
className={cn("button-small", styles.button)}
target="_blank"
rel="noopener noreferrer"
>
<span>Download</span>
<Icon name="arrow-down" size="10" />
</a>
</div>
</ScrollParallax>
))}
</div>
<div className={styles.btns}>
<button className={cn("button-stroke", styles.button)}>
See the plan
</button>
</div>
</div>
</div>
);
};
export default Platform;
|
AIS-Bonn/stillleben
|
include/stillleben/object.h
|
<gh_stars>10-100
// Scene object
// Author: <NAME> <<EMAIL>>
#ifndef STILLLEBEN_OBJECT_H
#define STILLLEBEN_OBJECT_H
#include <stillleben/common.h>
#include <stillleben/math.h>
#include <stillleben/physx.h>
#include <Magnum/Math/Range.h>
#include <Magnum/Math/Vector3.h>
#include <Magnum/Math/Color.h>
#include <Magnum/Math/Quaternion.h>
#include <Magnum/Magnum.h>
#include <Magnum/SceneGraph/Drawable.h>
#include <Magnum/GL/Mesh.h>
#include <Magnum/GL/RectangleTexture.h>
#include <memory>
namespace physx
{
class PxScene;
class PxRigidDynamic;
}
namespace sl
{
class Context;
class Mesh;
class MeshCache;
class Drawable;
typedef std::function<void(const Magnum::Matrix4& transformationMatrix, Magnum::SceneGraph::Camera3D& camera, Drawable* drawable)> DrawCallback;
class Drawable : public Magnum::SceneGraph::Drawable3D
{
public:
Drawable(Object3D& object, Magnum::SceneGraph::DrawableGroup3D& group, const std::shared_ptr<Magnum::GL::Mesh>& mesh, DrawCallback* cb)
: Magnum::SceneGraph::Drawable3D{object, &group}
, m_mesh{mesh}
, m_cb(cb)
{
}
inline Magnum::GL::Texture2D* texture()
{ return m_texture; }
void setTexture(Magnum::GL::Texture2D* texture)
{ m_texture = texture; }
Magnum::Color4 color()
{ return m_color; }
void setColor(const Magnum::Color4& color)
{ m_color = color; }
Magnum::GL::Mesh& mesh()
{ return *m_mesh; }
Magnum::Float metallic() const
{ return m_metallic; }
Magnum::Float roughness() const
{ return m_roughness; }
void setMetallicRoughness(Magnum::Float metallic, Magnum::Float roughness)
{ m_metallic = metallic; m_roughness = roughness; }
void setHasVertexColors(bool hasVertexColors)
{ m_hasVertexColors = hasVertexColors; }
inline bool hasVertexColors() const
{ return m_hasVertexColors; }
void draw(const Magnum::Matrix4& transformationMatrix, Magnum::SceneGraph::Camera3D& camera) override;
private:
std::shared_ptr<Magnum::GL::Mesh> m_mesh;
Magnum::GL::Texture2D* m_texture = nullptr;
Magnum::Color4 m_color{};
DrawCallback* m_cb = nullptr;
bool m_hasVertexColors = false;
Magnum::Float m_metallic = 0.5f;
Magnum::Float m_roughness = 0.04f;
};
struct InstantiationOptions
{
/**
* Default color if the mesh does not have texture or vertex colors
**/
Magnum::Color4 color{1.0f, 1.0f, 1.0f, 1.0f};
/**
* If true, always render the mesh with this color, regardless of texture
**/
bool forceColor = false;
};
class Object
{
public:
class Part : public Object3D
{
public:
explicit Part(Magnum::UnsignedInt index, Object3D* parent)
: Object3D{parent}
, m_index{index}
{}
~Part() = default;
constexpr Magnum::UnsignedInt index()
{ return m_index; }
private:
Magnum::UnsignedInt m_index;
};
Object();
~Object();
Object(const Object&) = delete;
Object& operator=(const Object&) = delete;
void setMesh(const std::shared_ptr<Mesh>& mesh);
void setInstantiationOptions(const InstantiationOptions& options);
void loadVisual();
void loadPhysics();
void loadPhysicsVisualization();
void serialize(Corrade::Utility::ConfigurationGroup& group);
void deserialize(const Corrade::Utility::ConfigurationGroup& group, MeshCache& meshCache);
void setPose(const Magnum::Matrix4& pose);
Magnum::Matrix4 pose() const
{ return m_sceneObject.transformationMatrix(); }
void setParentSceneObject(Object3D* parent);
void setPhysicsScene(physx::PxScene* scene);
void draw(Magnum::SceneGraph::Camera3D& camera, const DrawCallback& cb);
void drawPhysics(Magnum::SceneGraph::Camera3D& camera, const DrawCallback& cb);
void setInstanceIndex(unsigned int instanceIndex);
unsigned int instanceIndex() const
{ return m_instanceIndex; }
void setSpecularColor(const Magnum::Color4& color);
constexpr Magnum::Color4 specularColor() const
{ return m_specularColor; }
void setShininess(float shininess);
constexpr float shininess() const
{ return m_shininess; }
void setRoughness(float roughness);
constexpr float roughness() const
{ return m_roughness; }
void setMetallic(float metalness);
constexpr float metallic() const
{ return m_metallic; }
std::shared_ptr<Mesh> mesh()
{ return m_mesh; }
Magnum::SceneGraph::DrawableGroup3D& debugDrawables()
{ return m_debugDrawables; }
physx::PxRigidDynamic& rigidBody()
{ return *m_rigidBody; }
void updateFromPhysics();
void setStickerTexture(const std::shared_ptr<Magnum::GL::RectangleTexture>& color);
std::shared_ptr<Magnum::GL::RectangleTexture> stickerTexture() const
{ return m_stickerTexture; }
void setStickerRange(const Magnum::Range2D& range);
constexpr Magnum::Range2D stickerRange() const
{ return m_stickerRange; }
void setStickerRotation(const Magnum::Quaternion& q);
constexpr Magnum::Quaternion stickerRotation() const
{ return m_stickerRotation; }
Magnum::Matrix4 stickerViewProjection() const;
private:
void populateParts();
void addPart(Object3D& parent, Magnum::UnsignedInt i);
std::shared_ptr<Mesh> m_mesh;
InstantiationOptions m_options;
// This is the scene object that contains everything in this object.
// setPose() acts upon this object.
Object3D m_sceneObject;
// This holds the actual mesh.
// Mesh::scaleToBBoxDiagonal() acts upon this object.
Object3D m_meshObject{&m_sceneObject};
std::vector<Part*> m_parts;
Magnum::SceneGraph::DrawableGroup3D m_drawables;
Magnum::SceneGraph::DrawableGroup3D m_debugDrawables;
Magnum::SceneGraph::DrawableGroup3D m_physXDrawables;
Magnum::SceneGraph::DrawableGroup3D m_simplifiedDrawables;
DrawCallback m_cb;
float m_scale = 1.0f;
unsigned int m_instanceIndex = 0;
physx::PxScene* m_physicsScene = nullptr;
PhysXHolder<physx::PxRigidDynamic> m_rigidBody;
bool m_visualLoaded = false;
bool m_physicsVisLoaded = false;
// By default, we have a fully specular object
Magnum::Color4 m_specularColor{1.0f};
// With sharp specular highlights
float m_shininess = 80.0f;
// These are factors onto the individual drawables
float m_roughness = -1.0f;
float m_metallic = -1.0f;
// Sticker simulation
std::shared_ptr<Magnum::GL::RectangleTexture> m_stickerTexture{};
Magnum::Range2D m_stickerRange{};
Magnum::Quaternion m_stickerRotation{};
};
}
#endif
|
Damons-work/icodework_vue__back_end
|
src/main/java/cn/js/icode/tool/mapper/DbtableFieldMapper.java
|
package cn.js.icode.tool.mapper;
import team.bangbang.common.sql.IMybatisMapper;
import org.apache.ibatis.annotations.Mapper;
import cn.js.icode.tool.data.DbtableField;
/**
 * Database table field - Mapper
 * Corresponding database table: tool_dbtable_field
*
* @author ICode Studio
* @version 1.0 2018-10-21
*/
@Mapper
public interface DbtableFieldMapper extends IMybatisMapper<DbtableField> {
    /**************************************************************************
     * !! Do not add, modify, or delete anything here unless the design or
     * supervising staff explicitly say so !!
     * ------------------------------------
     *
     * If a custom method really must be added, a matching SQL block should be
     * configured in the corresponding mapper.xml. Note three points:
     *
     * 1. The id of the SQL block must be identical to the method name;
     *
     * 2. Parameters of the method must carry the @Param annotation;
     *
     * 3. Parameter object names used inside the SQL block must match the names
     *    declared in the @Param annotations.
     *
     *************************************************************************/
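    /*
     * Illustrative sketch only (not part of the generated interface): a custom
     * method following the three rules above could look like the hypothetical
     * declaration below, paired with a <select id="listByTableId"> block in the
     * corresponding mapper.xml whose parameter name matches the @Param value.
     *
     *   List<DbtableField> listByTableId(@Param("tableId") Long tableId);
     *
     * If such a method were actually added, the java.util.List and
     * org.apache.ibatis.annotations.Param imports would also be needed.
     */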
}
|
wuxinshui/spring-boot-samples
|
spring-boot-sample-quartz/src/main/java/com/wxs/quartz/job/ServiceException.java
|
package com.wxs.quartz.job;
/**
 * Business exception
* @author Wuxinshui
*
*/
public class ServiceException extends RuntimeException {
private static final long serialVersionUID = 8624944628363400977L;
public ServiceException() {
super();
}
public ServiceException(String message, Throwable cause,
boolean enableSuppression, boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
public ServiceException(String message, Throwable cause) {
super(message, cause);
}
public ServiceException(String message) {
super(message);
}
public ServiceException(Throwable cause) {
super(cause);
}
}
|
nfearnley/Mei
|
commands/pat.js
|
<reponame>nfearnley/Mei
module.exports = {
main: function(Bot, m, args, prefix) {
if (m.mentions.length > 2) {
Bot.createMessage(m.channel.id, "Thats too many pats to give :angry:")
return;
}
Bot.sendChannelTyping(m.channel.id).then(async () => {
if (m.mentions.length == 1 && m.author.id == m.mentions[0].id) { // If the user mentions only themself
Bot.createMessage(m.channel.id, `Lovely shi... Alone? Don't be like that ${m.author.username} ;-; *hugs you*`)
return;
}
var imageArray = [
"https://78.media.tumblr.com/f95f14437809dfec8057b2bd525e6b4a/tumblr_omvkl2SzeK1ql0375o1_500.gif",
"https://m.popkey.co/a5cfaf/1x6lW.gif",
"http://giant.gfycat.com/PoisedWindingCaecilian.gif",
"https://i.imgur.com/NxTmYnV.gif",
"http://gifimage.net/wp-content/uploads/2017/07/head-pat-gif.gif",
"https://78.media.tumblr.com/313a6fcdf842ba0e0f393de0746f6cd6/tumblr_oc9tu4rAff1v8ljjro1_500.gif",
"http://i.imgur.com/xj0iJ.gif"
]
var url = m.channel.guild.members.get(m.author.id).avatarURL
var image = imageArray[Math.floor(Math.random() * imageArray.length)]
var author = m.channel.guild.members.get(m.author.id).nick || m.channel.guild.members.get(m.author.id).username
if (m.mentions.length == 1 && m.author.id != m.mentions[0].id) {
var pet = m.channel.guild.members.get(m.mentions[0].id).nick || m.channel.guild.members.get(m.mentions[0].id).username
const data = {
"embed": {
"title": pet + ", You got a pat from " + author + "~",
"color": 0xA260F6,
"image": {
"url": image
},
"author": {
"name": author,
"icon_url": url
}
}
};
Bot.createMessage(m.channel.id, data);
return;
} else {
const data = {
"embed": {
"title": "It's okay to pat yourself too~",
"color": 0xA260F6,
"image": {
"url": image
},
"author": {
"name": author,
"icon_url": url
}
}
};
Bot.createMessage(m.channel.id, data);
return;
}
});
},
help: ":3"
}
|
oragejuice/restack
|
src/main/java/me/lnadav/restack/api/command/AbstractCommand.java
|
<gh_stars>10-100
package me.lnadav.restack.api.command;
public abstract class AbstractCommand {
private final String name;
private final String description;
private final String[] aliases;
private final int minArgs;
private final String usage;
public AbstractCommand(String name, String description, String usage, String[] aliases, int minArgs){
this.name = name;
this.description = description;
this.usage = usage;
this.aliases = aliases;
this.minArgs = minArgs;
}
public void execute(String[] args){
}
public String getName() {
return name;
}
public String getDescription() {
return description;
}
public String[] getAliases() {
return aliases;
}
public int getMinArgs() {
return minArgs;
}
public String getUsage() {
return usage;
}
}
|
spite/sketch
|
js/sceneBackdrop.js
|
<gh_stars>100-1000
import {
Group,
Mesh,
IcosahedronBufferGeometry,
} from "../third_party/three.module.js";
const group = new Group();
let backdrop;
let material;
async function generate() {
if (backdrop) {
group.remove(backdrop);
}
backdrop = new Mesh(new IcosahedronBufferGeometry(20, 4), material);
group.add(backdrop);
}
const obj = {
init: async (m) => {
material = m;
await generate();
},
update: () => {},
group,
generate,
params: (gui) => {},
};
export { obj };
|
tausiq2003/C-with-DS
|
Opensource Programs/GretestCommonDivisor.cpp
|
<gh_stars>1-10
#include <iostream>
using namespace std;

class GCD
{
    int a, b;
public:
    void getno();
    void showdivisor();
};
void GCD::getno()
{
    cout << "enter two numbers: ";
    cin >> a >> b;
}
// Subtraction-based Euclidean algorithm: repeatedly subtract the smaller
// value from the larger until both are equal; that value is the GCD.
void GCD::showdivisor()
{
    while (a != b)
    {
        if (a > b)
            a = a - b;
        if (b > a)
            b = b - a;
    }
    cout << "GCD=" << a;
}
int main()
{
    GCD x;
    x.getno();
    x.showdivisor();
    return 0;
}
|
jiangshide/sdk
|
eclipse/plugins/com.android.ide.eclipse.ddms/src/com/android/ide/eclipse/ddms/LogCatMonitor.java
|
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.ide.eclipse.ddms;
import com.android.ddmlib.AndroidDebugBridge;
import com.android.ddmlib.AndroidDebugBridge.IDeviceChangeListener;
import com.android.ddmlib.IDevice;
import com.android.ddmlib.Log.LogLevel;
import com.android.ddmlib.logcat.LogCatMessage;
import com.android.ddmuilib.logcat.ILogCatBufferChangeListener;
import com.android.ddmuilib.logcat.LogCatReceiver;
import com.android.ddmuilib.logcat.LogCatReceiverFactory;
import com.android.ide.eclipse.ddms.views.LogCatView;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.jface.util.IPropertyChangeListener;
import org.eclipse.jface.util.PropertyChangeEvent;
import org.eclipse.jface.window.Window;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.ui.IViewPart;
import org.eclipse.ui.IWorkbenchPage;
import org.eclipse.ui.IWorkbenchWindow;
import org.eclipse.ui.PartInitException;
import org.eclipse.ui.PlatformUI;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* LogCatMonitor helps in monitoring the logcat output from a set of devices.
* It scans through the received logcat messages, and activates the logcat view
* if any message is deemed important.
*/
public class LogCatMonitor {
public static final String AUTO_MONITOR_PREFKEY = "ddms.logcat.automonitor"; //$NON-NLS-1$
public static final String AUTO_MONITOR_LOGLEVEL = "ddms.logcat.auotmonitor.level"; //$NON-NLS-1$
private static final String AUTO_MONITOR_PROMPT_SHOWN = "ddms.logcat.automonitor.userprompt"; //$NON-NLS-1$
private IPreferenceStore mPrefStore;
private Map<String, DeviceData> mMonitoredDevices;
private IDebuggerConnector[] mConnectors;
private int mMinMessagePriority;
/**
* Flag that controls when the logcat stream is checked. This flag is set when the user
* performs a launch, and is reset as soon as the logcat view is displayed.
*/
final AtomicBoolean mMonitorEnabled = new AtomicBoolean(false);
public LogCatMonitor(IDebuggerConnector[] debuggerConnectors, IPreferenceStore prefStore) {
mConnectors = debuggerConnectors;
mPrefStore = prefStore;
mMinMessagePriority =
LogLevel.getByString(mPrefStore.getString(AUTO_MONITOR_LOGLEVEL)).getPriority();
mMonitoredDevices = new HashMap<String, DeviceData>();
AndroidDebugBridge.addDeviceChangeListener(new IDeviceChangeListener() {
@Override
public void deviceDisconnected(IDevice device) {
unmonitorDevice(device.getSerialNumber());
mMonitoredDevices.remove(device.getSerialNumber());
}
@Override
public void deviceConnected(IDevice device) {
}
@Override
public void deviceChanged(IDevice device, int changeMask) {
}
});
mPrefStore.addPropertyChangeListener(new IPropertyChangeListener() {
@Override
public void propertyChange(PropertyChangeEvent event) {
if (AUTO_MONITOR_PREFKEY.equals(event.getProperty())
&& event.getNewValue().equals(false)) {
unmonitorAllDevices();
} else if (AUTO_MONITOR_LOGLEVEL.equals(event.getProperty())) {
mMinMessagePriority =
LogLevel.getByString((String) event.getNewValue()).getPriority();
}
}
});
}
private void unmonitorAllDevices() {
for (String device : mMonitoredDevices.keySet()) {
unmonitorDevice(device);
}
mMonitoredDevices.clear();
}
private void unmonitorDevice(String deviceSerial) {
DeviceData data = mMonitoredDevices.get(deviceSerial);
if (data == null) {
return;
}
data.receiver.removeMessageReceivedEventListener(data.bufferChangeListener);
}
public void monitorDevice(final IDevice device) {
if (!mPrefStore.getBoolean(AUTO_MONITOR_PREFKEY)) {
// do not monitor device if auto monitoring is off
return;
}
mMonitorEnabled.set(true);
if (mMonitoredDevices.keySet().contains(device.getSerialNumber())) {
// the device is already monitored
return;
}
LogCatReceiver r = LogCatReceiverFactory.INSTANCE.newReceiver(device, mPrefStore);
ILogCatBufferChangeListener l = new ILogCatBufferChangeListener() {
@Override
public void bufferChanged(List<LogCatMessage> addedMessages,
List<LogCatMessage> deletedMessages) {
checkMessages(addedMessages, device);
}
};
r.addMessageReceivedEventListener(l);
mMonitoredDevices.put(device.getSerialNumber(), new DeviceData(r, l));
}
private void checkMessages(List<LogCatMessage> receivedMessages, IDevice device) {
if (!mMonitorEnabled.get()) {
return;
}
// check the received list of messages to see if any of them are
// significant enough to be seen by the user. If so, activate the logcat view
// to display those messages
for (LogCatMessage m : receivedMessages) {
if (isImportantMessage(m)) {
focusLogCatView(device, m.getAppName());
// now that logcat view is active, no need to check messages until the next
// time user launches an application.
mMonitorEnabled.set(false);
break;
}
}
}
/**
* Check whether a message is "important". Currently, we assume that a message is important if
* it is of severity level error or higher, and it belongs to an app currently in the workspace.
*/
private boolean isImportantMessage(LogCatMessage m) {
if (m.getLogLevel().getPriority() < mMinMessagePriority) {
return false;
}
String app = m.getAppName();
for (IDebuggerConnector c : mConnectors) {
if (c.isWorkspaceApp(app)) {
return true;
}
}
return false;
}
private void focusLogCatView(final IDevice device, final String appName) {
Display.getDefault().asyncExec(new Runnable() {
@Override
public void run() {
IWorkbenchWindow window = PlatformUI.getWorkbench().getActiveWorkbenchWindow();
if (window == null) {
return;
}
IWorkbenchPage page = window.getActivePage();
if (page == null) {
return;
}
// if the logcat view is not visible, then prompt the user once to set
// logcat monitoring preferences
if (!isLogCatViewVisible(page)) {
boolean showLogCatView = promptUserOnce(page.getWorkbenchWindow().getShell());
if (!showLogCatView) {
return;
}
}
// display view
final LogCatView v = displayLogCatView(page);
if (v == null) {
return;
}
// select correct device
v.selectionChanged(device);
// select appropriate filter
v.selectTransientAppFilter(appName);
}
private boolean isLogCatViewVisible(IWorkbenchPage page) {
IViewPart view = page.findView(LogCatView.ID);
return view != null && page.isPartVisible(view);
}
private LogCatView displayLogCatView(IWorkbenchPage page) {
// if the view is already in the page, just bring it to the front
// without giving it focus.
IViewPart view = page.findView(LogCatView.ID);
if (view != null) {
page.bringToTop(view);
if (view instanceof LogCatView) {
return (LogCatView)view;
}
}
// if the view is not in the page, then create and show it.
try {
return (LogCatView) page.showView(LogCatView.ID);
} catch (PartInitException e) {
return null;
}
}
private boolean promptUserOnce(Shell shell) {
// see if this prompt was already displayed
boolean promptShown = mPrefStore.getBoolean(AUTO_MONITOR_PROMPT_SHOWN);
if (promptShown) {
return mPrefStore.getBoolean(AUTO_MONITOR_PREFKEY);
}
LogCatMonitorDialog dlg = new LogCatMonitorDialog(shell);
int r = dlg.open();
// save preference indicating that this dialog has been displayed once
mPrefStore.setValue(AUTO_MONITOR_PROMPT_SHOWN, true);
mPrefStore.setValue(AUTO_MONITOR_PREFKEY, dlg.shouldMonitor());
mPrefStore.setValue(AUTO_MONITOR_LOGLEVEL, dlg.getMinimumPriority());
return r == Window.OK && dlg.shouldMonitor();
}
});
}
private static class DeviceData {
public final LogCatReceiver receiver;
public final ILogCatBufferChangeListener bufferChangeListener;
public DeviceData(LogCatReceiver r, ILogCatBufferChangeListener l) {
receiver = r;
bufferChangeListener = l;
}
}
}
|
Pix-00/olea-v2_flask_1_
|
src/pink/pwd_tools.py
|
import lzma
import os
import pickle
import random
import string
from math import log2
from typing import Set
PWD_CHARS = string.digits + string.ascii_letters + string.punctuation
def generate_pwd() -> str:
random.seed(os.urandom(64))
return ''.join(random.choices(PWD_CHARS, k=20))
def is_common_pwd(pwd: str) -> bool:
with lzma.open(f'{os.path.dirname(__file__)}/common-passwords.xz',
'rb') as f:
common_pwd: Set[str] = pickle.load(f)
return pwd in common_pwd
# about pwd stength
# modified from
# https://github.com/kolypto/py-password-strength/blob/master/password_strength/stats.py
# Here, we want a function that:
# 1. f(x)=0.333 at x=weak_bits
# 2. f(x)=0.950 at x=weak_bits*3 (great estimation for a perfect password)
# 3. f(x) is almost linear in range{weak_bits .. weak_bits*2}: doubling the bits should double the strength
# 4. f(x) has an asymptote of 1.0 (normalization)
# First, the function:
# f(x) = 1 - (1-WEAK_MAX)*2^( -k*x)
# Now, the equation:
# f(HARD_BITS) = HARD_VAL
# 1 - (1-WEAK_MAX)*2^( -k*HARD_BITS) = HARD_VAL
# 2^( -k*HARD_BITS) = (1 - HARD_VAL) / (1-WEAK_MAX)
# k = -log2((1 - HARD_VAL) / (1-WEAK_MAX)) / HARD_BITS
WEAK_MAX: float = 1 / 3
WEAK_BITS: int = 30
HARD_BITS: int = WEAK_BITS * 3
HARD_VAL: float = 0.950
K: float = -log2((1 - HARD_VAL) / (1 - WEAK_MAX)) / HARD_BITS
def measure_strength(pwd: str) -> float:
''' Get password strength as a number normalized to range {0 .. 1}.
Normalization is done in the following fashion:
1. If entropy_bits <= weak_bits -- linear in range{0.0 .. 0.33} (weak)
2. If entropy_bits <= weak_bits*2 -- almost linear in range{0.33 .. 0.66} (medium)
3. If entropy_bits > weak_bits*3 -- asymptotic towards 1.0 (strong)
'''
# https://en.wikipedia.org/wiki/Password_strength
entropy_bits: float = len(pwd) * log2(len(set(pwd)))
if entropy_bits <= WEAK_BITS:
return WEAK_MAX * entropy_bits / WEAK_BITS
return 1 - (1 - WEAK_MAX) * pow(2, -K *
(entropy_bits - WEAK_BITS)) # with offset
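

# Illustrative usage sketch (not part of the original module). Note that
# is_common_pwd needs the bundled common-passwords.xz file next to this module,
# so the call below assumes that data file is present.
if __name__ == '__main__':
    candidate = generate_pwd()                        # 20 chars from PWD_CHARS
    print('generated:', candidate)
    print('common?  ', is_common_pwd(candidate))      # expected: False
    # entropy here is well past WEAK_BITS, so the score lands in the strong range
    print('strength:', round(measure_strength(candidate), 3))
    print('weak pwd:', round(measure_strength('password'), 3))  # roughly 0.25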
|
guardaco/tzstats
|
src/components/Home/TransactionVolume/TransactionVolume.js
|
<gh_stars>10-100
import React from 'react';
import styled from 'styled-components';
import { Card, Value, Devices } from '../../Common';
import BarChart from './BarChart';
import _ from 'lodash';
const TransactionVolume = ({ txSeries, txVol24h }) => {
const avgVolume = _.sumBy(txSeries, o => o.value) / txSeries.length;
const avgTxn = _.sumBy(txSeries, o => o.n_tx) / txSeries.length;
return (
<Wrapper>
<Card title={'Transaction Volume Last 30d'}>
<BarChart data={txSeries} />
<DataRow>
<DataColumn>
<DataItem>
<DataTitle>24h Transactions</DataTitle>
<Value value={txVol24h[1]} />
</DataItem>
<DataItem>
<DataTitle>24h Volume</DataTitle>
<Value type="currency" digits={0} round={true} dim={0} value={txVol24h[0]} />
</DataItem>
</DataColumn>
<DataColumn>
<DataItem>
<DataTitle>30d Avg Transactions</DataTitle>
<Value prec={0} value={avgTxn}/>
</DataItem>
<DataItem>
<DataTitle>30d Avg Volume</DataTitle>
<Value type="currency" digits={0} round={true} dim={0} value={avgVolume}/>
</DataItem>
</DataColumn>
</DataRow>
</Card>
</Wrapper>
);
};
const Wrapper = styled.div`
min-width: 300px;
margin: 0 5px;
flex: 2;
`;
const DataRow = styled.div`
display: flex;
flex-direction: row;
@media ${Devices.mobileL} {
display: block;
}
`;
const DataColumn = styled.div`
display: flex;
flex-direction: column;
justify-content: space-between;
align-items: center;
position: relative;
flex-wrap: wrap;
margin-top: 15px;
flex-grow: 1;
@media ${Devices.mobileL} {
flex-direction: row;
min-width: unset;
margin-left: 0;
}
`;
const DataItem = styled.div`
font-size: 14px;
margin-left: 0px;
margin-right: 0px;
white-space: nowrap;
display: flex;
flex-grow: 1;
align-items: center;
&:last-child {
margin-right: 0;
}
@media ${Devices.mobileL} {
max-width: unset;
width: 100%;
margin-right: 0;
line-height: 1.4;
}
`;
const DataTitle = styled.div`
  color: rgba(255, 255, 255, 0.52);
font-size: 12px;
margin-right: 15px;
width: 100%;
text-align: right;
@media ${Devices.mobileL} {
text-align: left;
}
`;
export default TransactionVolume;
|
OpenMunicipality/api-service-casemanagement
|
src/main/generated-sources/cxf/se/tekis/servicecontract/GetRoller.java
|
package se.tekis.servicecontract;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* &lt;complexType&gt;
* &lt;complexContent&gt;
* &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt;
* &lt;sequence&gt;
* &lt;element name="rollTyp" type="{www.tekis.se/ServiceContract}RollTyp"/&gt;
* &lt;element name="statusfilter" type="{www.tekis.se/ServiceContract}StatusFilter"/&gt;
* &lt;/sequence&gt;
* &lt;/restriction&gt;
* &lt;/complexContent&gt;
* &lt;/complexType&gt;
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
"rollTyp",
"statusfilter"
})
@XmlRootElement(name = "GetRoller")
public class GetRoller {
@XmlElement(required = true)
@XmlSchemaType(name = "string")
protected RollTyp rollTyp;
@XmlElement(required = true)
@XmlSchemaType(name = "string")
protected StatusFilter statusfilter;
/**
* Gets the value of the rollTyp property.
*
* @return
* possible object is
* {@link RollTyp }
*
*/
public RollTyp getRollTyp() {
return rollTyp;
}
/**
* Sets the value of the rollTyp property.
*
* @param value
* allowed object is
* {@link RollTyp }
*
*/
public void setRollTyp(RollTyp value) {
this.rollTyp = value;
}
/**
* Gets the value of the statusfilter property.
*
* @return
* possible object is
* {@link StatusFilter }
*
*/
public StatusFilter getStatusfilter() {
return statusfilter;
}
/**
* Sets the value of the statusfilter property.
*
* @param value
* allowed object is
* {@link StatusFilter }
*
*/
public void setStatusfilter(StatusFilter value) {
this.statusfilter = value;
}
}
|
gb-6k-house/HuxiaoQiHuo
|
HuXiaoQiHuo/HuXiaoQiHuo/Public/NetWork/DataObj/GraphServerObj.h
|
//
// GraphServerObj.h
// traderex
//
// Created by zhouqing on 15/7/23.
//  Copyright (c) 2015 EasyFly. All rights reserved.
//
#import <Foundation/Foundation.h>
@interface GraphServerObj : NSObject
@property(nonatomic,copy)NSString * GIP;
@property(nonatomic,copy)NSString * GPort;
@property(nonatomic,copy)NSString * DBURL;
@end
|
lhzheng880828/VOIPCall
|
doc/libjitisi/sources/net/sf/fmj/media/Module.java
|
<gh_stars>0
package net.sf.fmj.media;
import javax.media.Controls;
import javax.media.Format;
public interface Module extends Controls {
void connectorPushed(InputConnector inputConnector);
InputConnector getInputConnector(String str);
String[] getInputConnectorNames();
String getName();
OutputConnector getOutputConnector(String str);
String[] getOutputConnectorNames();
boolean isInterrupted();
void registerInputConnector(String str, InputConnector inputConnector);
void registerOutputConnector(String str, OutputConnector outputConnector);
void reset();
void setFormat(Connector connector, Format format);
void setModuleListener(ModuleListener moduleListener);
void setName(String str);
}
|
Ed-Fi-Exchange-OSS/SDCOE-Universal-Transcript
|
pdf-utils/write-metadata.js
|
<reponame>Ed-Fi-Exchange-OSS/SDCOE-Universal-Transcript<filename>pdf-utils/write-metadata.js
const fs = require('fs').promises;
const { PDFDocument, PDFName, PDFHexString } = require('pdf-lib');
/**
* This function takes the PDF file path and
* the key and value for custom metadata and writes the data in the PDF itself.
*
* @param {String} filePath
* @param {String} key
* @param {String} value
* @returns { Promise }
*/
async function writeMetaData(filePath, key, value) {
const data = await fs.readFile(filePath);
const pdfDoc = await PDFDocument.load(data);
if (pdfDoc) {
const keyName = PDFName.of(key);
pdfDoc.getInfoDict()?.set(keyName, PDFHexString.fromText(value));
const pdfBytes = await pdfDoc?.save();
if (pdfBytes) {
await fs.writeFile(filePath, pdfBytes);
return { message: 'File written successfully', filePath, key };
}
}
}
module.exports = { writeMetaData };
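
// Illustrative usage sketch only (not part of the original module); the file
// path, key and value below are made-up examples. writeMetaData resolves once
// the metadata has been written back into the same file.
//
//   const { writeMetaData } = require('./write-metadata');
//   writeMetaData('./sample-transcript.pdf', 'TranscriptId', 'abc-123')
//     .then((result) => console.log(result))
//     .catch((err) => console.error('failed to write metadata', err));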
|
ingvr/test-store
|
client/src/actions/categories.js
|
import Axios from "axios";
import { CATEGORY_API_URL } from "../constants";
import { productsResetCategory, fetchProducts } from "./index";
export const categoriesRequested = payload => {
return {
type: "CATEGORIES_REQUESTED",
payload
};
};
export const categoriesReceived = payload => {
return {
type: "CATEGORIES_RECEIVED",
payload,
receivedAt: Date.now()
};
};
export const fetchCategories = payload => {
return dispatch => {
dispatch(categoriesRequested(payload));
const apiUrl = `${CATEGORY_API_URL}/get/all`;
return Axios.get(apiUrl)
.then(response => {
dispatch(categoriesReceived(response.data.data));
})
.catch(error => {
console.log("Fetch categories failed: ", error);
});
};
};
export const categoryAddSuccess = payload => {
return {
type: "CATEGORY_ADD_SUCCESS",
payload
};
};
export const categoryAdd = title => {
return dispatch => {
const apiUrl = `${CATEGORY_API_URL}/add`;
return Axios.post(apiUrl, { title })
.then(response => {
dispatch(categoryAddSuccess(response.data.categories));
})
.catch(error => {
console.log("Category add dispatch failed: ", error);
});
};
};
export const categoryDeleteSuccess = payload => {
return {
type: "CATEGORY_DELETE_SUCCESS",
payload
};
};
export const categoryDelete = categorytId => {
return dispatch => {
const apiUrl = `${CATEGORY_API_URL}/delete`;
return Axios.delete(`${apiUrl}/${categorytId}`)
.then(({ data: { data: { categories, products } } }) => {
dispatch(categoryDeleteSuccess(categories));
dispatch(productsResetCategory(products));
dispatch(fetchProducts());
})
.catch(error => {
console.log("Category delete dispatch failed: ", error);
});
};
};
|
zhangpf/fuchsia-rs
|
zircon/kernel/top/debug.cc
|
<filename>zircon/kernel/top/debug.cc
// Copyright 2016 The Fuchsia Authors
// Copyright (c) 2008-2015 <NAME>
//
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file or at
// https://opensource.org/licenses/MIT
#include <arch/ops.h>
#include <ctype.h>
#include <debug.h>
#include <dev/hw_rng.h>
#include <kernel/spinlock.h>
#include <list.h>
#include <platform.h>
#include <platform/debug.h>
#include <printf.h>
#include <stdio.h>
#include <stdlib.h>
#include <zircon/types.h>
#include <zircon/time.h>
void spin(uint32_t usecs) {
zx_time_t start = current_time();
zx_duration_t nsecs = ZX_USEC(usecs);
while (zx_time_sub_time(current_time(), start) < nsecs)
;
}
void _panic(void* caller, void* frame, const char* fmt, ...) {
platform_panic_start();
printf("panic (caller %p frame %p): ", caller, frame);
va_list ap;
va_start(ap, fmt);
vprintf(fmt, ap);
va_end(ap);
platform_halt(HALT_ACTION_HALT, HALT_REASON_SW_PANIC);
}
static void puts_for_panic(const char *msg, size_t len)
{
__printf_output_func(msg, len, NULL);
}
void _panic_no_format(const char *msg, size_t len) {
platform_panic_start();
puts_for_panic(msg, len);
platform_halt(HALT_ACTION_HALT, HALT_REASON_SW_PANIC);
}
void __stack_chk_fail(void) {
panic_no_format("stack canary corrupted!\n");
}
uintptr_t choose_stack_guard(void) {
uintptr_t guard;
if (hw_rng_get_entropy(&guard, sizeof(guard), true) != sizeof(guard)) {
// We can't get a random value, so use a randomish value.
guard = 0xdeadbeef00ff00ffUL ^ (uintptr_t)&guard;
}
return guard;
}
#if !DISABLE_DEBUG_OUTPUT
void hexdump_very_ex(const void* ptr, size_t len, uint64_t disp_addr, hexdump_print_fn_t* pfn) {
addr_t address = (addr_t)ptr;
size_t count;
int zero_line_count = 0;
for (count = 0; count < len; count += 16, address += 16) {
union {
uint32_t buf[4];
uint8_t cbuf[16];
} u;
size_t s = ROUNDUP(MIN(len - count, 16), 4);
size_t i;
bool cur_line_zeros = true;
for (i = 0; i < s / 4; i++) {
cur_line_zeros &= (((const uint32_t*)address)[i] == 0);
}
if (cur_line_zeros) {
zero_line_count++;
if ((count + 16) >= len) {
// print the last line normally
} else if (zero_line_count >= 2) {
if (zero_line_count == 2) {
pfn(".....\n");
}
continue;
}
} else {
zero_line_count = 0;
}
pfn(((disp_addr + len) > 0xFFFFFFFF)
? "0x%016llx: "
: "0x%08llx: ",
disp_addr + count);
for (i = 0; i < s / 4; i++) {
u.buf[i] = ((const uint32_t*)address)[i];
pfn("%08x ", u.buf[i]);
}
for (; i < 4; i++) {
pfn(" ");
}
pfn("|");
for (i = 0; i < 16; i++) {
char c = u.cbuf[i];
if (i < s && isprint(c)) {
pfn("%c", c);
} else {
pfn(".");
}
}
pfn("|\n");
}
}
void hexdump8_very_ex(const void* ptr, size_t len, uint64_t disp_addr, hexdump_print_fn_t* pfn) {
addr_t address = (addr_t)ptr;
size_t count;
size_t i;
int zero_line_count = 0;
for (count = 0; count < len; count += 16, address += 16) {
bool cur_line_zeros = true;
for (i = 0; i < MIN(len - count, 16); i++) {
cur_line_zeros &= (((const uint8_t*)address)[i] == 0);
}
if (cur_line_zeros) {
zero_line_count++;
if ((count + 16) >= len) {
// print the last line normally
} else if (zero_line_count >= 2) {
if (zero_line_count == 2) {
pfn(".....\n");
}
continue;
}
} else {
zero_line_count = 0;
}
pfn(((disp_addr + len) > 0xFFFFFFFF)
? "0x%016llx: "
: "0x%08llx: ",
disp_addr + count);
for (i = 0; i < MIN(len - count, 16); i++) {
pfn("%02hhx ", *(const uint8_t*)(address + i));
}
for (; i < 16; i++) {
pfn(" ");
}
pfn("|");
for (i = 0; i < MIN(len - count, 16); i++) {
char c = ((const char*)address)[i];
pfn("%c", isprint(c) ? c : '.');
}
pfn("\n");
}
}
#endif // !DISABLE_DEBUG_OUTPUT
|
jaylinjiehong/NumberOfPasses
|
Android/android-30/android30_code_view/src/com/company/source/com/android/shell/HeapDumpReceiver.java
|
<reponame>jaylinjiehong/NumberOfPasses<filename>Android/android-30/android30_code_view/src/com/company/source/com/android/shell/HeapDumpReceiver.java
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.shell;
import static com.android.shell.BugreportProgressService.isTv;
import android.annotation.Nullable;
import android.app.Notification;
import android.app.NotificationChannel;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.FileUtils;
import android.os.Process;
import android.text.format.DateUtils;
import android.util.Log;
import java.io.File;
/**
* Receiver that handles finished heap dumps.
*/
public class HeapDumpReceiver extends BroadcastReceiver {
private static final String TAG = "HeapDumpReceiver";
/**
* Broadcast action to determine when to delete a specific dump heap. Must include a {@link
* HeapDumpActivity#KEY_URI} String extra.
*/
static final String ACTION_DELETE_HEAP_DUMP = "com.android.shell.action.DELETE_HEAP_DUMP";
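    // Illustrative sketch only (not part of the original source): a caller that
    // wants to trigger this delete path would broadcast something like the
    // hypothetical intent below, where the KEY_URI extra carries the content URI
    // of the heap dump to remove.
    //
    //   Intent delete = new Intent(ACTION_DELETE_HEAP_DUMP);
    //   delete.setPackage("com.android.shell");
    //   delete.putExtra(HeapDumpActivity.KEY_URI, dumpUri.toString());
    //   context.sendBroadcast(delete);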
/** Broadcast sent when heap dump collection has been completed. */
private static final String ACTION_HEAP_DUMP_FINISHED =
"com.android.internal.intent.action.HEAP_DUMP_FINISHED";
/** The process we are reporting */
static final String EXTRA_PROCESS_NAME = "com.android.internal.extra.heap_dump.PROCESS_NAME";
/** The size limit the process reached. */
static final String EXTRA_SIZE_BYTES = "com.android.internal.extra.heap_dump.SIZE_BYTES";
/** Whether the user initiated the dump or not. */
static final String EXTRA_IS_USER_INITIATED =
"com.android.internal.extra.heap_dump.IS_USER_INITIATED";
/** Optional name of package to directly launch. */
static final String EXTRA_REPORT_PACKAGE =
"com.android.internal.extra.heap_dump.REPORT_PACKAGE";
private static final String NOTIFICATION_CHANNEL_ID = "heapdumps";
private static final int NOTIFICATION_ID = 2019;
/**
* Always keep heap dumps taken in the last week.
*/
private static final long MIN_KEEP_AGE_MS = DateUtils.WEEK_IN_MILLIS;
@Override
public void onReceive(Context context, Intent intent) {
Log.d(TAG, "onReceive(): " + intent);
final String action = intent.getAction();
if (action == null) {
Log.e(TAG, "null action received");
return;
}
switch (action) {
case Intent.ACTION_BOOT_COMPLETED:
cleanupOldFiles(context);
break;
case ACTION_DELETE_HEAP_DUMP:
deleteHeapDump(context, intent.getStringExtra(HeapDumpActivity.KEY_URI));
break;
case ACTION_HEAP_DUMP_FINISHED:
showDumpNotification(context, intent);
break;
}
}
private void cleanupOldFiles(Context context) {
final PendingResult result = goAsync();
new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
try {
Log.d(TAG, "Deleting from " + new File(context.getFilesDir(), "heapdumps"));
FileUtils.deleteOlderFiles(new File(context.getFilesDir(), "heapdumps"), 0,
MIN_KEEP_AGE_MS);
} catch (RuntimeException e) {
Log.e(TAG, "Couldn't delete old files", e);
}
result.finish();
return null;
}
}.execute();
}
private void deleteHeapDump(Context context, @Nullable final String uri) {
if (uri == null) {
Log.e(TAG, "null URI for delete heap dump intent");
return;
}
final PendingResult result = goAsync();
new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
context.getContentResolver().delete(Uri.parse(uri), null, null);
result.finish();
return null;
}
}.execute();
}
private void showDumpNotification(Context context, Intent intent) {
final boolean isUserInitiated = intent.getBooleanExtra(
EXTRA_IS_USER_INITIATED, false);
final String procName = intent.getStringExtra(EXTRA_PROCESS_NAME);
final int uid = intent.getIntExtra(Intent.EXTRA_UID, 0);
final String reportPackage = intent.getStringExtra(
EXTRA_REPORT_PACKAGE);
final long size = intent.getLongExtra(EXTRA_SIZE_BYTES, 0);
if (procName == null) {
Log.e(TAG, "No process name sent over");
return;
}
NotificationManager nm = NotificationManager.from(context);
nm.createNotificationChannel(
new NotificationChannel(NOTIFICATION_CHANNEL_ID,
"Heap dumps",
NotificationManager.IMPORTANCE_DEFAULT));
final int titleId = isUserInitiated
? com.android.internal.R.string.dump_heap_ready_notification
: com.android.internal.R.string.dump_heap_notification;
final String procDisplayName = uid == Process.SYSTEM_UID
? context.getString(com.android.internal.R.string.android_system_label)
: procName;
String text = context.getString(titleId, procDisplayName);
Intent shareIntent = new Intent();
shareIntent.setClassName(context, HeapDumpActivity.class.getName());
shareIntent.putExtra(EXTRA_PROCESS_NAME, procName);
shareIntent.putExtra(EXTRA_SIZE_BYTES, size);
shareIntent.putExtra(EXTRA_IS_USER_INITIATED, isUserInitiated);
shareIntent.putExtra(Intent.EXTRA_UID, uid);
if (reportPackage != null) {
shareIntent.putExtra(EXTRA_REPORT_PACKAGE, reportPackage);
}
final Notification.Builder builder = new Notification.Builder(context,
NOTIFICATION_CHANNEL_ID)
.setSmallIcon(
isTv(context) ? R.drawable.ic_bug_report_black_24dp
: com.android.internal.R.drawable.stat_sys_adb)
.setLocalOnly(true)
.setColor(context.getColor(
com.android.internal.R.color.system_notification_accent_color))
.setContentTitle(text)
.setTicker(text)
.setAutoCancel(true)
.setContentText(context.getText(
com.android.internal.R.string.dump_heap_notification_detail))
.setContentIntent(PendingIntent.getActivity(context, 2, shareIntent,
PendingIntent.FLAG_UPDATE_CURRENT));
Log.v(TAG, "Creating share heap dump notification");
NotificationManager.from(context).notify(NOTIFICATION_ID, builder.build());
}
}
|
dawidkski/federated-faceid
|
src/facenet/train_triplet.py
|
import argparse
import dataclasses
import time
from dataclasses import dataclass
from pathlib import Path
from typing import Any, List, Mapping
import numpy as np
import torch
from torch.nn import Module
from torch.nn.modules.distance import PairwiseDistance
from torch.optim.optimizer import Optimizer
from torch.utils.data.dataloader import DataLoader
from torch.utils.tensorboard import SummaryWriter
from tqdm import tqdm
from facenet.commons import ModelBuilder, get_validation_data_loader, load_checkpoint
from facenet.dataloaders import facemetadataset
from facenet.dataloaders.facemetadataset import (
FaceMetaDataset,
PeopleDataset,
TripletIndexes,
TripletsDataset,
)
from facenet.evaluation import EvaluationMetrics, evaluate
from facenet.settings import DataSettings, ModelSettings
def parse_args():
parser = argparse.ArgumentParser(
description="Training FaceNet facial recognition model using Triplet Loss."
)
# Dataset settings
parser.add_argument(
"--output_dir", type=lambda p: Path(p), default=DataSettings.output_dir
)
parser.add_argument(
"--dataset_dir", type=lambda p: Path(p), default=DataSettings.dataset_dir
)
parser.add_argument(
"--lfw_dir", type=lambda p: Path(p), default=DataSettings.lfw_dir
)
parser.add_argument(
"--dataset_csv_file",
type=lambda p: Path(p),
default=DataSettings.dataset_csv_file,
)
parser.add_argument(
"--training_triplets_path",
type=lambda p: Path(p),
default=DataSettings.training_triplets_path,
)
parser.add_argument(
"--checkpoint_path",
type=lambda p: Path(p) if p else None,
default=DataSettings.checkpoint_path,
)
# Training settings
parser.add_argument(
"--lfw_batch_size", default=ModelSettings.lfw_batch_size, type=int
)
parser.add_argument(
"--lfw_validation_epoch_interval",
type=int,
default=ModelSettings.lfw_validation_epoch_interval,
)
parser.add_argument(
"--model_architecture",
type=str,
choices=[
"resnet18",
"resnet34",
"resnet50",
"resnet101",
"inceptionresnetv1",
"inceptionresnetv2",
],
default=ModelSettings.model_architecture,
)
parser.add_argument("--epochs", type=int, default=ModelSettings.epochs)
parser.add_argument(
"--num_triplets_train", type=int, default=ModelSettings.num_triplets_train
)
parser.add_argument(
"--batch_size",
default=ModelSettings.batch_size,
type=int,
help="Batch size (default: 64)",
)
parser.add_argument(
"--num_workers",
default=ModelSettings.num_workers,
type=int,
help="Number of workers for data loaders (default: 4)",
)
parser.add_argument(
"--embedding_dim",
default=ModelSettings.embedding_dim,
type=int,
help="Dimension of the embedding vector (default: 128)",
)
parser.add_argument(
"--pretrained_on_imagenet",
action="store_true",
default=ModelSettings.pretrained_on_imagenet,
)
parser.add_argument(
"--optimizer",
type=str,
choices=["sgd", "adagrad", "rmsprop", "adam"],
default=ModelSettings.optimizer,
)
parser.add_argument(
"--learning_rate", type=float, default=ModelSettings.learning_rate
)
parser.add_argument(
"--triplet_loss_margin", type=float, default=ModelSettings.triplet_loss_margin
)
return parser.parse_args()
class Tensorboard:
def __init__(self, log_path: Path):
self._writer = SummaryWriter(str(log_path))
def add_dict(self, dictionary: Mapping[str, Any], global_step: int):
for key, value in dictionary.items():
self._writer.add_scalar(key, value, global_step=global_step)
def add_scalar(self, name: str, value: float, global_step: int):
self._writer.add_scalar(name, value, global_step=global_step)
class OptimizerBuilder:
@staticmethod
def build(model: Module, optimizer: str, learning_rate: float) -> Optimizer:
# Set optimizers
if optimizer == "sgd":
optimizer_model = torch.optim.SGD(model.parameters(), lr=learning_rate)
elif optimizer == "adam":
optimizer_model = torch.optim.Adam(model.parameters(), lr=learning_rate)
else:
raise ValueError(f"Optimizer {optimizer} is unknown!")
return optimizer_model
@dataclass
class TrainStepResults:
loss: float
steps: int
class TrainEpoch:
def __init__(
self,
model: Module,
dataset: FaceMetaDataset,
dataset_eval_loader: DataLoader,
optimizer: Optimizer,
tensorboard: Tensorboard,
loss_fn: Module,
distance_fn: Module,
settings_model: ModelSettings,
global_step: int = 0,
):
self.model = model
self.dataset = dataset
self.optimizer = optimizer
self.loss_fn = loss_fn
self.distance_fn = distance_fn
self.settings = settings_model
self.tensorboard = tensorboard
self.dataset_eval_loader = dataset_eval_loader
self.global_step: int = global_step
self.log_every_step: int = 10
self.evaluate_every_step: int = 310
def train_for_epoch(self):
self.model.train()
num_batches: int = 0
while num_batches < self.settings.batches_in_epoch:
print("Selecting people")
people_dataset: PeopleDataset = facemetadataset.select_people(
self.dataset,
self.settings.people_per_batch,
self.settings.images_per_person,
)
print("Calculating embeddings")
self.model.eval()
with torch.no_grad():
                embeddings: np.ndarray = self.calculate_embeddings(
self.model, people_dataset
)
triplets: List[TripletIndexes] = facemetadataset.select_triplets(
embeddings,
people_dataset.num_images_per_class,
self.settings.people_per_batch,
self.settings.triplet_loss_margin,
)
triplet_dataset = TripletsDataset(triplets, people_dataset)
self.model.train()
results: TrainStepResults = self.train_step(triplet_dataset)
num_batches += results.steps
def train_step(self, triplet_dataset: TripletsDataset) -> TrainStepResults:
losses: List[float] = []
local_step: int = 0
triplet_loader = DataLoader(
triplet_dataset, batch_size=self.settings.batch_size, shuffle=True
)
num_batches = int(np.ceil(len(triplet_dataset) / self.settings.batch_size))
for triplets in tqdm(triplet_loader, total=num_batches):
# Calculate triplet loss
triplet_loss = self.loss_fn(
anchor=self.model(triplets["anchor"].cuda()),
positive=self.model(triplets["positive"].cuda()),
negative=self.model(triplets["negative"].cuda()),
).cuda()
# Backward pass
self.optimizer.zero_grad()
triplet_loss.backward()
self.optimizer.step()
self.global_step += 1
local_step += 1
losses.append(triplet_loss.item())
if self.global_step % self.log_every_step == 0:
self.tensorboard.add_scalar(
name="loss_train",
value=sum(losses[-self.log_every_step :]) / len(losses),
global_step=self.global_step,
)
losses: List[float] = []
if self.global_step % self.evaluate_every_step == 0:
print("Validating on LFW!")
self.model.eval()
metrics: EvaluationMetrics = evaluate(
self.model, self.distance_fn, self.dataset_eval_loader, None
)
self.tensorboard.add_dict(dataclasses.asdict(metrics), self.global_step)
self.model.train()
return TrainStepResults(0.0, local_step)
def calculate_embeddings(self, model: Module, people_dataset: PeopleDataset):
image_loader = DataLoader(
people_dataset, batch_size=self.settings.batch_size, shuffle=False
)
num_examples = len(people_dataset)
embeddings = np.zeros((num_examples, self.settings.embedding_dim))
start_idx = 0
for i, image in tqdm(enumerate(image_loader)):
batch_size = min(
num_examples - i * self.settings.batch_size, self.settings.batch_size
)
image = image.cuda()
embedding = model(image).cpu().detach().numpy()
embeddings[start_idx : start_idx + batch_size, :] = embedding
start_idx += self.settings.batch_size
return embeddings
def train_triplet(settings_data: DataSettings, settings_model: ModelSettings):
output_dir: Path = settings_data.output_dir
output_dir_logs = output_dir.joinpath("logs")
output_dir_plots = output_dir.joinpath("plots")
output_dir_checkpoints = output_dir.joinpath("checkpoints")
output_dir_tensorboard = output_dir.joinpath("tensorboard")
output_dir_logs.mkdir(exist_ok=True, parents=True)
output_dir_plots.mkdir(exist_ok=True, parents=True)
output_dir_checkpoints.mkdir(exist_ok=True, parents=True)
model_architecture = settings_model.model_architecture
start_epoch: int = 0
global_step: int = 0
data_loader_validate: DataLoader = get_validation_data_loader(
settings_model, settings_data
)
model: Module = ModelBuilder.build(
settings_model.model_architecture,
settings_model.embedding_dim,
settings_model.pretrained_on_imagenet,
)
print("Using {} model architecture.".format(model_architecture))
# Load model to GPU or multiple GPUs if available
if torch.cuda.is_available():
print("Using single-gpu training.")
model.cuda()
optimizer: Optimizer = OptimizerBuilder.build(
model, settings_model.optimizer, settings_model.learning_rate
)
if settings_data.checkpoint_path:
checkpoint = load_checkpoint(output_dir_checkpoints, model, optimizer)
model = checkpoint.model
optimizer = checkpoint.optimizer
start_epoch = checkpoint.epoch
global_step = checkpoint.global_step
# Start Training loop
total_time_start = time.time()
end_epoch = start_epoch + settings_model.epochs
face_meta_dataset = FaceMetaDataset(
root_dir=settings_data.dataset_dir, csv_name=settings_data.dataset_csv_file
)
l2_distance = PairwiseDistance(2).cuda()
loss_fn = torch.nn.TripletMarginLoss(
margin=settings_model.triplet_loss_margin, reduction="mean"
)
tensorboard = Tensorboard(output_dir_tensorboard)
train_step = TrainEpoch(
model=model,
dataset=face_meta_dataset,
dataset_eval_loader=data_loader_validate,
distance_fn=l2_distance,
loss_fn=loss_fn,
optimizer=optimizer,
tensorboard=tensorboard,
settings_model=settings_model,
global_step=global_step,
)
for epoch in range(start_epoch, end_epoch):
# Training pass
train_step.model.train()
train_step.train_for_epoch()
global_step = train_step.global_step
# Save model checkpoint
state = {
"model_architecture": model_architecture,
"epoch": epoch,
"global_step": global_step,
"embedding_dimension": settings_model.embedding_dim,
"batch_size_training": settings_model.batch_size,
"model_state_dict": model.state_dict(),
"optimizer_state_dict": optimizer.state_dict(),
}
# Save model checkpoint
checkpoint_name = f"{model_architecture}_{epoch}.pt"
torch.save(state, output_dir_checkpoints.joinpath(checkpoint_name))
def main():
args = parse_args()
settings_model = ModelSettings(
learning_rate=args.learning_rate,
model_architecture=args.model_architecture,
epochs=args.epochs,
num_triplets_train=args.num_triplets_train,
batch_size=args.batch_size,
num_workers=args.num_workers,
embedding_dim=args.embedding_dim,
pretrained_on_imagenet=args.pretrained_on_imagenet,
optimizer=args.optimizer,
triplet_loss_margin=args.triplet_loss_margin,
)
settings_data = DataSettings(
output_dir=args.output_dir,
dataset_dir=args.dataset_dir,
lfw_dir=args.lfw_dir,
dataset_csv_file=args.dataset_csv_file,
training_triplets_path=args.training_triplets_path,
checkpoint_path=args.checkpoint_path,
)
train_triplet(settings_data, settings_model)
if __name__ == "__main__":
main()
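# Illustrative sketch (not part of the original script): torch.nn.TripletMarginLoss,
# as configured in train_triplet above, computes mean(max(d(a, p) - d(a, n) + margin, 0))
# over a batch. The random tensors below stand in for real face embeddings and the
# margin value is only an assumed example.
def _triplet_loss_sketch() -> float:
    loss_fn = torch.nn.TripletMarginLoss(margin=0.2, reduction="mean")
    anchor, positive, negative = torch.randn(3, 8, 128).unbind(0)
    return loss_fn(anchor, positive, negative).item()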
|
SlimKatLegacy/android_external_chromium_org
|
net/tools/quic/quic_server_session_test.cc
|
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "net/tools/quic/quic_server_session.h"
#include "net/quic/crypto/quic_crypto_server_config.h"
#include "net/quic/crypto/quic_random.h"
#include "net/quic/quic_connection.h"
#include "net/quic/test_tools/quic_connection_peer.h"
#include "net/quic/test_tools/quic_data_stream_peer.h"
#include "net/quic/test_tools/quic_test_utils.h"
#include "net/tools/epoll_server/epoll_server.h"
#include "net/tools/quic/quic_spdy_server_stream.h"
#include "net/tools/quic/test_tools/quic_test_utils.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
using std::vector;
using net::test::MockConnection;
using net::test::QuicConnectionPeer;
using net::test::QuicDataStreamPeer;
using testing::_;
using testing::StrictMock;
namespace net {
namespace tools {
namespace test {
class QuicServerSessionPeer {
public:
static QuicDataStream* GetIncomingReliableStream(
QuicServerSession* s, QuicStreamId id) {
return s->GetIncomingReliableStream(id);
}
static QuicDataStream* GetDataStream(QuicServerSession* s, QuicStreamId id) {
return s->GetDataStream(id);
}
};
class CloseOnDataStream : public QuicDataStream {
public:
CloseOnDataStream(QuicStreamId id, QuicSession* session)
: QuicDataStream(id, session) {
}
virtual bool OnStreamFrame(const QuicStreamFrame& frame) OVERRIDE {
session()->MarkDecompressionBlocked(1, id());
session()->CloseStream(id());
return true;
}
virtual uint32 ProcessData(const char* data, uint32 data_len) OVERRIDE {
return 0;
}
};
class TestQuicQuicServerSession : public QuicServerSession {
public:
TestQuicQuicServerSession(const QuicConfig& config,
QuicConnection* connection,
QuicSessionOwner* owner)
: QuicServerSession(config, connection, owner),
close_stream_on_data_(false) {
}
virtual QuicDataStream* CreateIncomingDataStream(
QuicStreamId id) OVERRIDE {
if (!ShouldCreateIncomingDataStream(id)) {
return NULL;
}
if (close_stream_on_data_) {
return new CloseOnDataStream(id, this);
} else {
return new QuicSpdyServerStream(id, this);
}
}
void CloseStreamOnData() {
close_stream_on_data_ = true;
}
private:
bool close_stream_on_data_;
};
namespace {
class QuicServerSessionTest : public ::testing::Test {
protected:
QuicServerSessionTest()
: crypto_config_(QuicCryptoServerConfig::TESTING,
QuicRandom::GetInstance()) {
config_.SetDefaults();
config_.set_max_streams_per_connection(3, 3);
connection_ = new MockConnection(true);
session_.reset(new TestQuicQuicServerSession(
config_, connection_, &owner_));
session_->InitializeSession(crypto_config_);
visitor_ = QuicConnectionPeer::GetVisitor(connection_);
}
void MarkHeadersReadForStream(QuicStreamId id) {
QuicDataStream* stream = QuicServerSessionPeer::GetDataStream(
session_.get(), id);
ASSERT_TRUE(stream != NULL);
QuicDataStreamPeer::SetHeadersDecompressed(stream, true);
}
StrictMock<MockQuicSessionOwner> owner_;
MockConnection* connection_;
QuicConfig config_;
QuicCryptoServerConfig crypto_config_;
scoped_ptr<TestQuicQuicServerSession> session_;
QuicConnectionVisitorInterface* visitor_;
};
TEST_F(QuicServerSessionTest, CloseStreamDueToReset) {
// Open a stream, then reset it.
// Send two bytes of payload to open it.
QuicStreamFrame data1(3, false, 0, MakeIOVector("HT"));
vector<QuicStreamFrame> frames;
frames.push_back(data1);
EXPECT_TRUE(visitor_->OnStreamFrames(frames));
EXPECT_EQ(1u, session_->GetNumOpenStreams());
// Pretend we got full headers, so we won't trigger the 'unrecoverable
// compression context' state.
MarkHeadersReadForStream(3);
// Send a reset.
QuicRstStreamFrame rst1(3, QUIC_STREAM_NO_ERROR);
visitor_->OnRstStream(rst1);
EXPECT_EQ(0u, session_->GetNumOpenStreams());
// Send the same two bytes of payload in a new packet.
EXPECT_TRUE(visitor_->OnStreamFrames(frames));
// The stream should not be re-opened.
EXPECT_EQ(0u, session_->GetNumOpenStreams());
}
TEST_F(QuicServerSessionTest, NeverOpenStreamDueToReset) {
// Send a reset.
QuicRstStreamFrame rst1(3, QUIC_STREAM_NO_ERROR);
visitor_->OnRstStream(rst1);
EXPECT_EQ(0u, session_->GetNumOpenStreams());
// Send two bytes of payload.
QuicStreamFrame data1(3, false, 0, MakeIOVector("HT"));
vector<QuicStreamFrame> frames;
frames.push_back(data1);
// When we get data for the closed stream, it implies the far side has
// compressed some headers. As a result we're going to bail due to
// unrecoverable compression context state.
EXPECT_CALL(*connection_, SendConnectionClose(
QUIC_STREAM_RST_BEFORE_HEADERS_DECOMPRESSED));
EXPECT_FALSE(visitor_->OnStreamFrames(frames));
// The stream should never be opened, now that the reset is received.
EXPECT_EQ(0u, session_->GetNumOpenStreams());
}
TEST_F(QuicServerSessionTest, GoOverPrematureClosedStreamLimit) {
QuicStreamFrame data1(3, false, 0, MakeIOVector("H"));
vector<QuicStreamFrame> frames;
frames.push_back(data1);
// Set up the stream such that it's open in OnPacket, but closes half way
// through while on the decompression blocked list.
session_->CloseStreamOnData();
EXPECT_CALL(*connection_, SendConnectionClose(
QUIC_STREAM_RST_BEFORE_HEADERS_DECOMPRESSED));
EXPECT_FALSE(visitor_->OnStreamFrames(frames));
}
TEST_F(QuicServerSessionTest, AcceptClosedStream) {
vector<QuicStreamFrame> frames;
// Send (empty) compressed headers followed by two bytes of data.
frames.push_back(
QuicStreamFrame(3, false, 0, MakeIOVector("\1\0\0\0\0\0\0\0HT")));
frames.push_back(
QuicStreamFrame(5, false, 0, MakeIOVector("\2\0\0\0\0\0\0\0HT")));
EXPECT_TRUE(visitor_->OnStreamFrames(frames));
  // Pretend we got full headers, so we won't trigger the 'unrecoverable
// compression context' state.
MarkHeadersReadForStream(3);
// Send a reset.
QuicRstStreamFrame rst(3, QUIC_STREAM_NO_ERROR);
visitor_->OnRstStream(rst);
// If we were tracking, we'd probably want to reject this because it's data
// past the reset point of stream 3. As it's a closed stream we just drop the
// data on the floor, but accept the packet because it has data for stream 5.
frames.clear();
frames.push_back(QuicStreamFrame(3, false, 2, MakeIOVector("TP")));
frames.push_back(QuicStreamFrame(5, false, 2, MakeIOVector("TP")));
EXPECT_TRUE(visitor_->OnStreamFrames(frames));
}
TEST_F(QuicServerSessionTest, MaxNumConnections) {
EXPECT_EQ(0u, session_->GetNumOpenStreams());
EXPECT_TRUE(
QuicServerSessionPeer::GetIncomingReliableStream(session_.get(), 3));
EXPECT_TRUE(
QuicServerSessionPeer::GetIncomingReliableStream(session_.get(), 5));
EXPECT_TRUE(
QuicServerSessionPeer::GetIncomingReliableStream(session_.get(), 7));
EXPECT_FALSE(
QuicServerSessionPeer::GetIncomingReliableStream(session_.get(), 9));
}
TEST_F(QuicServerSessionTest, MaxNumConnectionsImplicit) {
EXPECT_EQ(0u, session_->GetNumOpenStreams());
EXPECT_TRUE(
QuicServerSessionPeer::GetIncomingReliableStream(session_.get(), 3));
// Implicitly opens two more streams before 9.
EXPECT_FALSE(
QuicServerSessionPeer::GetIncomingReliableStream(session_.get(), 9));
}
TEST_F(QuicServerSessionTest, GetEvenIncomingError) {
// Incoming streams on the server session must be odd.
EXPECT_EQ(NULL,
QuicServerSessionPeer::GetIncomingReliableStream(
session_.get(), 2));
}
} // namespace
} // namespace test
} // namespace tools
} // namespace net
|
puppup420247-org/federal_register
|
lib/federal_register/document_search_details.rb
|
<filename>lib/federal_register/document_search_details.rb
class FederalRegister::DocumentSearchDetails < FederalRegister::Base
extend FederalRegister::Utilities
add_attribute :filters,
:suggestions
def self.search(args)
response = get('/documents/search-details', query: args).parsed_response
new(response)
end
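  # Illustrative usage sketch (not part of the original gem): the args hash is
  # passed straight through as query parameters to /documents/search-details, so
  # callers use the Federal Register search syntax. The condition term below is
  # only an assumed example.
  def self.example_search
    search(conditions: { term: "endangered species" })
  end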
end
|
HedgehogCode/javacpp-presets
|
llvm/src/gen/java/org/bytedeco/llvm/LLVM/LLVMJITCSymbolMapPair.java
|
// Targeted by JavaCPP version 1.5.7-SNAPSHOT: DO NOT EDIT THIS FILE
package org.bytedeco.llvm.LLVM;
import java.nio.*;
import org.bytedeco.javacpp.*;
import org.bytedeco.javacpp.annotation.*;
import static org.bytedeco.javacpp.presets.javacpp.*;
import static org.bytedeco.llvm.global.LLVM.*;
/**
* Represents a pair of a symbol name and an evaluated symbol.
*/
@Properties(inherit = org.bytedeco.llvm.presets.LLVM.class)
public class LLVMJITCSymbolMapPair extends Pointer {
static { Loader.load(); }
/** Default native constructor. */
public LLVMJITCSymbolMapPair() { super((Pointer)null); allocate(); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public LLVMJITCSymbolMapPair(long size) { super((Pointer)null); allocateArray(size); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public LLVMJITCSymbolMapPair(Pointer p) { super(p); }
private native void allocate();
private native void allocateArray(long size);
@Override public LLVMJITCSymbolMapPair position(long position) {
return (LLVMJITCSymbolMapPair)super.position(position);
}
@Override public LLVMJITCSymbolMapPair getPointer(long i) {
return new LLVMJITCSymbolMapPair((Pointer)this).offsetAddress(i);
}
public native LLVMOrcSymbolStringPoolEntryRef Name(); public native LLVMJITCSymbolMapPair Name(LLVMOrcSymbolStringPoolEntryRef setter);
public native @ByRef LLVMJITEvaluatedSymbol Sym(); public native LLVMJITCSymbolMapPair Sym(LLVMJITEvaluatedSymbol setter);
}
|
CamDX/X-Road
|
src/proxy-ui-api/src/test/java/org/niis/xroad/restapi/service/ClientServiceIntegrationTest.java
|
<filename>src/proxy-ui-api/src/test/java/org/niis/xroad/restapi/service/ClientServiceIntegrationTest.java
/**
* The MIT License
* Copyright (c) 2018 Estonian Information System Authority (RIA),
* Nordic Institute for Interoperability Solutions (NIIS), Population Register Centre (VRK)
* Copyright (c) 2015-2017 Estonian Information System Authority (RIA), Population Register Centre (VRK)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.niis.xroad.restapi.service;
import ee.ria.xroad.common.conf.serverconf.model.ClientType;
import ee.ria.xroad.common.identifier.ClientId;
import ee.ria.xroad.common.util.CryptoUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.IOUtils;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.niis.xroad.restapi.facade.GlobalConfFacade;
import org.niis.xroad.restapi.repository.ClientRepository;
import org.niis.xroad.restapi.util.TestUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.jdbc.AutoConfigureTestDatabase;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.security.test.context.support.WithMockUser;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.transaction.annotation.Transactional;
import java.security.cert.CertificateException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;
/**
* test client service
*/
@RunWith(SpringRunner.class)
@SpringBootTest
@AutoConfigureTestDatabase
@Slf4j
@Transactional
@WithMockUser
public class ClientServiceIntegrationTest {
@Autowired
private ClientRepository clientRepository;
private ClientService clientService;
private byte[] pemBytes;
private byte[] derBytes;
private byte[] sqlFileBytes;
@MockBean
private GlobalConfFacade globalConfFacade;
@Before
public void setup() throws Exception {
when(globalConfFacade.getMembers(any())).thenReturn(new ArrayList<>(Arrays.asList(
TestUtils.getMemberInfo(TestUtils.INSTANCE_FI, TestUtils.MEMBER_CLASS_GOV, TestUtils.MEMBER_CODE_M1,
TestUtils.SUBSYSTEM1),
TestUtils.getMemberInfo(TestUtils.INSTANCE_FI, TestUtils.MEMBER_CLASS_GOV, TestUtils.MEMBER_CODE_M1,
TestUtils.SUBSYSTEM2),
TestUtils.getMemberInfo(TestUtils.INSTANCE_FI, TestUtils.MEMBER_CLASS_GOV, TestUtils.MEMBER_CODE_M1,
null),
TestUtils.getMemberInfo(TestUtils.INSTANCE_EE, TestUtils.MEMBER_CLASS_PRO, TestUtils.MEMBER_CODE_M2,
TestUtils.SUBSYSTEM3),
TestUtils.getMemberInfo(TestUtils.INSTANCE_EE, TestUtils.MEMBER_CLASS_PRO, TestUtils.MEMBER_CODE_M1,
null),
TestUtils.getMemberInfo(TestUtils.INSTANCE_EE, TestUtils.MEMBER_CLASS_PRO, TestUtils.MEMBER_CODE_M1,
TestUtils.SUBSYSTEM1),
TestUtils.getMemberInfo(TestUtils.INSTANCE_EE, TestUtils.MEMBER_CLASS_PRO, TestUtils.MEMBER_CODE_M2,
null))
));
when(globalConfFacade.getMemberName(any())).thenAnswer(invocation -> {
ClientId clientId = (ClientId) invocation.getArguments()[0];
return clientId.getSubsystemCode() != null ? TestUtils.NAME_FOR + clientId.getSubsystemCode()
: TestUtils.NAME_FOR + "test-member";
});
clientService = new ClientService(clientRepository, globalConfFacade);
pemBytes = IOUtils.toByteArray(this.getClass().getClassLoader().
getResourceAsStream("google-cert.pem"));
derBytes = IOUtils.toByteArray(this.getClass().getClassLoader().
getResourceAsStream("google-cert.der"));
sqlFileBytes = IOUtils.toByteArray(this.getClass().getClassLoader().
getResourceAsStream("data.sql"));
assertTrue(pemBytes.length > 1);
assertTrue(derBytes.length > 1);
assertTrue(sqlFileBytes.length > 1);
}
@Test
public void updateConnectionType() throws Exception {
ClientId id = TestUtils.getM1Ss1ClientId();
ClientType clientType = clientService.getClient(id);
assertEquals("SSLNOAUTH", clientType.getIsAuthentication());
assertEquals(2, clientType.getLocalGroup().size());
try {
clientService.updateConnectionType(id, "FUBAR");
fail("should throw IllegalArgumentException");
} catch (IllegalArgumentException expected) {
}
clientService.updateConnectionType(id, "NOSSL");
clientType = clientService.getClient(id);
assertEquals("NOSSL", clientType.getIsAuthentication());
assertEquals(2, clientType.getLocalGroup().size());
}
@Test
public void addCertificatePem() throws Exception {
ClientId id = TestUtils.getM1Ss1ClientId();
ClientType clientType = clientService.getClient(id);
assertEquals(0, clientType.getIsCert().size());
clientService.addTlsCertificate(id, pemBytes);
clientType = clientService.getClient(id);
assertEquals(1, clientType.getIsCert().size());
assertTrue(Arrays.equals(derBytes, clientType.getIsCert().get(0).getData()));
}
@Test
public void addInvalidCertificate() throws Exception {
ClientId id = TestUtils.getM1Ss1ClientId();
ClientType clientType = clientService.getClient(id);
assertEquals(0, clientType.getIsCert().size());
try {
clientService.addTlsCertificate(id, sqlFileBytes);
fail("should have thrown CertificateException");
} catch (CertificateException expected) {
}
}
@Test
public void addCertificateDer() throws Exception {
ClientId id = TestUtils.getM1Ss1ClientId();
ClientType clientType = clientService.getClient(id);
assertEquals(0, clientType.getIsCert().size());
clientService.addTlsCertificate(id, derBytes);
clientType = clientService.getClient(id);
assertEquals(1, clientType.getIsCert().size());
assertTrue(Arrays.equals(derBytes, clientType.getIsCert().get(0).getData()));
}
@Test
public void addDuplicate() throws Exception {
ClientId id = TestUtils.getM1Ss1ClientId();
ClientType clientType = clientService.getClient(id);
assertEquals(0, clientType.getIsCert().size());
clientService.addTlsCertificate(id, derBytes);
try {
clientService.addTlsCertificate(id, pemBytes);
fail("should have thrown CertificateAlreadyExistsException");
} catch (CertificateAlreadyExistsException expected) {
}
}
@Test
public void deleteCertificate() throws Exception {
ClientId id = TestUtils.getM1Ss1ClientId();
ClientType clientType = clientService.getClient(id);
assertEquals(0, clientType.getIsCert().size());
clientService.addTlsCertificate(id, derBytes);
String hash = CryptoUtils.calculateCertHexHash(derBytes);
try {
clientService.deleteTlsCertificate(id, "wrong hash");
fail("should have thrown CertificateNotFoundException");
} catch (CertificateNotFoundException expected) {
}
clientType = clientService.getClient(id);
assertEquals(1, clientType.getIsCert().size());
clientService.deleteTlsCertificate(id, hash);
clientType = clientService.getClient(id);
assertEquals(0, clientType.getIsCert().size());
}
/* Test LOCAL client search */
@Test
public void findLocalClientsByNameIncludeMembers() {
List<ClientType> clients = clientService.findLocalClients(TestUtils.NAME_FOR + TestUtils.SUBSYSTEM1, null,
null,
null, null, true);
assertEquals(1, clients.size());
}
@Test
public void findLocalClientsByInstanceIncludeMembers() {
List<ClientType> clients = clientService.findLocalClients(null, TestUtils.INSTANCE_FI, null,
null, null, true);
assertEquals(5, clients.size());
}
@Test
public void findLocalClientsByClassIncludeMembers() {
List<ClientType> clients = clientService.findLocalClients(null, null, TestUtils.MEMBER_CLASS_GOV,
null, null, true);
assertEquals(5, clients.size());
}
@Test
public void findLocalClientsByInstanceAndMemberCodeIncludeMembers() {
List<ClientType> clients = clientService.findLocalClients(null, TestUtils.INSTANCE_FI, null,
TestUtils.MEMBER_CODE_M1, null, true);
assertEquals(3, clients.size());
}
@Test
public void findLocalClientsByAllTermsIncludeMembers() {
List<ClientType> clients = clientService.findLocalClients(TestUtils.NAME_FOR + TestUtils.SUBSYSTEM1,
TestUtils.INSTANCE_FI,
TestUtils.MEMBER_CLASS_GOV, TestUtils.MEMBER_CODE_M1, TestUtils.SUBSYSTEM1, true);
assertEquals(1, clients.size());
}
@Test
public void findLocalClientsByNameExcludeMembers() {
List<ClientType> clients = clientService.findLocalClients(TestUtils.NAME_FOR + TestUtils.SUBSYSTEM1, null,
null,
null, null, false);
assertEquals(1, clients.size());
}
@Test
public void findLocalClientsByInstanceExcludeMembers() {
List<ClientType> clients = clientService.findLocalClients(null, TestUtils.INSTANCE_FI, null,
null, null, false);
assertEquals(4, clients.size());
}
@Test
public void findLocalClientsByClassExcludeMembers() {
List<ClientType> clients = clientService.findLocalClients(null, null, TestUtils.MEMBER_CLASS_GOV,
null, null, false);
assertEquals(4, clients.size());
}
@Test
public void findLocalClientsByInstanceAndMemberCodeExcludeMembers() {
List<ClientType> clients = clientService.findLocalClients(null, TestUtils.INSTANCE_FI, null,
TestUtils.MEMBER_CODE_M1, null, false);
assertEquals(2, clients.size());
}
@Test
public void findLocalClientsByAllTermsExcludeMembers() {
List<ClientType> clients = clientService.findLocalClients(TestUtils.NAME_FOR + TestUtils.SUBSYSTEM1,
TestUtils.INSTANCE_FI,
TestUtils.MEMBER_CLASS_GOV, TestUtils.MEMBER_CODE_M1, TestUtils.SUBSYSTEM1, false);
assertEquals(1, clients.size());
}
/* Test GLOBAL client search */
@Test
public void findGlobalClientsByNameIncludeMembers() {
List<ClientType> clients = clientService.findGlobalClients(TestUtils.NAME_FOR + TestUtils.SUBSYSTEM1, null,
null,
null, null, true);
assertEquals(2, clients.size());
}
@Test
public void findGlobalClientsByInstanceIncludeMembers() {
List<ClientType> clients = clientService.findGlobalClients(null, TestUtils.INSTANCE_EE, null,
null, null, true);
assertEquals(4, clients.size());
}
@Test
public void findGlobalClientsByClassIncludeMembers() {
List<ClientType> clients = clientService.findGlobalClients(null, null, TestUtils.MEMBER_CLASS_GOV,
null, null, true);
assertEquals(3, clients.size());
}
@Test
public void findGlobalClientsByInstanceAndMemberCodeIncludeMembers() {
List<ClientType> clients = clientService.findGlobalClients(null, TestUtils.INSTANCE_FI, null,
TestUtils.MEMBER_CODE_M1, null, true);
assertEquals(3, clients.size());
}
@Test
public void findGlobalClientsByAllTermsIncludeMembers() {
List<ClientType> clients = clientService.findGlobalClients(TestUtils.NAME_FOR + TestUtils.SUBSYSTEM1,
TestUtils.INSTANCE_FI,
TestUtils.MEMBER_CLASS_GOV, TestUtils.MEMBER_CODE_M1, TestUtils.SUBSYSTEM1, true);
assertEquals(1, clients.size());
}
@Test
public void findGlobalClientsByNameExcludeMembers() {
List<ClientType> clients = clientService.findGlobalClients(TestUtils.NAME_FOR + TestUtils.SUBSYSTEM1, null,
null,
null, null, false);
assertEquals(2, clients.size());
}
@Test
public void findGlobalClientsByInstanceExcludeMembers() {
List<ClientType> clients = clientService.findGlobalClients(null, TestUtils.INSTANCE_EE, null,
null, null, false);
assertEquals(2, clients.size());
}
@Test
public void findGlobalClientsByClassExcludeMembers() {
List<ClientType> clients = clientService.findGlobalClients(null, null, TestUtils.MEMBER_CLASS_GOV,
null, null, false);
assertEquals(2, clients.size());
}
@Test
public void findGlobalClientsByInstanceAndMemberCodeExcludeMembers() {
List<ClientType> clients = clientService.findGlobalClients(null, TestUtils.INSTANCE_FI, null,
TestUtils.MEMBER_CODE_M1, null, false);
assertEquals(2, clients.size());
}
@Test
public void findGlobalClientsByAllTermsExcludeMembers() {
List<ClientType> clients = clientService.findGlobalClients(TestUtils.NAME_FOR + TestUtils.SUBSYSTEM1,
TestUtils.INSTANCE_FI,
TestUtils.MEMBER_CLASS_GOV, TestUtils.MEMBER_CODE_M1, TestUtils.SUBSYSTEM1, false);
assertEquals(1, clients.size());
}
@Test
public void getLocalClientMemberIds() {
        Set<ClientId> expected = new HashSet<>();
expected.add(ClientId.create(TestUtils.INSTANCE_FI,
TestUtils.MEMBER_CLASS_GOV, TestUtils.MEMBER_CODE_M1));
expected.add(ClientId.create(TestUtils.INSTANCE_FI,
TestUtils.MEMBER_CLASS_GOV, TestUtils.MEMBER_CODE_M2));
Set<ClientId> result = clientService.getLocalClientMemberIds();
assertEquals(expected, result);
}
}
|
kowalt/neuroCloud
|
BE/nnCloudRESTService/src/main/java/com/mycompany/nncloudrestservice/pojo/User.java
|
<gh_stars>0
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.mycompany.nncloudrestservice.pojo;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
/**
*
* @author Tomasz
*/
public class User
{
private int id;
private String login;
private String email;
private String password;
private boolean activated;
private String info_to_admin;
private String session_id;
private Date registered;
private List<Network> networks = new ArrayList<>();
private PerformanceSettings performance_settings;
public PerformanceSettings getPerformance_settings() {
return performance_settings;
}
public void setPerformance_settings(PerformanceSettings performance_settings) {
this.performance_settings = performance_settings;
}
public List<Network> getNetworks() {
return networks;
}
public void setNetworks(List<Network> networks) {
this.networks = networks;
}
public int getId()
{
return id;
}
public void setId(int id)
{
this.id = id;
}
public String getLogin() {
return login;
}
public void setLogin(String login) {
this.login = login;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public boolean isActivated() {
return activated;
}
public void setActivated(boolean activated) {
this.activated = activated;
}
public String getInfo_to_admin() {
return info_to_admin;
}
public void setInfo_to_admin(String info_to_admin) {
this.info_to_admin = info_to_admin;
}
public String getSession_id() {
return session_id;
}
public void setSession_id(String session_id) {
this.session_id = session_id;
}
public Date getRegistered() {
return registered;
}
public void setRegistered(Date registered) {
this.registered = registered;
}
}
|
hw233/home3
|
core/common/data/commonData/src/main/java/com/home/commonData/message/login/serverRequest/login/login/ClientLoginTransferMO.java
|
<gh_stars>1-10
package com.home.commonData.message.login.serverRequest.login.login;
import com.home.commonData.data.login.ClientLoginDO;
/** Client login transfer message */
public class ClientLoginTransferMO
{
    /** Login data */
ClientLoginDO data;
    /** HTTP message ID */
int httpID;
    /** IP address */
String ip;
}
|
antonpaly4/urbanizze
|
node_modules/webpack/node_modules/webpack-core/lib/ConcatSource.js
|
<reponame>antonpaly4/urbanizze
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author <NAME> @sokra
*/
var SourceMapNodeFastSource = require("./SourceMapNodeFastSource");
var SourceNode = require("source-map").SourceNode;
function ConcatSource() {
this.children = Array.prototype.slice.call(arguments);
SourceMapNodeFastSource.call(this);
}
module.exports = ConcatSource;
ConcatSource.prototype = Object.create(SourceMapNodeFastSource.prototype);
ConcatSource.prototype.constructor = ConcatSource;
ConcatSource.prototype._bakeSource = function() {
return this.children.map(function(item) {
return typeof item === "string" ? item : item.source();
}).join("");
};
ConcatSource.prototype._bake = function() {
var node = new SourceNode(null, null, null, this.children.map(function(item) {
return typeof item === "string" ? item : item.node();
}));
return node;
};
ConcatSource.prototype.add = function(item) {
this.children.push(item);
if(this._node) {
if(typeof item === "string")
this._node.add(item);
else
this._node.add(item.node());
}
};
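// Illustrative usage sketch (not part of the original module), assuming the
// source() accessor provided by the SourceMapNodeFastSource base class: children
// may be plain strings or other Source objects, and source() concatenates their
// generated code in order.
var exampleConcat = new ConcatSource("/* header */\n", "var a = 1;\n");
exampleConcat.add("var b = 2;\n");
var exampleOutput = exampleConcat.source(); // "/* header */\nvar a = 1;\nvar b = 2;\n"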
|
int-tt/aws-sdk-go-v2
|
service/iot/api_op_ListAttachedPolicies.go
|
// Code generated by smithy-go-codegen DO NOT EDIT.
package iot
import (
"context"
awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware"
"github.com/aws/aws-sdk-go-v2/aws/signer/v4"
"github.com/aws/aws-sdk-go-v2/service/iot/types"
"github.com/awslabs/smithy-go/middleware"
smithyhttp "github.com/awslabs/smithy-go/transport/http"
)
// Lists the policies attached to the specified thing group.
func (c *Client) ListAttachedPolicies(ctx context.Context, params *ListAttachedPoliciesInput, optFns ...func(*Options)) (*ListAttachedPoliciesOutput, error) {
if params == nil {
params = &ListAttachedPoliciesInput{}
}
result, metadata, err := c.invokeOperation(ctx, "ListAttachedPolicies", params, optFns, addOperationListAttachedPoliciesMiddlewares)
if err != nil {
return nil, err
}
out := result.(*ListAttachedPoliciesOutput)
out.ResultMetadata = metadata
return out, nil
}
type ListAttachedPoliciesInput struct {
// The group or principal for which the policies will be listed. Valid principals
// are CertificateArn (arn:aws:iot:region:accountId:cert/certificateId),
// thingGroupArn (arn:aws:iot:region:accountId:thinggroup/groupName) and CognitoId
// (region:id).
//
// This member is required.
Target *string
// The token to retrieve the next set of results.
Marker *string
// The maximum number of results to be returned per request.
PageSize *int32
// When true, recursively list attached policies.
Recursive *bool
}
type ListAttachedPoliciesOutput struct {
// The token to retrieve the next set of results, or ``null`` if there are no more
// results.
NextMarker *string
// The policies.
Policies []*types.Policy
// Metadata pertaining to the operation's result.
ResultMetadata middleware.Metadata
}
func addOperationListAttachedPoliciesMiddlewares(stack *middleware.Stack, options Options) (err error) {
err = stack.Serialize.Add(&awsRestjson1_serializeOpListAttachedPolicies{}, middleware.After)
if err != nil {
return err
}
err = stack.Deserialize.Add(&awsRestjson1_deserializeOpListAttachedPolicies{}, middleware.After)
if err != nil {
return err
}
if err = addSetLoggerMiddleware(stack, options); err != nil {
return err
}
if err = awsmiddleware.AddClientRequestIDMiddleware(stack); err != nil {
return err
}
if err = smithyhttp.AddComputeContentLengthMiddleware(stack); err != nil {
return err
}
if err = addResolveEndpointMiddleware(stack, options); err != nil {
return err
}
if err = v4.AddComputePayloadSHA256Middleware(stack); err != nil {
return err
}
if err = addRetryMiddlewares(stack, options); err != nil {
return err
}
if err = addHTTPSignerV4Middleware(stack, options); err != nil {
return err
}
if err = awsmiddleware.AddAttemptClockSkewMiddleware(stack); err != nil {
return err
}
if err = addClientUserAgent(stack); err != nil {
return err
}
if err = smithyhttp.AddErrorCloseResponseBodyMiddleware(stack); err != nil {
return err
}
if err = smithyhttp.AddCloseResponseBodyMiddleware(stack); err != nil {
return err
}
if err = addOpListAttachedPoliciesValidationMiddleware(stack); err != nil {
return err
}
if err = stack.Initialize.Add(newServiceMetadataMiddleware_opListAttachedPolicies(options.Region), middleware.Before); err != nil {
return err
}
if err = addRequestIDRetrieverMiddleware(stack); err != nil {
return err
}
if err = addResponseErrorMiddleware(stack); err != nil {
return err
}
if err = addRequestResponseLogging(stack, options); err != nil {
return err
}
return nil
}
func newServiceMetadataMiddleware_opListAttachedPolicies(region string) *awsmiddleware.RegisterServiceMetadata {
return &awsmiddleware.RegisterServiceMetadata{
Region: region,
ServiceID: ServiceID,
SigningName: "execute-api",
OperationName: "ListAttachedPolicies",
}
}
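// exampleListAttachedPolicies is an illustrative sketch, not part of the generated
// client: it pages through every policy attached to a target by feeding NextMarker
// back in as Marker until the service stops returning one. The target string and
// page size are caller-supplied assumptions.
func exampleListAttachedPolicies(ctx context.Context, c *Client, target string) ([]*types.Policy, error) {
	pageSize := int32(50)
	input := &ListAttachedPoliciesInput{Target: &target, PageSize: &pageSize}
	var policies []*types.Policy
	for {
		out, err := c.ListAttachedPolicies(ctx, input)
		if err != nil {
			return nil, err
		}
		policies = append(policies, out.Policies...)
		if out.NextMarker == nil {
			break
		}
		input.Marker = out.NextMarker
	}
	return policies, nil
}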
|
TimeBombx/launch_academy
|
phase 11/exercises/ajax-and-rails/spec/models/format_spec.rb
|
<reponame>TimeBombx/launch_academy<filename>phase 11/exercises/ajax-and-rails/spec/models/format_spec.rb
require 'rails_helper'
RSpec.describe Format, type: :model do
it { should have_many(:videos) }
context "name" do
subject { Format.new(name: "VHS") }
it { should validate_presence_of(:name) }
it { should validate_uniqueness_of(:name) }
end
end
|
jayliu9/OOD-exercises-NEU
|
Labs/lab6/src/main/java/problem2/EmptySet.java
|
package problem2;
/**
* Represents the empty set of integers.
*/
public class EmptySet implements ISet {
/**
* Checks if the set is empty
* @return true if the set is empty, false otherwise
*/
@Override
public Boolean isEmpty() {
return true;
}
/**
* Adds an integer to the set
* @param n The integer to add
* @return A new Set object with the added integer.
*/
@Override
public ISet add(Integer n) {
return new ConsSet(n, this);
}
/**
* Checks if the set contains a given integer
* @param n The integer to check
* @return true if the set contains a given integer, false otherwise
*/
@Override
public Boolean contains(Integer n) {
return false;
}
/**
* Removes the given integer from the set
* @param n The integer to remove
* @return A new Set object without the removed integer
*/
@Override
public ISet remove(Integer n) {
return this;
}
/**
* Returns the number of elements in the set
* @return The number of elements.
*/
@Override
public Integer size() {
return 0;
}
/**
* Gets a hash code value for the object.
* @return a hash code value for the object.
*/
@Override
public int hashCode() {
return 42;
}
/**
* Checks if two objects are equal
* @param o the object to compare this to
* @return true if these two objects are equal, false otherwise.
*/
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
return true;
}
/**
* Creates a string representation of the EmptySet.
* @return a string representation of the EmptySet.
*/
@Override
public String toString() {
return "EmptySet{}";
}
}
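// Illustrative usage sketch (not part of the original lab file); it exercises only
// the EmptySet behaviour shown above and assumes the ConsSet implementation from
// the same package.
class EmptySetUsageSketch {
    public static void main(String[] args) {
        ISet empty = new EmptySet();
        System.out.println(empty.isEmpty());         // true
        System.out.println(empty.contains(3));       // false
        System.out.println(empty.remove(3).size());  // 0: removing from the empty set is a no-op
        ISet one = empty.add(3);                     // wraps this EmptySet in a ConsSet
        System.out.println(one.contains(3));         // expected true, per the ConsSet implementation
    }
}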
|
sahujaunpuri/Backendless
|
src/com/backendless/media/StreamVideoQuality.java
|
/*
* ********************************************************************************************************************
* <p/>
* BACKENDLESS.COM CONFIDENTIAL
* <p/>
* ********************************************************************************************************************
* <p/>
* Copyright 2012 BACKENDLESS.COM. All Rights Reserved.
* <p/>
* NOTICE: All information contained herein is, and remains the property of Backendless.com and its suppliers,
* if any. The intellectual and technical concepts contained herein are proprietary to Backendless.com and its
* suppliers and may be covered by U.S. and Foreign Patents, patents in process, and are protected by trade secret
* or copyright law. Dissemination of this information or reproduction of this material is strictly forbidden
* unless prior written permission is obtained from Backendless.com.
* <p/>
* ********************************************************************************************************************
*/
package com.backendless.media;
public enum StreamVideoQuality
{
LOW_170("176x144, 30 fps, 170 Kbps"), LOW_200("176x144, 30 fps, 200 Kbps"), LOW_250("176x144, 30 fps, 250 Kbps"), MEDIUM_250(
"320x240, 30 fps, 250 Kbps"), MEDIUM_300("352x288, 30 fps, 300 Kbps"), MEDIUM_400("352x288, 30 fps, 400 Kbps"), HIGH_600(
"640x480, 30 fps, 600 Kbps");
private String value = "";
public static StreamVideoQuality getFromString( String quality )
{
if( quality == null )
{
return null;
}
for( StreamVideoQuality streamQuality : values() )
{
if( quality.equals( streamQuality.getValue() ) )
{
return streamQuality;
}
}
return null;
}
private StreamVideoQuality(String value)
{
this.value = value;
}
public String getValue()
{
return value;
}
}
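// Illustrative usage sketch (not part of the original file): lookup works on the
// exact human-readable value string, and unknown values resolve to null.
class StreamVideoQualityUsageSketch {
    public static void main(String[] args) {
        StreamVideoQuality high = StreamVideoQuality.getFromString("640x480, 30 fps, 600 Kbps");
        System.out.println(high);                                          // HIGH_600
        System.out.println(StreamVideoQuality.getFromString("800x600"));   // null
    }
}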
|
kdl222/ISIS3
|
isis/src/juno/objs/JunoCamera/JunoDistortionMap.cpp
|
/** This is free and unencumbered software released into the public domain.
The authors of ISIS do not claim copyright on the contents of this file.
For more details about the LICENSE terms and the AUTHORS, you will
find files of those names at the top level of this repository. **/
/* SPDX-License-Identifier: CC0-1.0 */
#include "IString.h"
#include "JunoDistortionMap.h"
namespace Isis {
/**
* Juno JunoCam distortion map constructor
*
* Create a distortion map for Juno's JunoCam camera. This class maps between distorted
* and undistorted focal plane x/y's. The default mapping is the
* identity, that is, the focal plane x/y and undistorted focal plane
* x/y will be identical. The Z direction is set internally to positive for
* JunoCam.
*
* @param parent the parent camera that will use this distortion map
*
*/
JunoDistortionMap::JunoDistortionMap(Camera *parent)
: CameraDistortionMap(parent, 1.0) {
}
/**
* Destructor
*/
JunoDistortionMap::~JunoDistortionMap() {
}
/**
* Load distortion coefficients for JunoCam
*
* This method loads the distortion coefficients from the instrument
* kernel. JunoCam's coefficients in the NAIF instrument kernel are
* expected to be in the form of:
*
* @code
* INS-61500_DISTORTION_K0 = coefficient, index 0
* INS-61500_DISTORTION_K1 = coefficient, index 1
* INS-61500_DISTORTION_K2 = coefficient, index 2
* @endcode
*
* These coefficients are designed for use with pixel coordinates, so they
* are scaled based on the pixel pitch to operate in focal plane millimeters.
* These coefficient will be used to convert from undistorted focal plane x,y
* to distorted focal plane x,y as follows
*
* @code
   *   r2 = (ux * ux) + (uy * uy);
   *   dr = 1 + INS-61500_DISTORTION_K0 + INS-61500_DISTORTION_K1*r2 + INS-61500_DISTORTION_K2*r2*r2;
* dx = ux * dr;
* dy = uy * dr;
* @endcode
*
* @param naifIkCode Code to search for in instrument kernel
*/
void JunoDistortionMap::SetDistortion(int naifIkCode) {
// Use the pixel pitch to scale k1 and k2 coefficients to operate in focal
// plane coordinates (millimeters). The coefficients found in the kernels
// are based on detector coordinates (pixels).
double pp = p_camera->PixelPitch();
double p2 = pp * pp;
// Currently k0 is non-existant in kernels (i.e equals zero). The try is
// here in case this coefficient is needed for future distortion models.
try {
QString odk0 = "INS" + toString(naifIkCode) + "_DISTORTION_K0";
p_odk.push_back(p_camera->Spice::getDouble(odk0));
}
catch (IException &e) {
p_odk.push_back(0.0);
}
QString odk1 = "INS" + toString(naifIkCode) + "_DISTORTION_K1";
p_odk.push_back(p_camera->Spice::getDouble(odk1) / p2);
QString odk2 = "INS" + toString(naifIkCode) + "_DISTORTION_K2";
p_odk.push_back(p_camera->Spice::getDouble(odk2) / (p2 * p2));
}
/**
* Compute distorted focal plane x/y
*
* Compute distorted focal plane x/y given an undistorted focal plane x/y.
* This virtual method is used to apply various techniques for adding
* optical distortion in the focal plane of a camera. The default
* implementation of this virtual method uses a polynomial distortion if
* the SetDistortion method was invoked.
* After calling this method, you can obtain the distorted x/y via the
* FocalPlaneX and FocalPlaneY methods
*
* @param ux undistorted focal plane x in millimeters
* @param uy undistorted focal plane y in millimeters
*
   * @return @b true if the conversion was successful
*
* @see SetDistortion
*/
bool JunoDistortionMap::SetUndistortedFocalPlane(const double ux,
const double uy) {
p_undistortedFocalPlaneX = ux;
p_undistortedFocalPlaneY = uy;
// Compute the distance from the focal plane center and if we are
// close to the center then assume no distortion
double r2 = (ux * ux) + (uy * uy);
if (r2 <= 1.0E-6) {
p_focalPlaneX = ux;
p_focalPlaneY = uy;
return true;
}
// The equation given in the IK computes the undistorted focal plane
// ux = dx * (1 + k1*r^2), r^2 = dx^2 + dy^2
double dr = 1 + p_odk[0] + p_odk[1]*r2 + p_odk[2]*r2*r2;
p_focalPlaneX = ux * dr;
p_focalPlaneY = uy * dr;
return true;
}
/**
* Compute undistorted focal plane x/y
*
* Compute undistorted focal plane x/y given a distorted focal plane x/y.
* This virtual method can be used to apply various techniques for removing
* optical distortion in the focal plane of a camera. The default
* implementation uses a polynomial distortion if the SetDistortion method
* is invoked. After calling this method, you can obtain the undistorted
* x/y via the UndistortedFocalPlaneX and UndistortedFocalPlaneY methods
*
* @param dx distorted focal plane x in millimeters
* @param dy distorted focal plane y in millimeters
*
* @return if the conversion was successful
* @see SetDistortion
* @todo Generalize polynomial equation
*/
bool JunoDistortionMap::SetFocalPlane(double dx,
double dy) {
p_focalPlaneX = dx;
p_focalPlaneY = dy;
// Get the distance from the focal plane center and if we are close
// then skip the distortion
double r2 = (dx * dx) + (dy * dy);
if (r2 <= 1.0E-6) {
p_undistortedFocalPlaneX = dx;
p_undistortedFocalPlaneY = dy;
return true;
}
bool converged = false;
int i = 0;
int maximumIterations = 15;
double tolerance = p_camera->PixelPitch() / 100.0;
double uxEstimate = dx;
double uyEstimate = dy;
double uxPrev = dx;
double uyPrev = dy;
double xDistortion = 0.0;
double yDistortion = 0.0;
double dr = 0.0;
while (!converged) {
dr = p_odk[0] + p_odk[1]*r2 + p_odk[2]*r2*r2;
xDistortion = uxEstimate * dr;
yDistortion = uyEstimate * dr;
uxEstimate = dx - xDistortion;
uyEstimate = dy - yDistortion;
i++;
if (fabs(uxEstimate - uxPrev) < tolerance &&
fabs(uyEstimate - uyPrev) < tolerance ) {
converged = true;
}
// If doesn't converge, don't do correction
if (i > maximumIterations) {
p_undistortedFocalPlaneX = dx;
p_undistortedFocalPlaneY = dy;
break;
}
r2 = (uxEstimate * uxEstimate) + (uyEstimate * uyEstimate);
uxPrev = uxEstimate;
uyPrev = uyEstimate;
}
p_undistortedFocalPlaneX = uxEstimate;
p_undistortedFocalPlaneY = uyEstimate;
return true;
}
}
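// Illustrative sketch (not part of the ISIS source): the same radial model and
// fixed-point inversion as above, outside the Camera framework and with made-up
// coefficients rather than values from the instrument kernel. Distorting a point
// and then undistorting it should approximately round-trip.
#include <cstdio>
static void junoDistortionRoundTripSketch() {
  const double k0 = 0.0, k1 = -2.3e-5, k2 = 1.1e-9;  // assumed coefficients
  const double ux = 4.0, uy = -2.5;                  // undistorted focal plane (mm)
  double r2 = ux * ux + uy * uy;
  double dr = 1.0 + k0 + k1 * r2 + k2 * r2 * r2;
  double dx = ux * dr, dy = uy * dr;                 // forward model (add distortion)
  double gx = dx, gy = dy;                           // iterate to remove it again
  for (int i = 0; i < 15; i++) {
    double g2 = gx * gx + gy * gy;
    double d = k0 + k1 * g2 + k2 * g2 * g2;
    gx = dx - gx * d;
    gy = dy - gy * d;
  }
  std::printf("recovered (%f, %f) vs original (%f, %f)\n", gx, gy, ux, uy);
}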
|
SmoothSync/smoothsetup
|
library/src/main/java/com/smoothsync/smoothsetup/wizard/RequestPermissions.java
|
/*
* Copyright (c) 2018 dmfs GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.smoothsync.smoothsetup.wizard;
import android.content.Context;
import android.os.Build;
import android.os.Parcel;
import com.smoothsync.smoothsetup.microfragments.PermissionMicroFragment;
import com.smoothsync.smoothsetup.utils.Denied;
import com.smoothsync.smoothsetup.utils.IterableBox;
import com.smoothsync.smoothsetup.utils.StringBox;
import org.dmfs.android.microfragments.MicroFragment;
import org.dmfs.android.microwizard.MicroWizard;
import org.dmfs.android.microwizard.box.Box;
import org.dmfs.android.microwizard.box.Boxable;
import org.dmfs.android.microwizard.box.Unboxed;
import org.dmfs.jems.iterable.decorators.Mapped;
/**
* A MicroWizard which asks the user about one or multiple permission.
* <p>
* If running on Android 5 or lower this step is always skipped.
*
* @author <NAME>
*/
public final class RequestPermissions<T extends Boxable<T>> implements MicroWizard<T>
{
private final Iterable<String> mPermissions;
private final MicroWizard<T> mNext;
public RequestPermissions(Iterable<String> permissions, MicroWizard<T> next)
{
mPermissions = permissions;
mNext = next;
}
@Override
public MicroFragment<?> microFragment(Context context, T argument)
{
// skip this on Android 5 and below or if all permissions have been granted before
return Build.VERSION.SDK_INT < 23 || !new Denied(context, mPermissions).iterator().hasNext() ?
mNext.microFragment(context, argument) : new PermissionMicroFragment<>(mPermissions, argument, mNext);
}
@Override
public Box<MicroWizard<T>> boxed()
{
return new WizardBox<>(mPermissions, mNext);
}
public final static class WizardBox<T extends Boxable<T>> implements Box<MicroWizard<T>>
{
private final Iterable<String> mPermissions;
private final MicroWizard<T> mNext;
public WizardBox(Iterable<String> permissions, MicroWizard<T> next)
{
mPermissions = permissions;
mNext = next;
}
@Override
public int describeContents()
{
return 0;
}
@Override
public void writeToParcel(Parcel parcel, int i)
{
parcel.writeParcelable(new IterableBox<>(new Mapped<>(StringBox::new, mPermissions)), i);
parcel.writeParcelable(mNext.boxed(), i);
}
@Override
public MicroWizard<T> value()
{
return new RequestPermissions<>(mPermissions, mNext);
}
public final static Creator<WizardBox<?>> CREATOR = new Creator<WizardBox<?>>()
{
@SuppressWarnings("unchecked") // in static context we don't know about the generic type
@Override
public WizardBox<?> createFromParcel(Parcel parcel)
{
Iterable<Boxable<String>> boxablePermissions = new Unboxed<Iterable<Boxable<String>>>(parcel).value();
Iterable<String> permissions = new Mapped<>(b -> b.boxed().value(), boxablePermissions);
return new WizardBox(permissions, new RequestPermissions(permissions, new Unboxed<MicroWizard<?>>(parcel).value()));
}
@Override
public WizardBox<?>[] newArray(int i)
{
return new WizardBox[i];
}
};
}
}
|
joeyliu2012/js-practice
|
node/mvc-newslist/controller/admin.js
|
const adminService = require('../service/admin');
module.exports = {
async showIndex(ctx) {
// ctx.body = "admin homepage";
await ctx.render('admin/admin.pug')
},
async addNews(ctx) {
// ctx.body = "addNews page";
await ctx.render('admin/addNews.pug')
},
async newsList(ctx) {
// ctx.body = "addNews page";
let size = 5;
let p = ctx.query.p || 1;
        let formatData = adminService.newsList(p, size);
        let { prev, next, pages } = adminService.getPages(p, size);
await ctx.render('admin/newsList.pug',{
formatData,
prev,
next,
pages
})
},
async addNewsData(ctx) {
// console.log(ctx.request.body);
// console.log(ctx.request.files);
let res = await adminService.addNewsData(ctx.request);
await ctx.render('admin/message.pug', {
res
});
},
async deleteList(ctx) {
// console.log(ctx.query.id);
let id = ctx.query.id;
let res = await adminService.deleteList(id);
console.log(res);
// if(res.code == 0) {
ctx.redirect('/admin/newsList');
// }
}
}
|
viloboda/DynamicEditor
|
app/src/main/java/com/example/vloboda/dynamicentityeditor/dynamic/DynamicAttributesFactoryImpl.java
|
<gh_stars>1-10
package com.example.vloboda.dynamicentityeditor.dynamic;
import android.util.SparseArray;
import com.example.bl.DynamicFieldsFactory;
import com.example.bl.ui.DymamicViewGroup;
import com.example.bl.ui.DynamicView;
import com.example.bl.ui.ViewFactory;
import com.example.bl.ui.DynamicViewContainer;
import com.example.bl.ui.UICommonServices;
import com.example.model.FieldConfiguration;
import com.example.model.EditObjectTemplateDto;
public class DynamicAttributesFactoryImpl implements DynamicFieldsFactory {
private SparseArray<ViewFactory> map = new SparseArray<>(15);
public DynamicAttributesFactoryImpl(UICommonServices commonServices) {
map.put(FieldConfiguration.CONTROL_TYPE_TEXT, new EditTextFactory(commonServices));
map.put(FieldConfiguration.CONTROL_TYPE_BOOLEAN, new EditSwitchBoxFactory(commonServices));
map.put(FieldConfiguration.CONTROL_TYPE_NUMBER, new EditNumberFactory(commonServices));
map.put(FieldConfiguration.CONTROL_TYPE_RADIO_LIST, new EditRadioGroupListFactory(commonServices));
}
@Override
public DynamicView getView(DynamicViewContainer container, FieldConfiguration config, EditObjectTemplateDto.ItemDto templateItem) {
int controlType = templateItem != null && templateItem.getEditControlType() != 0 ? templateItem.getEditControlType() : config.controlType;
ViewFactory factory = map.get(controlType);
if (factory == null) {
throw new RuntimeException("Can't find factory for " + controlType);
}
return factory.getView(container, config);
}
@Override
public DymamicViewGroup getViewGroup(DynamicViewContainer container, String groupName) {
return new ViewGroupFactory().getView(container, groupName);
}
}
|
YJBeetle/QtAndroidAPI
|
android-28/android/widget/GridLayout_Spec.cpp
|
<reponame>YJBeetle/QtAndroidAPI<filename>android-28/android/widget/GridLayout_Spec.cpp
#include "./GridLayout_Alignment.hpp"
#include "../../JObject.hpp"
#include "./GridLayout_Spec.hpp"
namespace android::widget
{
// Fields
// QJniObject forward
GridLayout_Spec::GridLayout_Spec(QJniObject obj) : JObject(obj) {}
// Constructors
// Methods
jboolean GridLayout_Spec::equals(JObject arg0) const
{
return callMethod<jboolean>(
"equals",
"(Ljava/lang/Object;)Z",
arg0.object<jobject>()
);
}
jint GridLayout_Spec::hashCode() const
{
return callMethod<jint>(
"hashCode",
"()I"
);
}
} // namespace android::widget
|
egymgmbh/sitebricks
|
sitebricks-acceptance-tests/src/test/java/com/google/sitebricks/acceptance/HtmlValidatingAcceptanceTest.java
|
package com.google.sitebricks.acceptance;
import java.util.Arrays;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.openqa.selenium.WebDriver;
import org.testng.annotations.Test;
import com.google.sitebricks.acceptance.page.HtmlValidatingPage;
import com.google.sitebricks.acceptance.util.AcceptanceTest;
/**
*
*/
@Test(suiteName = AcceptanceTest.SUITE)
public class HtmlValidatingAcceptanceTest {
public void shouldGetValidationViolations() {
WebDriver driver = AcceptanceTest.createWebDriver();
HtmlValidatingPage page = HtmlValidatingPage.open(driver);
List<String> expectedValidationViolations = Arrays.asList(
"Constraint Violation Length First Name Message",
"Constraint Violation Null Age Message",
"Constraint Violation Length Last Name Message");
List<String> actualValidationViolations = page.getValidationViolations();
assert CollectionUtils.isEqualCollection(expectedValidationViolations, actualValidationViolations)
: "validation violations didn't match what was expected";
}
}
|
qq676708415/mq
|
common/src/main/java/com/hailiang/common/mq/thread/TaskProducer.java
|
package com.hailiang.common.mq.thread;
import com.hailiang.common.mq.queue.Queue;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;
/**
* Manager for the blocking message queue.
*/
@Component
public class TaskProducer
{
private static org.slf4j.Logger logger = LoggerFactory.getLogger(TaskProducer.class);
/**
* Injected queue
*/
private static Queue queue;
/**
* Injected consumer thread pool
*/
private static Consumer consumer;
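// Both collaborators live in static fields; Spring populates them through the instance setters below, and the consumer pool is started as soon as it has been injected.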
private static void startConsumer() {
logger.info("开始启动Consumer线程池");
//处理线程池
consumer.startConsume();
try {
Thread.sleep(1000);
}
catch (InterruptedException ie)
{
logger.error("等待线程池启动时发生错误", ie);
}
}
@Autowired(required = true)
public void setQueue(@Qualifier("queueffjfout") Queue queue) {
TaskProducer.queue = queue;
}
@Autowired(required = true)
public void setConsumer(@Qualifier("outputpoollic") Consumer consumer) {
TaskProducer.consumer = consumer;
startConsumer();
}
/**
* Entry point: enqueue a message.
* @param msg the message to add to the queue
*/
public void addRedisRecord(String msg) {
logger.debug("开始加入消息到队列:"+msg);
queue.put(msg);
}
}
|
Darkdragon84/ldt
|
ldt/tests/dicts/test_resources.py
|
# -*- coding: utf-8 -*-
import unittest
import os
os.environ["TESTING_LDT"] = "TRUE"
import ldt
class Tests(unittest.TestCase):
    """
    The tests in this block inspect the loading of various resources: names,
    numbers, associations, file and url detection.
    """
    @classmethod
    def setUpClass(cls):
        """Setting up the test variables."""
        cls.web_dict = ldt.dicts.resources.WebDictionary()
        cls.name_dict = ldt.dicts.resources.NameDictionary(
            language="english", lowercasing=False)
        cls.number_dict = ldt.dicts.resources.NumberDictionary(
            language="english")
        cls.file_dict = ldt.dicts.resources.FileDictionary()
    @classmethod
    def tearDownClass(cls):
        """Clearing up the test variables."""
        cls.web_dict = None
        cls.name_dict = None
        cls.number_dict = None
        cls.file_dict = None
    def test_names_language(self):
        self.assertEqual("en", self.name_dict.language)
    def test_names_is_a_word(self):
        self.assertTrue(self.name_dict.is_a_word("Alice"))
    def test_names_lowercasing(self):
        test_dict = ldt.dicts.resources.NameDictionary(language="english",
                                                       lowercasing=True)
        self.assertTrue(test_dict.is_a_word("alice"))
    def test_numbers_word(self):
        self.assertTrue(self.number_dict.is_a_word("one"))
    def test_numbers_digit(self):
        self.assertTrue(self.number_dict.is_a_word("2"))
    #todo: split such cases?
    def test_numbers_mixed(self):
        self.assertFalse(self.number_dict.is_a_word("test2"))
    def test_associations_word(self):
        test_dict = ldt.dicts.resources.AssociationDictionary(
            language="english", lowercasing=False)
        self.assertTrue(test_dict.is_a_word("falcon"))
    def test_associations_data(self):
        test_dict = ldt.dicts.resources.AssociationDictionary(
            language="english", lowercasing=True)
        # print(list(test_dict.data.keys()))
        self.assertIn("eagle", test_dict.data["falcon"])
    def test_associations_pair(self):
        test_dict = ldt.dicts.resources.AssociationDictionary(
            language="english", lowercasing=True)
        self.assertTrue(test_dict.are_related("eagle", "falcon"))
    def test_domain(self):
        self.assertTrue(self.web_dict.is_a_word("example.com"))
    def test_www(self):
        self.assertTrue(self.web_dict.is_a_word("www.bizarre.bzzzz"))
    def test_domain_long(self):
        self.assertTrue(self.web_dict.is_a_word("example.com/sub/something"))
    def test_file_name(self):
        self.assertTrue(self.file_dict.is_a_word("cat.jpg"))
    def test_file_path(self):
        self.assertTrue(self.file_dict.is_a_word("path/to/cat/cat.jpg"))
if __name__ == '__main__':
    unittest.main()
|
rohankumardubey/tinkerpop
|
gremlin-core/src/main/java/org/apache/tinkerpop/gremlin/process/traversal/P.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tinkerpop.gremlin.process.traversal;
import org.apache.tinkerpop.gremlin.process.traversal.util.AndP;
import org.apache.tinkerpop.gremlin.process.traversal.util.OrP;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Collection;
import java.util.function.BiPredicate;
import java.util.function.Predicate;
/**
* @author <NAME> (http://markorodriguez.com)
* @author <NAME> (http://stephen.genoprime.com)
*/
public class P<V> implements Predicate<V>, Serializable, Cloneable {
protected BiPredicate<V, V> biPredicate;
protected V value;
protected V originalValue;
public P(final BiPredicate<V, V> biPredicate, final V value) {
this.value = value;
this.originalValue = value;
this.biPredicate = biPredicate;
}
public BiPredicate<V, V> getBiPredicate() {
return this.biPredicate;
}
/**
* Gets the original value used at time of construction of the {@code P}. This value can change its type
* in some cases.
*/
public V getOriginalValue() {
return originalValue;
}
/**
* Gets the current value to be passed to the predicate for testing.
*/
public V getValue() {
return this.value;
}
public void setValue(final V value) {
this.value = value;
}
@Override
public boolean test(final V testValue) {
return this.biPredicate.test(testValue, this.value);
}
@Override
public int hashCode() {
int result = this.biPredicate.hashCode();
if (null != this.originalValue)
result ^= this.originalValue.hashCode();
return result;
}
@Override
public boolean equals(final Object other) {
return other instanceof P &&
((P) other).getClass().equals(this.getClass()) &&
((P) other).getBiPredicate().equals(this.biPredicate) &&
((((P) other).getOriginalValue() == null && this.originalValue == null) || ((P) other).getOriginalValue().equals(this.originalValue));
}
@Override
public String toString() {
return null == this.originalValue ? this.biPredicate.toString() : this.biPredicate.toString() + "(" + this.originalValue + ")";
}
@Override
public P<V> negate() {
return new P<>(this.biPredicate.negate(), this.originalValue);
}
@Override
public P<V> and(final Predicate<? super V> predicate) {
if (!(predicate instanceof P))
throw new IllegalArgumentException("Only P predicates can be and'd together");
return new AndP<>(Arrays.asList(this, (P<V>) predicate));
}
@Override
public P<V> or(final Predicate<? super V> predicate) {
if (!(predicate instanceof P))
throw new IllegalArgumentException("Only P predicates can be or'd together");
return new OrP<>(Arrays.asList(this, (P<V>) predicate));
}
public P<V> clone() {
try {
return (P<V>) super.clone();
} catch (final CloneNotSupportedException e) {
throw new IllegalStateException(e.getMessage(), e);
}
}
//////////////// statics
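// Factory helpers: eq/neq/lt/lte/gt/gte wrap Compare predicates, within/without wrap Contains, and inside/outside/between compose them via AndP/OrP.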
public static <V> P<V> eq(final V value) {
return new P(Compare.eq, value);
}
public static <V> P<V> neq(final V value) {
return new P(Compare.neq, value);
}
public static <V> P<V> lt(final V value) {
return new P(Compare.lt, value);
}
public static <V> P<V> lte(final V value) {
return new P(Compare.lte, value);
}
public static <V> P<V> gt(final V value) {
return new P(Compare.gt, value);
}
public static <V> P<V> gte(final V value) {
return new P(Compare.gte, value);
}
public static <V> P<V> inside(final V first, final V second) {
return new AndP<V>(Arrays.asList(new P(Compare.gt, first), new P(Compare.lt, second)));
}
public static <V> P<V> outside(final V first, final V second) {
return new OrP<V>(Arrays.asList(new P(Compare.lt, first), new P(Compare.gt, second)));
}
public static <V> P<V> between(final V first, final V second) {
return new AndP<V>(Arrays.asList(new P(Compare.gte, first), new P(Compare.lt, second)));
}
public static <V> P<V> within(final V... values) {
return P.within(Arrays.asList(values));
}
public static <V> P<V> within(final Collection<V> value) {
return new P(Contains.within, value);
}
public static <V> P<V> without(final V... values) {
return P.without(Arrays.asList(values));
}
public static <V> P<V> without(final Collection<V> value) {
return new P(Contains.without, value);
}
public static P test(final BiPredicate biPredicate, final Object value) {
return new P(biPredicate, value);
}
public static <V> P<V> not(final P<V> predicate) {
return predicate.negate();
}
}
|
wolfram74/wolfram74.github.io
|
schmutz_jager/scripts/spaces.js
|
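// Each space lists its recurring chores; weekPeriod is presumably the repeat interval in weeks (1 = weekly, 2 = every other week, 4 = monthly).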
var spaces = {
livingRoom: {
tasks: [
{
title: 'sweep',
weekPeriod: 1,
description:'Use a broom or vacuum cleaner to clean the floor',
completionSigns:"You've swept over the floor and moved furniture at some point, it's hard to see hair while standing."
},
{
title: 'tidy couch',
weekPeriod: 1,
description:'Fold the blankets and stow them plausibly, rearrange the pillows',
completionSigns:"It looks picturesque, pictures might be appropriate at some point."
},
{
title: 'Clear table',
weekPeriod: 1,
description:'Get the coffee table clear of clutter',
completionSigns: "table is clear of papers, chords and computers"
},
{
title: 'Wipe down table',
weekPeriod: 2,
description:'Using a moist cloth wipe down table',
completionSigns: "You got something moist at some point."
},
{
title: 'Clean couch cushions',
weekPeriod: 2,
description:'Lift up the cushions and clean the grit off them.',
completionSigns: "At some point the couch was partially disassembled."
},
]
},
kitchen: {
tasks: [
{
title: 'sweep',
weekPeriod: 1,
description:'Use a broom or vacuum cleaner to clean the floor',
completionSigns:"You've swept over the floor and moved furniture at some point, it's hard to see hair while standing."
},
{
title: 'Clear counters',
weekPeriod: 1,
description:'Get the various counters clear of clutter',
completionSigns: "You could put the big cutting board down on any of the portion of counter big enough to use it, produce with signs of rot have been composted."
},
{
title: 'Wipe down counters',
weekPeriod: 1,
description:'Using a moist cloth wipe down counters',
completionSigns: "You got something moist at some point."
},
{
title: 'Clean dishes in the sink',
weekPeriod: 1,
description:'Make sure the sink is empty of dirty dishes at some point in the week',
completionSigns: "dishes were cleaned."
},
{
title: 'Move dried dishes to where they belong',
weekPeriod: 1,
description:'self explanatory',
completionSigns: "The drying wrack is empty."
},
{
title: 'Clean stove top',
weekPeriod: 2,
description:'take off the risers for the stove and wash them and surrounding areas.',
completionSigns: "You'd feel comfortable leaving a sandwich on the stove for a few minutes then eating it."
},
{
title: 'Clean sink',
weekPeriod: 4,
description:'Get some disinfectant and wipe down the sink area',
completionSigns: "You used chemicals on the sink area."
},
]
},
bathRoom: {
tasks: [
{
title: 'sweep',
weekPeriod: 1,
description:'Use a broom or vacuum cleaner to clean the floor',
completionSigns:"You've swept over the floor and moved furniture at some point, it's hard to see hair while standing."
},
{
title: 'scrub toilet',
weekPeriod: 1,
description:'Use toilet brush to remove water stain on bowl.',
completionSigns:"You've gotten the brush wet and at least the upper portion of the bowl is clean."
},
{
title: 'wipe down sink',
weekPeriod: 1,
description:'Use some towel or brush to clean off some of the residue on the sink. relocate objects to ensure you get all the surfaces.',
completionSigns:"You've gotten something wet and there are no visible stains on the surface."
},
{
title: 'scrub tub',
weekPeriod: 4,
description:'Use a scrub brush and cleaner to remove soap scum and water stains from the tub.',
completionSigns:"You've gotten the brush wet and the tub surfaces are free of visible stains."
},
]
},
diningSpaceandStairs: {
tasks: [
{
title: 'sweep',
weekPeriod: 1,
description:'Use a broom or vacuum cleaner to clean the floor',
completionSigns:"You've swept over the floor and moved furniture at some point, it's hard to see hair while standing."
},
{
title: 'Clear table',
weekPeriod: 1,
description:'Get the dinner table clear of clutter',
completionSigns: "table is clear of papers, chords and computers"
},
{
title: 'Wipe down table',
weekPeriod: 2,
description:'Using a moist cloth wipe down table',
completionSigns: "You got something moist at some point."
},
]
},
}
var residents = ["Cari","Peter","Ryan"]
console.log('tasks loaded')
/*
http://www.allyou.com/budget-home/organizing-cleaning/fast-track-your-cleaning-routine
http://3.bp.blogspot.com/-K5cPs43ZjTc/Tud1VRlRs2I/AAAAAAAABGg/PRU73yf2x7Q/s1600/Slide1.GIF
towards bottom
http://happymoneysaver.com/my-ultimate-happy-home-cleaning-routine-plus-free-printable-checklist/
http://www.cleanandscentsible.com/2014/01/developing-daily-cleaning-schedule.html
look at that, not only is the problem solved, surveys have been done of solution space
http://piganddac.com/a-pinterest-roundup-of-cleaning-routines/
*/
|