text
stringlengths
2
99k
meta
dict
bin/ja2 bin/ja2-launcher lib/libstracciatella.so man/man6/ja2.6.gz share/applications/ja2-stracciatella.desktop share/icons/hicolor/scalable/apps/ja2-stracciatella.svg %%DATADIR%%/externalized/ammo_types.json %%DATADIR%%/externalized/army-gun-choice-extended.json %%DATADIR%%/externalized/army-gun-choice-normal.json %%DATADIR%%/externalized/bobby-ray-inventory-new.json %%DATADIR%%/externalized/bobby-ray-inventory-used.json %%DATADIR%%/externalized/calibres.json %%DATADIR%%/externalized/dealer-inventory-alberto-santos.json %%DATADIR%%/externalized/dealer-inventory-arnie.json %%DATADIR%%/externalized/dealer-inventory-carlo-santos.json %%DATADIR%%/externalized/dealer-inventory-devin.json %%DATADIR%%/externalized/dealer-inventory-elgin.json %%DATADIR%%/externalized/dealer-inventory-frank.json %%DATADIR%%/externalized/dealer-inventory-franz.json %%DATADIR%%/externalized/dealer-inventory-fredo.json %%DATADIR%%/externalized/dealer-inventory-gabby.json %%DATADIR%%/externalized/dealer-inventory-herve-santos.json %%DATADIR%%/externalized/dealer-inventory-howard.json %%DATADIR%%/externalized/dealer-inventory-jake.json %%DATADIR%%/externalized/dealer-inventory-keith.json %%DATADIR%%/externalized/dealer-inventory-manny.json %%DATADIR%%/externalized/dealer-inventory-micky.json %%DATADIR%%/externalized/dealer-inventory-perko.json %%DATADIR%%/externalized/dealer-inventory-peter-santos.json %%DATADIR%%/externalized/dealer-inventory-sam.json %%DATADIR%%/externalized/dealer-inventory-tony.json %%DATADIR%%/externalized/game.json %%DATADIR%%/externalized/imp.json %%DATADIR%%/externalized/magazines.json %%DATADIR%%/externalized/music.json %%DATADIR%%/externalized/readme.txt %%DATADIR%%/externalized/sti/interface/LOADSCREENTAB.STI %%DATADIR%%/externalized/strings/ammo-calibre-bobbyray-dut.json %%DATADIR%%/externalized/strings/ammo-calibre-bobbyray-eng.json %%DATADIR%%/externalized/strings/ammo-calibre-bobbyray-fr.json %%DATADIR%%/externalized/strings/ammo-calibre-bobbyray-ger.json 
%%DATADIR%%/externalized/strings/ammo-calibre-bobbyray-it.json %%DATADIR%%/externalized/strings/ammo-calibre-bobbyray-pl.json %%DATADIR%%/externalized/strings/ammo-calibre-bobbyray-rus.json %%DATADIR%%/externalized/strings/ammo-calibre-dut.json %%DATADIR%%/externalized/strings/ammo-calibre-eng.json %%DATADIR%%/externalized/strings/ammo-calibre-fr.json %%DATADIR%%/externalized/strings/ammo-calibre-ger.json %%DATADIR%%/externalized/strings/ammo-calibre-it.json %%DATADIR%%/externalized/strings/ammo-calibre-pl.json %%DATADIR%%/externalized/strings/ammo-calibre-rus.json %%DATADIR%%/externalized/strings/new-strings-dut.json %%DATADIR%%/externalized/strings/new-strings-eng.json %%DATADIR%%/externalized/strings/new-strings-fr.json %%DATADIR%%/externalized/strings/new-strings-ger.json %%DATADIR%%/externalized/strings/new-strings-it.json %%DATADIR%%/externalized/strings/new-strings-pl.json %%DATADIR%%/externalized/strings/new-strings-rus.json %%DATADIR%%/externalized/weapons.json %%DATADIR%%/mods/from-russia-with-love/data/maps/A9.dat %%DATADIR%%/mods/from-russia-with-love/license.txt %%DATADIR%%/mods/from-russia-with-love/readme.txt %%DATADIR%%/mods/generous-rebels/data/Maps/A10_b1.dat %%DATADIR%%/mods/generous-rebels/Generous Rebels Mod.txt %%DATADIR%%/mods/generous-rebels/GR mod.jpg %%DATADIR%%/mods/imp-quiz-honest-answers/data/Binarydata/IMPTEXT.EDT %%DATADIR%%/mods/imp-quiz-honest-answers/IMP quiz honest answers mod.txt %%DATADIR%%/mods/o-fortuna/data/music.json %%DATADIR%%/mods/o-fortuna/data/music/o-fortuna.wav %%DATADIR%%/mods/o-fortuna/license.txt %%DATADIR%%/mods/o-fortuna/readme.txt %%DATADIR%%/mods/readme.txt %%DATADIR%%/mods/test-json-dialogs/data/mercedt/009.edt.json %%DATADIR%%/mods/test-json-dialogs/readme.txt %%DATADIR%%/unittests/datatypes/doubles.bin %%DATADIR%%/unittests/datatypes/floats.bin %%DATADIR%%/unittests/find-files/file-without-extension %%DATADIR%%/unittests/find-files/lowercase-ext.txt %%DATADIR%%/unittests/find-files/subfolder/file.txt 
%%DATADIR%%/unittests/find-files/uppercase-ext.TXT %%DATADIR%%/unittests/saves/strac-linux/SaveGame01.sav %%DATADIR%%/unittests/saves/strac-macos/imp.dat %%DATADIR%%/unittests/saves/strac-macos/SaveGame09.sav %%DATADIR%%/unittests/saves/strac-win/SaveGame09.sav %%DATADIR%%/unittests/saves/vanilla-russian/IMP.dat %%DATADIR%%/unittests/saves/vanilla-russian/SaveGame06.sav
{ "pile_set_name": "Github" }
1 0 5 0 5 1 1 41 0 11 1 0 0 1 3 0 3 0 1 0 7 0 0 0 1 0 1 0 0 0 1 3 1 0 1 1 0 0 8 0 1 0 1 0 1 1 0 0 1 0 1 0 5 1 0 1 0 0 1 3 7 3 1 1 1 23 0 1 3 1 0 1 1 0 0 0 0 37 33 0 0 0 1 0 1 0 1 17 14 0 3 1 0 0 0 3 1 1 1 1 6 1 17 0 1 3 18 1 16 1 0 0 16 4 6 1 1 15 24 0 0 17 0 3 2 19 1 1 1 11 6 14 1 1 0 0 3 5 0 0 0 1 25 1 18 1 1 0 0 3 1 1 5 1 17 0 30 0 1 7 1 1 1 2 0 14 18 15 3 0 0 4 0 0 0 17 1 0 0 0 3 19 7 3 3 3 1 0 1 16 18 1 0 1 4 1 16 4 0 0 3 14 3 1 1 1 0 25 2 0 22 15 0 0 0 16 1 3 1 0 0 3 1 0 1 1 14 13 13 3 0 18 3 1 22 1 4 0 0 15 5 1 1 0 18 1 8 16 0 0 0 16 3 1 1 0 1 1 18 1 1 1 29 0 20 20 4 1 0 3 1 3 15 0 0 1 1 1 5 17 3 15 21 1 0 13 9 1
{ "pile_set_name": "Github" }
## Transformers

This is the Transformers directory.
{ "pile_set_name": "Github" }
// Minimum-coin change, bottom-up DP.
//
// Input:  T test cases; each case gives N (number of coin denominations) and
//         M (target amount), followed by the N denominations.
// Output: for each case, the DP result pd[M] on its own line.
//
// Fixes over the original:
//  * C99-style VLAs (`int moedas[N]`, `int pd[M+1]`) are not standard C++;
//    replaced with std::vector.
//  * scanf results are now checked so malformed/truncated input terminates
//    cleanly instead of reading garbage.
// The recurrence is preserved exactly: best[a] is seeded with `a` as an upper
// bound, lowered by any coin that fits, then incremented — so unreachable
// amounts still produce the same sentinel values the original printed.
#include <cstdio>
#include <vector>

int main() {
    int cases;
    if (std::scanf("%d", &cases) != 1) return 0;
    while (cases-- > 0) {
        int n_coins, target;
        if (std::scanf("%d %d", &n_coins, &target) != 2) return 0;
        std::vector<int> coins(n_coins);
        for (int i = 0; i < n_coins; i++) {
            if (std::scanf("%d", &coins[i]) != 1) return 0;
        }
        std::vector<int> best(target + 1);
        best[0] = 0;
        for (int amount = 1; amount <= target; amount++) {
            int fewest = amount; // upper bound, as in the original code
            for (int j = 0; j < n_coins; j++) {
                int rest = amount - coins[j];
                if (rest >= 0 && fewest > best[rest]) fewest = best[rest];
            }
            best[amount] = fewest + 1;
        }
        std::printf("%d\n", best[target]);
    }
    return 0;
}
{ "pile_set_name": "Github" }
// NOTE(review): auto-generated bundle output (Babel-transpiled ES2015 class
// wrapped in a custom AMD-style module system); do not hand-edit. It defines
// a React Native component `ReactExample` rendering a <View><Text> pair,
// registers it as module "commonbridge1.ios.js", and the trailing IIFE
// requires that module and hands its default export to `global.molesChange`.
// The "DON'T REMOVE THIS LINE" markers and the hashed `global.amd.re2("...")`
// ids are presumably consumed by the host bundler/runtime — confirm before
// touching them. Collapsed single-line source preserved verbatim below.
// DON'T REMOVE THIS LINE. var global = (function() { return this; })(); ; global.amd.define("commonbridge1.ios.js", [ "module" ], function(module) { var exports = module.exports; "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); var _createClass = function() { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function(Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); var _react = global.amd.re2("f5yThj586pw2FvtRgyhspw"); var _react2 = _interopRequireDefault(_react); var _reactNative = global.amd.re2("bSxh+Y/5CLWVnPx5SyRVPg"); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } var ReactExample = function(_Component) { _inherits(ReactExample, _Component); function ReactExample() { _classCallCheck(this, ReactExample); return _possibleConstructorReturn(this, (ReactExample.__proto__ || Object.getPrototypeOf(ReactExample)).apply(this, arguments)); } _createClass(ReactExample, [ { key: "render", value: function render() { return _react2.default.createElement(_reactNative.View, { style: styles.container }, _react2.default.createElement(_reactNative.Text, { style: styles.welcome }, "Welcome to commonbridge1!")); } } ]); return ReactExample; }(_react.Component); exports.default = ReactExample; var styles = _reactNative.StyleSheet.create({ container: { flex: 1, justifyContent: "center", alignItems: "center", backgroundColor: "#F5FCFF" }, welcome: { fontSize: 20, textAlign: "center", margin: 10 }, instructions: { textAlign: "center", color: "#333333", marginBottom: 5 } }); ; return module.exports; });; // DON'T REMOVE THIS LINE. ;(function() { // ... var global = (function() { return this; })(); global.amd.require(['commonbridge1.ios.js'], function(entry) { var ret = global.amd.re2('commonbridge1.ios.js'); global.molesChange(ret.default); }, false); })();
{ "pile_set_name": "Github" }
// lodash/fp wrapper: rebind `forOwnRight` through the fp `convert` shim and
// attach the fp placeholder before exporting.
var convert = require('./convert');

var wrapped = convert('forOwnRight', require('../forOwnRight'));
wrapped.placeholder = require('./placeholder');

module.exports = wrapped;
{ "pile_set_name": "Github" }
// NOTE(review): collapsed single-line source preserved verbatim below.
// QueryCacheUtilTest exercises the static helpers in QueryCacheUtil against a
// real NodeQueryCacheContext built from a freshly created HazelcastInstance:
// it checks the utility class has only a private constructor
// (assertUtilityConstructor), that getAccumulators() returns an empty map when
// nothing is registered, and that getAccumulatorOrNull() returns null in the
// same situation.
/* * Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.map.impl.querycache.utils; import com.hazelcast.core.HazelcastInstance; import com.hazelcast.map.impl.MapService; import com.hazelcast.map.impl.querycache.NodeQueryCacheContext; import com.hazelcast.map.impl.querycache.QueryCacheContext; import com.hazelcast.map.impl.querycache.accumulator.Accumulator; import com.hazelcast.test.HazelcastParallelClassRunner; import com.hazelcast.test.HazelcastTestSupport; import com.hazelcast.test.annotation.ParallelJVMTest; import com.hazelcast.test.annotation.QuickTest; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import java.util.Map; import static com.hazelcast.map.impl.querycache.utils.QueryCacheUtil.getAccumulatorOrNull; import static com.hazelcast.map.impl.querycache.utils.QueryCacheUtil.getAccumulators; import static com.hazelcast.test.Accessors.getNodeEngineImpl; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; @RunWith(HazelcastParallelClassRunner.class) @Category({QuickTest.class, ParallelJVMTest.class}) public class QueryCacheUtilTest extends HazelcastTestSupport { private QueryCacheContext context; @Before public void setUp() { HazelcastInstance instance = createHazelcastInstance(); MapService mapService = 
getNodeEngineImpl(instance).getService(MapService.SERVICE_NAME); context = new NodeQueryCacheContext(mapService.getMapServiceContext()); } @Test public void testConstructor() { assertUtilityConstructor(QueryCacheUtil.class); } @Test public void getAccumulators_whenNoAccumulatorsRegistered_thenReturnEmptyMap() { Map<Integer, Accumulator> accumulators = getAccumulators(context, "myMap", "myCache"); assertNotNull(accumulators); assertEquals(0, accumulators.size()); } @Test public void getAccumulatorOrNull_whenNoAccumulatorsRegistered_thenReturnNull() { Accumulator accumulator = getAccumulatorOrNull(context, "myMap", "myCache", -1); assertNull(accumulator); } }
{ "pile_set_name": "Github" }
{ "version": 2, "groups": [ { "title": "Cluster Group 1", "description": "This is the description of the cluster group. Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.", "clusters": [ { "title": "Cluster 1", "description": "This is a cluster description", "url": "real/url/here" }, { "title": "Multiviewer", "description": "This is a multiviewer cluster!", "url": "multiviewer/url/here", "type": "multiviewer" }, { "title": "Disabled Cluster", "description": "This is a disabled cluster", "type": "disabled" }, { "title": "Silent Cluster", "description": "This is a silent cluster", "type": "noAlerts" } ] }, { "title": "Cluster Group 2", "clusters": [ { "title": "Another Cluster", "url": "real/url/here" } ] } ] }
{ "pile_set_name": "Github" }
// NOTE(review): collapsed single-line source preserved verbatim below.
// Shared harness for LevelUP's test suite. It (1) adds custom referee
// assertions isInstanceOf/isUndefined, (2) generates unique on-disk database
// locations (nextLocation) and removes stale `_levelup_test_db_*` directories
// (cleanup), (3) opens test databases while tracking them on `this` for later
// teardown, and (4) builds a `verify` helper that checks ReadStream
// "data"/"end" spy activity against generated source data. openTestDatabase
// and readStreamSetUp rely on being called with `this` bound to the test
// context (see commonSetUp, which creates this.cleanupDirs and
// this.closeableDatabases) — TODO confirm every caller binds correctly.
/* Copyright (c) 2012-2014 LevelUP contributors * See list at <https://github.com/rvagg/node-levelup#contributing> * MIT License <https://github.com/rvagg/node-levelup/blob/master/LICENSE.md> */ var referee = require('referee') , assert = referee.assert , refute = referee.refute , crypto = require('crypto') , async = require('async') , rimraf = require('rimraf') , fs = require('fs') , path = require('path') , delayed = require('delayed').delayed , levelup = require('../lib/levelup.js') , dbidx = 0 referee.add('isInstanceOf', { assert: function (actual, expected) { return actual instanceof expected } , refute: function (actual, expected) { return !(actual instanceof expected) } , assertMessage: '${0} expected to be instance of ${1}' , refuteMessage: '${0} expected not to be instance of ${1}' }) referee.add('isUndefined', { assert: function (actual) { return actual === undefined } , refute: function (actual) { return actual !== undefined } , assertMessage: '${0} expected to be undefined' , refuteMessage: '${0} expected not to be undefined' }) module.exports.nextLocation = function () { return path.join(__dirname, '_levelup_test_db_' + dbidx++) } module.exports.cleanup = function (callback) { fs.readdir(__dirname, function (err, list) { if (err) return callback(err) list = list.filter(function (f) { return (/^_levelup_test_db_/).test(f) }) if (!list.length) return callback() var ret = 0 list.forEach(function (f) { rimraf(path.join(__dirname, f), function () { if (++ret == list.length) callback() }) }) }) } module.exports.openTestDatabase = function () { var options = typeof arguments[0] == 'object' ? arguments[0] : { createIfMissing: true, errorIfExists: true } , callback = typeof arguments[0] == 'function' ? arguments[0] : arguments[1] , location = typeof arguments[0] == 'string' ? 
arguments[0] : module.exports.nextLocation() rimraf(location, function (err) { refute(err) this.cleanupDirs.push(location) levelup(location, options, function (err, db) { refute(err) if (!err) { this.closeableDatabases.push(db) callback(db) } }.bind(this)) }.bind(this)) } module.exports.commonTearDown = function (done) { async.forEach( this.closeableDatabases , function (db, callback) { db.close(callback) } , module.exports.cleanup.bind(null, done) ) } module.exports.loadBinaryTestData = function (callback) { fs.readFile(path.join(__dirname, 'data/testdata.bin'), callback) } module.exports.binaryTestDataMD5Sum = '920725ef1a3b32af40ccd0b78f4a62fd' module.exports.checkBinaryTestData = function (testData, callback) { var md5sum = crypto.createHash('md5'); md5sum.update(testData) assert.equals(md5sum.digest('hex'), module.exports.binaryTestDataMD5Sum) callback() } module.exports.commonSetUp = function (done) { this.cleanupDirs = [] this.closeableDatabases = [] this.openTestDatabase = module.exports.openTestDatabase.bind(this) this.timeout = 10000 module.exports.cleanup(done) } module.exports.readStreamSetUp = function (done) { module.exports.commonSetUp.call(this, function () { var i, k this.dataSpy = this.spy() this.endSpy = this.spy() this.sourceData = [] for (i = 0; i < 100; i++) { k = (i < 10 ? 
'0' : '') + i this.sourceData.push({ type : 'put' , key : k , value : Math.random() }) } this.verify = delayed(function (rs, done, data) { if (!data) data = this.sourceData // can pass alternative data array for verification assert.equals(this.endSpy.callCount, 1, 'ReadStream emitted single "end" event') assert.equals(this.dataSpy.callCount, data.length, 'ReadStream emitted correct number of "data" events') data.forEach(function (d, i) { var call = this.dataSpy.getCall(i) if (call) { assert.equals(call.args.length, 1, 'ReadStream "data" event #' + i + ' fired with 1 argument') refute.isNull(call.args[0].key, 'ReadStream "data" event #' + i + ' argument has "key" property') refute.isNull(call.args[0].value, 'ReadStream "data" event #' + i + ' argument has "value" property') assert.equals(call.args[0].key, d.key, 'ReadStream "data" event #' + i + ' argument has correct "key"') assert.equals( +call.args[0].value , +d.value , 'ReadStream "data" event #' + i + ' argument has correct "value"' ) } }.bind(this)) done() }, 0.05, this) done() }.bind(this)) }
{ "pile_set_name": "Github" }
# NOTE(review): collapsed single-line source preserved verbatim below.
# System tests for conversation e-mail notifications. The helpers drive the
# UI as a given user: start_conversation fills in and submits the new-
# conversation form (optionally toggling the "Notify me of any replies"
# checkbox via change_subscription_state), and reply posts a reply after
# clearing ActionMailer::Base.deliveries. The test cases then assert on the
# last delivery to check that subscribe_on_conversation_receiver /
# subscribe_on_conversation_starter and the per-conversation checkbox control
# who receives notification mail (recipient list) or whether none is sent.
require 'application_system_test_case' module Conversations class NotificationsTest < ApplicationSystemTestCase def start_conversation(as_user, to_user, change_subscription_state: nil) login_as(as_user, scope: :user) visit new_user_conversation_url(as_user, locale: :en, other_user: to_user.name) fill_in 'Message', with: 'This is a test' case change_subscription_state when true check 'Notify me of any replies' when false uncheck 'Notify me of any replies' end click_button 'Create conversation' assert_content "Conversation with #{to_user.name}" end def reply(as_user, conversation) ActionMailer::Base.deliveries.clear login_as(as_user, scope: :user) visit user_conversation_url(as_user, conversation, locale: :en) fill_in 'Message', with: 'This is a reply' click_button 'Post reply' assert_content 'This is a reply' end test 'notifications on start to the other user when they subscribe by default' do ActionMailer::Base.deliveries.clear user = User.first to_user = users(:junior) to_user.update!(subscribe_on_conversation_receiver: true) start_conversation(user, to_user) mail = ActionMailer::Base.deliveries.last assert_not_nil mail assert_equal [to_user.email], mail.to end test "no notifications on start to the other user when they don't subscribe by default" do ActionMailer::Base.deliveries.clear user = User.first to_user = users(:junior) to_user.update!(subscribe_on_conversation_receiver: false) start_conversation(user, to_user) mail = ActionMailer::Base.deliveries.last assert_nil mail end test 'notifications on reply when the other user is subscribed by default' do ActionMailer::Base.deliveries.clear user = User.first user.update!(subscribe_on_conversation_starter: true) to_user = users(:junior) start_conversation(user, to_user) logout reply(to_user, Conversation.last) mail = ActionMailer::Base.deliveries.last assert_not_nil mail assert_equal [user.email], mail.to end test 'no notifications on reply when the other user is not subscribed by default' do 
ActionMailer::Base.deliveries.clear user = User.first user.update!(subscribe_on_conversation_starter: false) to_user = users(:junior) start_conversation(user, to_user) logout reply(to_user, Conversation.last) mail = ActionMailer::Base.deliveries.last assert_nil mail end test 'no notifications on reply when the other user chooses not to subscribe' do ActionMailer::Base.deliveries.clear user = User.first user.update!(subscribe_on_conversation_starter: true) to_user = users(:junior) start_conversation(user, to_user, change_subscription_state: false) logout reply(to_user, Conversation.last) mail = ActionMailer::Base.deliveries.last assert_nil mail end end end
{ "pile_set_name": "Github" }
# How to Contribute

We'd love to accept your patches and contributions to this project. There are just a few small guidelines you need to follow.

## Contributor License Agreement

Contributions to this project must be accompanied by a Contributor License Agreement. You (or your employer) retain the copyright to your contribution; this simply gives us permission to use and redistribute your contributions as part of the project. Head over to <https://cla.developers.google.com/> to see your current agreements on file or to sign a new one.

You generally only need to submit a CLA once, so if you've already submitted one (even if it was for a different project), you probably don't need to do it again.

## Code reviews

All submissions, including submissions by project members, require review. We use GitHub pull requests for this purpose. Consult [GitHub Help](https://help.github.com/articles/about-pull-requests/) for more information on using pull requests.

## Community Guidelines

This project follows [Google's Open Source Community Guidelines](https://opensource.google.com/conduct/).
{ "pile_set_name": "Github" }
package gw.util.transform.java.Examples;// Example 57 from page 43 of Java Precisely second edition (The MIT Press 2005) // Author: Peter Sestoft (sestoft@itu.dk) class Example57 { public static void main(String[] args) { Point p = new Point(10, 20); int[] a = new int[5]; int d = 8; System.out.println("p is " + p); // Prints: p is (10, 20) System.out.println("a[3] is " + a[3]); // Prints: a[3] is 0 m(p, d, a); System.out.println("p is " + p); // Prints: p is (18, 28) System.out.println("d is " + d); // Prints: d is 8 System.out.println("a[3] is " + a[3]); // Prints: a[3] is 22 } static void m(Point pp, int dd, int[] aa) { pp.move(dd, dd); dd = 117; aa[3] = 22; } }
{ "pile_set_name": "Github" }
1 301 404 551 828 1128 1428 1669 1934 1936 2236 2267 2567 2638 2938 3238 3427
{ "pile_set_name": "Github" }
/** * @name Cleartext storage of sensitive information using storable class * @description Storing sensitive information in cleartext can expose it to an attacker. * @kind problem * @problem.severity recommendation * @precision medium * @id java/cleartext-storage-in-class * @tags security * external/cwe/cwe-499 * external/cwe/cwe-312 */ import java import SensitiveStorage from SensitiveSource data, ClassStore s, Expr input, Expr store where input = s.getAnInput() and store = s.getAStore() and data.flowsToCached(input) and // Exclude results in test code. not testMethod(store.getEnclosingCallable()) and not testMethod(data.getEnclosingCallable()) select store, "Storable class $@ containing $@ is stored here. Data was added $@.", s, s.toString(), data, "sensitive data", input, "here"
// NOTE(review): flags stores of a Storable class that contains sensitive data
// (CWE-312/499). `flowsToCached` links the sensitive source into the value
// added to the stored object; matches inside test code are filtered out via
// `testMethod` on both the store site and the data source.
{ "pile_set_name": "Github" }
// Copyright (C) 2016 by rr- // // This file is part of arc_unpacker. // // arc_unpacker is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or (at // your option) any later version. // // arc_unpacker is distributed in the hope that it will be useful, but // WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // General Public License for more details. // // You should have received a copy of the GNU General Public License // along with arc_unpacker. If not, see <http://www.gnu.org/licenses/>. #pragma once #include <functional> #include <memory> #include "io/file.h" #include "io/path.h" namespace au { class VirtualFileSystem final { public: static void enable(); static void disable(); static void clear(); static void register_file( const io::path &path, const std::function<std::unique_ptr<io::File>()> factory); static void unregister_file(const io::path &path); static void register_directory(const io::path &path); static void unregister_directory(const io::path &path); static std::unique_ptr<io::File> get_by_stem(const std::string &stem); static std::unique_ptr<io::File> get_by_name(const std::string &name); static std::unique_ptr<io::File> get_by_path(const io::path &path); }; }
{ "pile_set_name": "Github" }
/* autogenerated by gensyscalls.py */
#include <asm/unistd.h>
#include <linux/err.h>
#include <machine/asm.h>

/*
 * NOTE(review): ARM syscall stub for brk. r7 carries the syscall number in
 * the EABI convention, so the caller's r7 is preserved in ip around the trap.
 * After `swi #0`, r0 holds the kernel's return value; values in the error
 * range (compared against MAX_ERRNO via cmn) are negated and routed to
 * __set_errno, otherwise the stub returns directly.
 */
ENTRY(__brk)
    mov     ip, r7                  /* save caller's r7 */
    ldr     r7, =__NR_brk           /* load syscall number */
    swi     #0                      /* trap into the kernel */
    mov     r7, ip                  /* restore caller's r7 */
    cmn     r0, #(MAX_ERRNO + 1)    /* is r0 in the error range? */
    bxls    lr                      /* no error: return result as-is */
    neg     r0, r0                  /* error: make errno positive */
    b       __set_errno             /* tail-call errno setter */
END(__brk)
{ "pile_set_name": "Github" }
{{/* NOTE(review): Helm named-template helpers. Both "name" and "fullname"
truncate to 63 characters and strip a trailing "-" because some Kubernetes
name fields are limited by the DNS naming spec, as the original comment
states. Collapsed single-line source preserved verbatim below. */}}
{{/* vim: set filetype=mustache: */}} {{/* Expand the name of the chart. */}} {{- define "name" -}} {{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} {{- end -}} {{/* Create a default fully qualified app name. We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). */}} {{- define "fullname" -}} {{- $name := default .Chart.Name .Values.nameOverride -}} {{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} {{- end -}}
{ "pile_set_name": "Github" }
/*
 * NOTE(review): collapsed single-line source preserved verbatim below.
 * O2Micro CardBus bridge support for yenta_socket: PCI config and ExCA
 * register definitions, plus two hooks. o2micro_override() optionally sets
 * read-prefetch/write-burst bits in the "reserved" config registers
 * 0x94/0xD4 — skipped (use_speedup = false) on bridge models listed as
 * problematic, and overridable either way via the `o2_speedup` module
 * parameter ("on"/"off"/"default"). o2micro_restore_state() simply re-runs
 * the override, per its own comment. Only PCI function 0 is touched.
 */
/* * o2micro.h 1.13 1999/10/25 20:03:34 * * The contents of this file are subject to the Mozilla Public License * Version 1.1 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License * at http://www.mozilla.org/MPL/ * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and * limitations under the License. * * The initial developer of the original code is David A. Hinds * <dahinds@users.sourceforge.net>. Portions created by David A. Hinds * are Copyright (C) 1999 David A. Hinds. All Rights Reserved. * * Alternatively, the contents of this file may be used under the * terms of the GNU General Public License version 2 (the "GPL"), in which * case the provisions of the GPL are applicable instead of the * above. If you wish to allow the use of your version of this file * only under the terms of the GPL and not to allow others to use * your version of this file under the MPL, indicate your decision by * deleting the provisions above and replace them with the notice and * other provisions required by the GPL. If you do not delete the * provisions above, a recipient may use your version of this file * under either the MPL or the GPL. 
*/ #ifndef _LINUX_O2MICRO_H #define _LINUX_O2MICRO_H /* Additional PCI configuration registers */ #define O2_MUX_CONTROL 0x90 /* 32 bit */ #define O2_MUX_RING_OUT 0x0000000f #define O2_MUX_SKTB_ACTV 0x000000f0 #define O2_MUX_SCTA_ACTV_ENA 0x00000100 #define O2_MUX_SCTB_ACTV_ENA 0x00000200 #define O2_MUX_SER_IRQ_ROUTE 0x0000e000 #define O2_MUX_SER_PCI 0x00010000 #define O2_MUX_SKTA_TURBO 0x000c0000 /* for 6833, 6860 */ #define O2_MUX_SKTB_TURBO 0x00300000 #define O2_MUX_AUX_VCC_3V 0x00400000 #define O2_MUX_PCI_VCC_5V 0x00800000 #define O2_MUX_PME_MUX 0x0f000000 /* Additional ExCA registers */ #define O2_MODE_A 0x38 #define O2_MODE_A_2 0x26 /* for 6833B, 6860C */ #define O2_MODE_A_CD_PULSE 0x04 #define O2_MODE_A_SUSP_EDGE 0x08 #define O2_MODE_A_HOST_SUSP 0x10 #define O2_MODE_A_PWR_MASK 0x60 #define O2_MODE_A_QUIET 0x80 #define O2_MODE_B 0x39 #define O2_MODE_B_2 0x2e /* for 6833B, 6860C */ #define O2_MODE_B_IDENT 0x03 #define O2_MODE_B_ID_BSTEP 0x00 #define O2_MODE_B_ID_CSTEP 0x01 #define O2_MODE_B_ID_O2 0x02 #define O2_MODE_B_VS1 0x04 #define O2_MODE_B_VS2 0x08 #define O2_MODE_B_IRQ15_RI 0x80 #define O2_MODE_C 0x3a #define O2_MODE_C_DREQ_MASK 0x03 #define O2_MODE_C_DREQ_INPACK 0x01 #define O2_MODE_C_DREQ_WP 0x02 #define O2_MODE_C_DREQ_BVD2 0x03 #define O2_MODE_C_ZVIDEO 0x08 #define O2_MODE_C_IREQ_SEL 0x30 #define O2_MODE_C_MGMT_SEL 0xc0 #define O2_MODE_D 0x3b #define O2_MODE_D_IRQ_MODE 0x03 #define O2_MODE_D_PCI_CLKRUN 0x04 #define O2_MODE_D_CB_CLKRUN 0x08 #define O2_MODE_D_SKT_ACTV 0x20 #define O2_MODE_D_PCI_FIFO 0x40 /* for OZ6729, OZ6730 */ #define O2_MODE_D_W97_IRQ 0x40 #define O2_MODE_D_ISA_IRQ 0x80 #define O2_MHPG_DMA 0x3c #define O2_MHPG_CHANNEL 0x07 #define O2_MHPG_CINT_ENA 0x08 #define O2_MHPG_CSC_ENA 0x10 #define O2_FIFO_ENA 0x3d #define O2_FIFO_ZVIDEO_3 0x08 #define O2_FIFO_PCI_FIFO 0x10 #define O2_FIFO_POSTWR 0x40 #define O2_FIFO_BUFFER 0x80 #define O2_MODE_E 0x3e #define O2_MODE_E_MHPG_DMA 0x01 #define O2_MODE_E_SPKR_OUT 0x02 #define O2_MODE_E_LED_OUT 
0x08 #define O2_MODE_E_SKTA_ACTV 0x10 #define O2_RESERVED1 0x94 #define O2_RESERVED2 0xD4 #define O2_RES_READ_PREFETCH 0x02 #define O2_RES_WRITE_BURST 0x08 static int o2micro_override(struct yenta_socket *socket) { /* * 'reserved' register at 0x94/D4. allows setting read prefetch and write * bursting. read prefetching for example makes the RME Hammerfall DSP * working. for some bridges it is at 0x94, for others at 0xD4. it's * ok to write to both registers on all O2 bridges. * from Eric Still, 02Micro. */ u8 a, b; bool use_speedup; if (PCI_FUNC(socket->dev->devfn) == 0) { a = config_readb(socket, O2_RESERVED1); b = config_readb(socket, O2_RESERVED2); dev_dbg(&socket->dev->dev, "O2: 0x94/0xD4: %02x/%02x\n", a, b); switch (socket->dev->device) { /* * older bridges have problems with both read prefetch and write * bursting depending on the combination of the chipset, bridge * and the cardbus card. so disable them to be on the safe side. */ case PCI_DEVICE_ID_O2_6729: case PCI_DEVICE_ID_O2_6730: case PCI_DEVICE_ID_O2_6812: case PCI_DEVICE_ID_O2_6832: case PCI_DEVICE_ID_O2_6836: case PCI_DEVICE_ID_O2_6933: use_speedup = false; break; default: use_speedup = true; break; } /* the user may override our decision */ if (strcasecmp(o2_speedup, "on") == 0) use_speedup = true; else if (strcasecmp(o2_speedup, "off") == 0) use_speedup = false; else if (strcasecmp(o2_speedup, "default") != 0) dev_warn(&socket->dev->dev, "O2: Unknown parameter, using 'default'"); if (use_speedup) { dev_info(&socket->dev->dev, "O2: enabling read prefetch/write burst. If you experience problems or performance issues, use the yenta_socket parameter 'o2_speedup=off'\n"); config_writeb(socket, O2_RESERVED1, a | O2_RES_READ_PREFETCH | O2_RES_WRITE_BURST); config_writeb(socket, O2_RESERVED2, b | O2_RES_READ_PREFETCH | O2_RES_WRITE_BURST); } else { dev_info(&socket->dev->dev, "O2: disabling read prefetch/write burst. 
If you experience problems or performance issues, use the yenta_socket parameter 'o2_speedup=on'\n", config_writeb(socket, O2_RESERVED1, a & ~(O2_RES_READ_PREFETCH | O2_RES_WRITE_BURST)); config_writeb(socket, O2_RESERVED2, b & ~(O2_RES_READ_PREFETCH | O2_RES_WRITE_BURST)); } } return 0; } static void o2micro_restore_state(struct yenta_socket *socket) { /* * as long as read prefetch is the only thing in * o2micro_override, it's safe to call it from here */ o2micro_override(socket); } #endif /* _LINUX_O2MICRO_H */
{ "pile_set_name": "Github" }
var assignValue = require('./_assignValue');
var castPath = require('./_castPath');
var isIndex = require('./_isIndex');
var isObject = require('./isObject');
var toKey = require('./_toKey');

/**
 * The base implementation of `_.set`.
 *
 * Walks `path` one segment at a time, creating intermediate containers as
 * needed (an array when the next segment looks like an index, an object
 * otherwise), and assigns `value` at the final segment.
 *
 * @private
 * @param {Object} object The object to modify.
 * @param {Array|string} path The path of the property to set.
 * @param {*} value The value to set.
 * @param {Function} [customizer] The function to customize path creation.
 * @returns {Object} Returns `object`.
 */
function baseSet(object, path, value, customizer) {
  if (!isObject(object)) {
    return object;
  }
  path = castPath(path, object);

  var lastIndex = path.length - 1;
  var node = object;

  for (var index = 0; node != null && index < path.length; index++) {
    var key = toKey(path[index]);
    var next = value;

    if (index != lastIndex) {
      // Not at the leaf yet: decide what the intermediate container is.
      var existing = node[key];
      next = customizer ? customizer(existing, key, node) : undefined;
      if (next === undefined) {
        next = isObject(existing)
          ? existing
          : (isIndex(path[index + 1]) ? [] : {});
      }
    }
    assignValue(node, key, next);
    node = node[key];
  }
  return object;
}

module.exports = baseSet;
{ "pile_set_name": "Github" }
{ "parent": "block/cube_all", "textures": { "all": "immersiveengineering:block/metal/sheetmetal_light_gray" } }
{ "pile_set_name": "Github" }
///////////////////////////////////////////////////////////////////////////////
// weighted_tail_quantile.hpp
//
// Copyright 2006 Daniel Egloff, Olivier Gygi. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)

#ifndef BOOST_ACCUMULATORS_STATISTICS_WEIGHTED_TAIL_QUANTILE_HPP_DE_01_01_2006
#define BOOST_ACCUMULATORS_STATISTICS_WEIGHTED_TAIL_QUANTILE_HPP_DE_01_01_2006

#include <vector>
#include <limits>
#include <functional>
#include <sstream>
#include <stdexcept>
#include <boost/throw_exception.hpp>
#include <boost/parameter/keyword.hpp>
#include <boost/mpl/placeholders.hpp>
#include <boost/mpl/if.hpp>
#include <boost/type_traits/is_same.hpp>
#include <boost/accumulators/numeric/functional.hpp>
#include <boost/accumulators/framework/depends_on.hpp>
#include <boost/accumulators/framework/accumulator_base.hpp>
#include <boost/accumulators/framework/extractor.hpp>
#include <boost/accumulators/framework/parameters/sample.hpp>
#include <boost/accumulators/statistics_fwd.hpp>
#include <boost/accumulators/statistics/tail.hpp>
#include <boost/accumulators/statistics/tail_quantile.hpp>
#include <boost/accumulators/statistics/parameters/quantile_probability.hpp>

#ifdef _MSC_VER
# pragma warning(push)
# pragma warning(disable: 4127) // conditional expression is constant
#endif

namespace boost { namespace accumulators
{

namespace impl
{
    ///////////////////////////////////////////////////////////////////////////////
    // weighted_tail_quantile_impl
    //  Tail quantile estimation based on order statistics of weighted samples
    /**
        @brief Tail quantile estimation based on order statistics of weighted samples (for both left and right tails)

        An estimator \f$\hat{q}\f$ of tail quantiles with level \f$\alpha\f$ based on order statistics
        \f$X_{1:n} \leq X_{2:n} \leq\dots\leq X_{n:n}\f$ of weighted samples are given by \f$X_{\lambda:n}\f$ (left tail)
        and \f$X_{\rho:n}\f$ (right tail), where

            \f[
                \lambda = \inf\left\{ l \left| \frac{1}{\bar{w}_n}\sum_{i=1}^{l} w_i \geq \alpha \right. \right\}
            \f]

        and

            \f[
                \rho = \sup\left\{ r \left| \frac{1}{\bar{w}_n}\sum_{i=r}^{n} w_i \geq (1 - \alpha) \right. \right\},
            \f]

        \f$n\f$ being the number of samples and \f$\bar{w}_n\f$ the sum of all weights.

        @param quantile_probability
    */
    template<typename Sample, typename Weight, typename LeftRight>
    struct weighted_tail_quantile_impl
      : accumulator_base
    {
        typedef typename numeric::functional::fdiv<Weight, std::size_t>::result_type float_type;
        // for boost::result_of
        typedef Sample result_type;

        // Stateless: all data lives in the depended-on tail/tail_weights accumulators.
        weighted_tail_quantile_impl(dont_care) {}

        template<typename Args>
        result_type result(Args const &args) const
        {
            // Target cumulative weight: alpha * W for the left tail,
            // (1 - alpha) * W for the right tail (W = sum of all weights).
            float_type threshold = sum_of_weights(args)
                             * ( ( is_same<LeftRight, left>::value ) ? args[quantile_probability] : 1. - args[quantile_probability] );

            std::size_t n = 0;
            Weight sum = Weight(0);

            // Walk the cached tail until the accumulated weight reaches the threshold.
            while (sum < threshold)
            {
                if (n < static_cast<std::size_t>(tail_weights(args).size()))
                {
                    sum += *(tail_weights(args).begin() + n);
                    n++;
                }
                else
                {
                    // Ran past the cached tail: the cache is too small to reach
                    // the requested quantile level. Return NaN when available,
                    // otherwise report the out-of-range index.
                    if (std::numeric_limits<result_type>::has_quiet_NaN)
                    {
                        return std::numeric_limits<result_type>::quiet_NaN();
                    }
                    else
                    {
                        std::ostringstream msg;
                        msg << "index n = " << n << " is not in valid range [0, " << tail(args).size() << ")";
                        boost::throw_exception(std::runtime_error(msg.str()));
                        return Sample(0);
                    }
                }
            }

            // Note that the cached samples of the left are sorted in ascending order,
            // whereas the samples of the right tail are sorted in descending order
            return *(boost::begin(tail(args)) + n - 1);
        }
    };
} // namespace impl

///////////////////////////////////////////////////////////////////////////////
// tag::weighted_tail_quantile<>
//
namespace tag
{
    template<typename LeftRight>
    struct weighted_tail_quantile
      : depends_on<sum_of_weights, tail_weights<LeftRight> >
    {
        /// INTERNAL ONLY
        typedef accumulators::impl::weighted_tail_quantile_impl<mpl::_1, mpl::_2, LeftRight> impl;
    };
}

///////////////////////////////////////////////////////////////////////////////
// extract::weighted_tail_quantile
//
namespace extract
{
    extractor<tag::quantile> const weighted_tail_quantile = {};

    BOOST_ACCUMULATORS_IGNORE_GLOBAL(weighted_tail_quantile)
}

using extract::weighted_tail_quantile;

}} // namespace boost::accumulators

#ifdef _MSC_VER
# pragma warning(pop)
#endif

#endif
{ "pile_set_name": "Github" }
// Copyright Peter Dimov 2001 // Copyright Aleksey Gurtovoy 2001-2004 // // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) // // Preprocessed version of "boost/mpl/aux_/basic_bind.hpp" header // -- DO NOT modify by hand! namespace boost { namespace mpl { namespace aux { template< bool > struct resolve_arg_impl { template< typename T, typename U1, typename U2, typename U3 , typename U4, typename U5 > struct result_ { typedef T type; }; }; template<> struct resolve_arg_impl<true> { template< typename T, typename U1, typename U2, typename U3 , typename U4, typename U5 > struct result_ { typedef typename apply_wrap5< T , U1, U2, U3, U4, U5 >::type type; }; }; template< typename T > struct is_bind_template; template< typename T, typename U1, typename U2, typename U3, typename U4 , typename U5 > struct resolve_bind_arg : resolve_arg_impl< is_bind_template<T>::value > ::template result_< T,U1,U2,U3,U4,U5 > { }; template< int arity_ > struct bind_chooser; aux::no_tag is_bind_helper(...); template< typename T > aux::no_tag is_bind_helper(protect<T>*); template< int N > aux::yes_tag is_bind_helper(arg<N>*); template< bool is_ref_ = true > struct is_bind_template_impl { template< typename T > struct result_ { BOOST_STATIC_CONSTANT(bool, value = false); }; }; template<> struct is_bind_template_impl<false> { template< typename T > struct result_ { BOOST_STATIC_CONSTANT(bool, value = sizeof(aux::is_bind_helper(static_cast<T*>(0))) == sizeof(aux::yes_tag) ); }; }; template< typename T > struct is_bind_template : is_bind_template_impl< ::boost::detail::is_reference_impl<T>::value > ::template result_<T> { }; } // namespace aux template< typename F > struct bind0 { template< typename U1 = na, typename U2 = na, typename U3 = na , typename U4 = na, typename U5 = na > struct apply { private: typedef typename aux::resolve_bind_arg< F,U1,U2,U3,U4,U5 >::type f_; public: typedef 
typename apply_wrap0< f_ >::type type; }; }; namespace aux { template< typename F > aux::yes_tag is_bind_helper(bind0<F>*); } // namespace aux BOOST_MPL_AUX_ARITY_SPEC(1, bind0) BOOST_MPL_AUX_TEMPLATE_ARITY_SPEC(1, bind0) template< typename F, typename T1 > struct bind1 { template< typename U1 = na, typename U2 = na, typename U3 = na , typename U4 = na, typename U5 = na > struct apply { private: typedef typename aux::resolve_bind_arg< F,U1,U2,U3,U4,U5 >::type f_; typedef aux::resolve_bind_arg< T1,U1,U2,U3,U4,U5 > t1; public: typedef typename apply_wrap1< f_ , typename t1::type >::type type; }; }; namespace aux { template< typename F, typename T1 > aux::yes_tag is_bind_helper(bind1< F,T1 >*); } // namespace aux BOOST_MPL_AUX_ARITY_SPEC(2, bind1) BOOST_MPL_AUX_TEMPLATE_ARITY_SPEC(2, bind1) template< typename F, typename T1, typename T2 > struct bind2 { template< typename U1 = na, typename U2 = na, typename U3 = na , typename U4 = na, typename U5 = na > struct apply { private: typedef typename aux::resolve_bind_arg< F,U1,U2,U3,U4,U5 >::type f_; typedef aux::resolve_bind_arg< T1,U1,U2,U3,U4,U5 > t1; typedef aux::resolve_bind_arg< T2,U1,U2,U3,U4,U5 > t2; public: typedef typename apply_wrap2< f_ , typename t1::type, typename t2::type >::type type; }; }; namespace aux { template< typename F, typename T1, typename T2 > aux::yes_tag is_bind_helper(bind2< F,T1,T2 >*); } // namespace aux BOOST_MPL_AUX_ARITY_SPEC(3, bind2) BOOST_MPL_AUX_TEMPLATE_ARITY_SPEC(3, bind2) template< typename F, typename T1, typename T2, typename T3 > struct bind3 { template< typename U1 = na, typename U2 = na, typename U3 = na , typename U4 = na, typename U5 = na > struct apply { private: typedef typename aux::resolve_bind_arg< F,U1,U2,U3,U4,U5 >::type f_; typedef aux::resolve_bind_arg< T1,U1,U2,U3,U4,U5 > t1; typedef aux::resolve_bind_arg< T2,U1,U2,U3,U4,U5 > t2; typedef aux::resolve_bind_arg< T3,U1,U2,U3,U4,U5 > t3; public: typedef typename apply_wrap3< f_ , typename t1::type, typename t2::type, 
typename t3::type >::type type; }; }; namespace aux { template< typename F, typename T1, typename T2, typename T3 > aux::yes_tag is_bind_helper(bind3< F,T1,T2,T3 >*); } // namespace aux BOOST_MPL_AUX_ARITY_SPEC(4, bind3) BOOST_MPL_AUX_TEMPLATE_ARITY_SPEC(4, bind3) template< typename F, typename T1, typename T2, typename T3, typename T4 > struct bind4 { template< typename U1 = na, typename U2 = na, typename U3 = na , typename U4 = na, typename U5 = na > struct apply { private: typedef typename aux::resolve_bind_arg< F,U1,U2,U3,U4,U5 >::type f_; typedef aux::resolve_bind_arg< T1,U1,U2,U3,U4,U5 > t1; typedef aux::resolve_bind_arg< T2,U1,U2,U3,U4,U5 > t2; typedef aux::resolve_bind_arg< T3,U1,U2,U3,U4,U5 > t3; typedef aux::resolve_bind_arg< T4,U1,U2,U3,U4,U5 > t4; public: typedef typename apply_wrap4< f_ , typename t1::type, typename t2::type, typename t3::type , typename t4::type >::type type; }; }; namespace aux { template< typename F, typename T1, typename T2, typename T3, typename T4 > aux::yes_tag is_bind_helper(bind4< F,T1,T2,T3,T4 >*); } // namespace aux BOOST_MPL_AUX_ARITY_SPEC(5, bind4) BOOST_MPL_AUX_TEMPLATE_ARITY_SPEC(5, bind4) template< typename F, typename T1, typename T2, typename T3, typename T4 , typename T5 > struct bind5 { template< typename U1 = na, typename U2 = na, typename U3 = na , typename U4 = na, typename U5 = na > struct apply { private: typedef typename aux::resolve_bind_arg< F,U1,U2,U3,U4,U5 >::type f_; typedef aux::resolve_bind_arg< T1,U1,U2,U3,U4,U5 > t1; typedef aux::resolve_bind_arg< T2,U1,U2,U3,U4,U5 > t2; typedef aux::resolve_bind_arg< T3,U1,U2,U3,U4,U5 > t3; typedef aux::resolve_bind_arg< T4,U1,U2,U3,U4,U5 > t4; typedef aux::resolve_bind_arg< T5,U1,U2,U3,U4,U5 > t5; public: typedef typename apply_wrap5< f_ , typename t1::type, typename t2::type, typename t3::type , typename t4::type, typename t5::type >::type type; }; }; namespace aux { template< typename F, typename T1, typename T2, typename T3, typename T4 , typename T5 > 
aux::yes_tag is_bind_helper(bind5< F,T1,T2,T3,T4,T5 >*); } // namespace aux BOOST_MPL_AUX_ARITY_SPEC(6, bind5) BOOST_MPL_AUX_TEMPLATE_ARITY_SPEC(6, bind5) }}
{ "pile_set_name": "Github" }
/* Copyright The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // This file was autogenerated by go-to-protobuf. Do not edit it manually! syntax = 'proto2'; package k8s.io.api.authorization.v1beta1; import "k8s.io/apimachinery/pkg/apis/meta/v1/generated.proto"; import "k8s.io/apimachinery/pkg/runtime/generated.proto"; import "k8s.io/apimachinery/pkg/runtime/schema/generated.proto"; // Package-wide variables from generator "generated". option go_package = "v1beta1"; // ExtraValue masks the value so protobuf can generate // +protobuf.nullable=true // +protobuf.options.(gogoproto.goproto_stringer)=false message ExtraValue { // items, if empty, will result in an empty slice repeated string items = 1; } // LocalSubjectAccessReview checks whether or not a user or group can perform an action in a given namespace. // Having a namespace scoped resource makes it much easier to grant namespace scoped policy that includes permissions // checking. message LocalSubjectAccessReview { // +optional optional k8s.io.apimachinery.pkg.apis.meta.v1.ObjectMeta metadata = 1; // Spec holds information about the request being evaluated. spec.namespace must be equal to the namespace // you made the request against. If empty, it is defaulted. 
optional SubjectAccessReviewSpec spec = 2; // Status is filled in by the server and indicates whether the request is allowed or not // +optional optional SubjectAccessReviewStatus status = 3; } // NonResourceAttributes includes the authorization attributes available for non-resource requests to the Authorizer interface message NonResourceAttributes { // Path is the URL path of the request // +optional optional string path = 1; // Verb is the standard HTTP verb // +optional optional string verb = 2; } // NonResourceRule holds information that describes a rule for the non-resource message NonResourceRule { // Verb is a list of kubernetes non-resource API verbs, like: get, post, put, delete, patch, head, options. "*" means all. repeated string verbs = 1; // NonResourceURLs is a set of partial urls that a user should have access to. *s are allowed, but only as the full, // final step in the path. "*" means all. // +optional repeated string nonResourceURLs = 2; } // ResourceAttributes includes the authorization attributes available for resource requests to the Authorizer interface message ResourceAttributes { // Namespace is the namespace of the action being requested. Currently, there is no distinction between no namespace and all namespaces // "" (empty) is defaulted for LocalSubjectAccessReviews // "" (empty) is empty for cluster-scoped resources // "" (empty) means "all" for namespace scoped resources from a SubjectAccessReview or SelfSubjectAccessReview // +optional optional string namespace = 1; // Verb is a kubernetes resource API verb, like: get, list, watch, create, update, delete, proxy. "*" means all. // +optional optional string verb = 2; // Group is the API Group of the Resource. "*" means all. // +optional optional string group = 3; // Version is the API Version of the Resource. "*" means all. // +optional optional string version = 4; // Resource is one of the existing resource types. "*" means all. 
// +optional optional string resource = 5; // Subresource is one of the existing resource types. "" means none. // +optional optional string subresource = 6; // Name is the name of the resource being requested for a "get" or deleted for a "delete". "" (empty) means all. // +optional optional string name = 7; } // ResourceRule is the list of actions the subject is allowed to perform on resources. The list ordering isn't significant, // may contain duplicates, and possibly be incomplete. message ResourceRule { // Verb is a list of kubernetes resource API verbs, like: get, list, watch, create, update, delete, proxy. "*" means all. repeated string verbs = 1; // APIGroups is the name of the APIGroup that contains the resources. If multiple API groups are specified, any action requested against one of // the enumerated resources in any API group will be allowed. "*" means all. // +optional repeated string apiGroups = 2; // Resources is a list of resources this rule applies to. "*" means all in the specified apiGroups. // "*/foo" represents the subresource 'foo' for all resources in the specified apiGroups. // +optional repeated string resources = 3; // ResourceNames is an optional white list of names that the rule applies to. An empty set means that everything is allowed. "*" means all. // +optional repeated string resourceNames = 4; } // SelfSubjectAccessReview checks whether or the current user can perform an action. Not filling in a // spec.namespace means "in all namespaces". Self is a special case, because users should always be able // to check whether they can perform an action message SelfSubjectAccessReview { // +optional optional k8s.io.apimachinery.pkg.apis.meta.v1.ObjectMeta metadata = 1; // Spec holds information about the request being evaluated. 
user and groups must be empty optional SelfSubjectAccessReviewSpec spec = 2; // Status is filled in by the server and indicates whether the request is allowed or not // +optional optional SubjectAccessReviewStatus status = 3; } // SelfSubjectAccessReviewSpec is a description of the access request. Exactly one of ResourceAuthorizationAttributes // and NonResourceAuthorizationAttributes must be set message SelfSubjectAccessReviewSpec { // ResourceAuthorizationAttributes describes information for a resource access request // +optional optional ResourceAttributes resourceAttributes = 1; // NonResourceAttributes describes information for a non-resource access request // +optional optional NonResourceAttributes nonResourceAttributes = 2; } // SelfSubjectRulesReview enumerates the set of actions the current user can perform within a namespace. // The returned list of actions may be incomplete depending on the server's authorization mode, // and any errors experienced during the evaluation. SelfSubjectRulesReview should be used by UIs to show/hide actions, // or to quickly let an end user reason about their permissions. It should NOT Be used by external systems to // drive authorization decisions as this raises confused deputy, cache lifetime/revocation, and correctness concerns. // SubjectAccessReview, and LocalAccessReview are the correct way to defer authorization decisions to the API server. message SelfSubjectRulesReview { // +optional optional k8s.io.apimachinery.pkg.apis.meta.v1.ObjectMeta metadata = 1; // Spec holds information about the request being evaluated. optional SelfSubjectRulesReviewSpec spec = 2; // Status is filled in by the server and indicates the set of actions a user can perform. // +optional optional SubjectRulesReviewStatus status = 3; } message SelfSubjectRulesReviewSpec { // Namespace to evaluate rules for. Required. optional string namespace = 1; } // SubjectAccessReview checks whether or not a user or group can perform an action. 
message SubjectAccessReview { // +optional optional k8s.io.apimachinery.pkg.apis.meta.v1.ObjectMeta metadata = 1; // Spec holds information about the request being evaluated optional SubjectAccessReviewSpec spec = 2; // Status is filled in by the server and indicates whether the request is allowed or not // +optional optional SubjectAccessReviewStatus status = 3; } // SubjectAccessReviewSpec is a description of the access request. Exactly one of ResourceAuthorizationAttributes // and NonResourceAuthorizationAttributes must be set message SubjectAccessReviewSpec { // ResourceAuthorizationAttributes describes information for a resource access request // +optional optional ResourceAttributes resourceAttributes = 1; // NonResourceAttributes describes information for a non-resource access request // +optional optional NonResourceAttributes nonResourceAttributes = 2; // User is the user you're testing for. // If you specify "User" but not "Group", then is it interpreted as "What if User were not a member of any groups // +optional optional string user = 3; // Groups is the groups you're testing for. // +optional repeated string group = 4; // Extra corresponds to the user.Info.GetExtra() method from the authenticator. Since that is input to the authorizer // it needs a reflection here. // +optional map<string, ExtraValue> extra = 5; // UID information about the requesting user. // +optional optional string uid = 6; } // SubjectAccessReviewStatus message SubjectAccessReviewStatus { // Allowed is required. True if the action would be allowed, false otherwise. optional bool allowed = 1; // Denied is optional. True if the action would be denied, otherwise // false. If both allowed is false and denied is false, then the // authorizer has no opinion on whether to authorize the action. Denied // may not be true if Allowed is true. // +optional optional bool denied = 4; // Reason is optional. It indicates why a request was allowed or denied. 
// +optional optional string reason = 2; // EvaluationError is an indication that some error occurred during the authorization check. // It is entirely possible to get an error and be able to continue determine authorization status in spite of it. // For instance, RBAC can be missing a role, but enough roles are still present and bound to reason about the request. // +optional optional string evaluationError = 3; } // SubjectRulesReviewStatus contains the result of a rules check. This check can be incomplete depending on // the set of authorizers the server is configured with and any errors experienced during evaluation. // Because authorization rules are additive, if a rule appears in a list it's safe to assume the subject has that permission, // even if that list is incomplete. message SubjectRulesReviewStatus { // ResourceRules is the list of actions the subject is allowed to perform on resources. // The list ordering isn't significant, may contain duplicates, and possibly be incomplete. repeated ResourceRule resourceRules = 1; // NonResourceRules is the list of actions the subject is allowed to perform on non-resources. // The list ordering isn't significant, may contain duplicates, and possibly be incomplete. repeated NonResourceRule nonResourceRules = 2; // Incomplete is true when the rules returned by this call are incomplete. This is most commonly // encountered when an authorizer, such as an external authorizer, doesn't support rules evaluation. optional bool incomplete = 3; // EvaluationError can appear in combination with Rules. It indicates an error occurred during // rule evaluation, such as an authorizer that doesn't support rule evaluation, and that // ResourceRules and/or NonResourceRules may be incomplete. // +optional optional string evaluationError = 4; }
{ "pile_set_name": "Github" }
version: 1 dn: m-oid=2.5.13.32,ou=normalizers,cn=system,ou=schema m-oid: 2.5.13.32 m-fqcn: org.apache.directory.api.ldap.model.schema.normalizers.NoOpNormalizer objectclass: metaNormalizer objectclass: metaTop objectclass: top creatorsname: uid=admin,ou=system
{ "pile_set_name": "Github" }
/* TEMPLATE GENERATED TESTCASE FILE Filename: CWE134_Uncontrolled_Format_String__char_connect_socket_w32_vsnprintf_54b.c Label Definition File: CWE134_Uncontrolled_Format_String.vasinks.label.xml Template File: sources-vasinks-54b.tmpl.c */ /* * @description * CWE: 134 Uncontrolled Format String * BadSource: connect_socket Read data using a connect socket (client side) * GoodSource: Copy a fixed string into data * Sinks: w32_vsnprintf * GoodSink: vsnprintf with a format string * BadSink : vsnprintf without a format string * Flow Variant: 54 Data flow: data passed as an argument from one function through three others to a fifth; all five functions are in different source files * * */ #include <stdarg.h> #include "std_testcase.h" #ifndef _WIN32 #include <wchar.h> #endif #ifdef _WIN32 #include <winsock2.h> #include <windows.h> #include <direct.h> #pragma comment(lib, "ws2_32") /* include ws2_32.lib when linking */ #define CLOSE_SOCKET closesocket #else /* NOT _WIN32 */ #include <sys/types.h> #include <sys/socket.h> #include <netinet/in.h> #include <arpa/inet.h> #include <unistd.h> #define INVALID_SOCKET -1 #define SOCKET_ERROR -1 #define CLOSE_SOCKET close #define SOCKET int #endif #define TCP_PORT 27015 #define IP_ADDRESS "127.0.0.1" #ifndef OMITBAD /* bad function declaration */ void CWE134_Uncontrolled_Format_String__char_connect_socket_w32_vsnprintf_54c_badSink(char * data); void CWE134_Uncontrolled_Format_String__char_connect_socket_w32_vsnprintf_54b_badSink(char * data) { CWE134_Uncontrolled_Format_String__char_connect_socket_w32_vsnprintf_54c_badSink(data); } #endif /* OMITBAD */ #ifndef OMITGOOD /* goodG2B uses the GoodSource with the BadSink */ void CWE134_Uncontrolled_Format_String__char_connect_socket_w32_vsnprintf_54c_goodG2BSink(char * data); void CWE134_Uncontrolled_Format_String__char_connect_socket_w32_vsnprintf_54b_goodG2BSink(char * data) { CWE134_Uncontrolled_Format_String__char_connect_socket_w32_vsnprintf_54c_goodG2BSink(data); } /* goodB2G uses 
the BadSource with the GoodSink */ void CWE134_Uncontrolled_Format_String__char_connect_socket_w32_vsnprintf_54c_goodB2GSink(char * data); void CWE134_Uncontrolled_Format_String__char_connect_socket_w32_vsnprintf_54b_goodB2GSink(char * data) { CWE134_Uncontrolled_Format_String__char_connect_socket_w32_vsnprintf_54c_goodB2GSink(data); } #endif /* OMITGOOD */
{ "pile_set_name": "Github" }
# Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. PooledSender.senderDisconnectFail=No pude desconectar al remitente receiverBase.bind.failed=Fallo al atachar el escuchador de replicación en la dirección: [{0}]\n receiverBase.unable.bind=Imposible vincular el socket del servidor a:[{0}] lanzando error. receiverBase.unable.bind.udp=Imposible atar el socket UDP a:[{0}] lanzando error.\n
{ "pile_set_name": "Github" }
using Grand.Domain.Catalog;
using System.Collections.Generic;
using System.Threading.Tasks;

namespace Grand.Services.Catalog
{
    /// <summary>
    /// Product tag service interface
    /// </summary>
    public partial interface IProductTagService
    {
        /// <summary>
        /// Delete a product tag
        /// </summary>
        /// <param name="productTag">Product tag to delete</param>
        Task DeleteProductTag(ProductTag productTag);

        /// <summary>
        /// Gets all product tags
        /// </summary>
        /// <returns>Product tags</returns>
        Task<IList<ProductTag>> GetAllProductTags();

        /// <summary>
        /// Gets product tag
        /// </summary>
        /// <param name="productTagId">Product tag identifier</param>
        /// <returns>Product tag</returns>
        Task<ProductTag> GetProductTagById(string productTagId);

        /// <summary>
        /// Gets product tag by name
        /// </summary>
        /// <param name="name">Product tag name</param>
        /// <returns>Product tag</returns>
        Task<ProductTag> GetProductTagByName(string name);

        /// <summary>
        /// Gets product tag by sename (presumably the search-engine-friendly
        /// name/slug -- confirm against the ProductTag entity)
        /// </summary>
        /// <param name="sename">Product tag sename</param>
        /// <returns>Product tag</returns>
        Task<ProductTag> GetProductTagBySeName(string sename);

        /// <summary>
        /// Inserts a product tag
        /// </summary>
        /// <param name="productTag">Product tag</param>
        Task InsertProductTag(ProductTag productTag);

        /// <summary>
        /// Update a product tag
        /// </summary>
        /// <param name="productTag">Product tag</param>
        Task UpdateProductTag(ProductTag productTag);

        /// <summary>
        /// Assign a tag to the product
        /// </summary>
        /// <param name="productTag">Product Tag</param>
        Task AttachProductTag(ProductTag productTag);

        /// <summary>
        /// Detach a tag from the product
        /// </summary>
        /// <param name="productTag">Product Tag</param>
        Task DetachProductTag(ProductTag productTag);

        /// <summary>
        /// Get number of products
        /// </summary>
        /// <param name="productTagId">Product tag identifier</param>
        /// <param name="storeId">Store identifier</param>
        /// <returns>Number of products</returns>
        Task<int> GetProductCount(string productTagId, string storeId);
    }
}
{ "pile_set_name": "Github" }
/* * A 32-bit implementation of the TEA algorithm * Copyright (c) 2015 Vesselin Bontchev * * This file is part of FFmpeg. * * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ #ifndef AVUTIL_TEA_H #define AVUTIL_TEA_H #include <stdint.h> /** * @file * @brief Public header for libavutil TEA algorithm * @defgroup lavu_tea TEA * @ingroup lavu_crypto * @{ */ extern const int av_tea_size; struct AVTEA; /** * Allocate an AVTEA context * To free the struct: av_free(ptr) */ struct AVTEA *av_tea_alloc(void); /** * Initialize an AVTEA context. * * @param ctx an AVTEA context * @param key a key of 16 bytes used for encryption/decryption * @param rounds the number of rounds in TEA (64 is the "standard") */ void av_tea_init(struct AVTEA *ctx, const uint8_t key[16], int rounds); /** * Encrypt or decrypt a buffer using a previously initialized context. * * @param ctx an AVTEA context * @param dst destination array, can be equal to src * @param src source array, can be equal to dst * @param count number of 8 byte blocks * @param iv initialization vector for CBC mode, if NULL then ECB will be used * @param decrypt 0 for encryption, 1 for decryption */ void av_tea_crypt(struct AVTEA *ctx, uint8_t *dst, const uint8_t *src, int count, uint8_t *iv, int decrypt); /** * @} */ #endif /* AVUTIL_TEA_H */
{ "pile_set_name": "Github" }
// Copyright 2015 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Package snap persists raft snapshots to disk as CRC-protected,
// protobuf-encoded files named "<term>-<index>.snap".
package snap

import (
	"errors"
	"fmt"
	"hash/crc32"
	"io/ioutil"
	"os"
	"path/filepath"
	"sort"
	"strings"
	"time"

	"go.etcd.io/etcd/etcdserver/api/snap/snappb"
	pioutil "go.etcd.io/etcd/pkg/ioutil"
	"go.etcd.io/etcd/pkg/pbutil"
	"go.etcd.io/etcd/raft"
	"go.etcd.io/etcd/raft/raftpb"

	"github.com/coreos/pkg/capnslog"
	"go.uber.org/zap"
)

const snapSuffix = ".snap"

var (
	plog = capnslog.NewPackageLogger("go.etcd.io/etcd/v3", "snap")

	ErrNoSnapshot    = errors.New("snap: no available snapshot")
	ErrEmptySnapshot = errors.New("snap: empty snapshot")
	ErrCRCMismatch   = errors.New("snap: crc mismatch")
	// Castagnoli polynomial, matching the CRC stored in each snap file.
	crcTable = crc32.MakeTable(crc32.Castagnoli)

	// A map of valid files that can be present in the snap folder.
	validFiles = map[string]bool{
		"db": true,
	}
)

// Snapshotter reads and writes snapshot files under a single directory.
type Snapshotter struct {
	lg  *zap.Logger // may be nil; falls back to the package-level plog
	dir string      // directory holding *.snap files
}

// New returns a Snapshotter that stores snapshots in dir.
func New(lg *zap.Logger, dir string) *Snapshotter {
	return &Snapshotter{
		lg:  lg,
		dir: dir,
	}
}

// SaveSnap persists the given snapshot to disk; empty snapshots are a no-op.
func (s *Snapshotter) SaveSnap(snapshot raftpb.Snapshot) error {
	if raft.IsEmptySnap(snapshot) {
		return nil
	}
	return s.save(&snapshot)
}

// save marshals the snapshot, wraps it with a CRC, and fsyncs it to
// "<term>-<index>.snap"; a partially-written file is removed on error.
func (s *Snapshotter) save(snapshot *raftpb.Snapshot) error {
	start := time.Now()

	// Zero-padded hex term/index so lexicographic order == logical order.
	fname := fmt.Sprintf("%016x-%016x%s", snapshot.Metadata.Term, snapshot.Metadata.Index, snapSuffix)
	b := pbutil.MustMarshal(snapshot)
	crc := crc32.Update(0, crcTable, b)
	snap := snappb.Snapshot{Crc: crc, Data: b}
	d, err := snap.Marshal()
	if err != nil {
		return err
	}
	snapMarshallingSec.Observe(time.Since(start).Seconds())

	spath := filepath.Join(s.dir, fname)

	fsyncStart := time.Now()
	err = pioutil.WriteAndSyncFile(spath, d, 0666)
	snapFsyncSec.Observe(time.Since(fsyncStart).Seconds())

	if err != nil {
		if s.lg != nil {
			s.lg.Warn("failed to write a snap file", zap.String("path", spath), zap.Error(err))
		}
		rerr := os.Remove(spath)
		if rerr != nil {
			if s.lg != nil {
				// NOTE(review): logs the original write err rather than rerr;
				// looks like it should be zap.Error(rerr) — confirm upstream.
				s.lg.Warn("failed to remove a broken snap file", zap.String("path", spath), zap.Error(err))
			} else {
				plog.Errorf("failed to remove broken snapshot file %s", spath)
			}
		}
		return err
	}

	snapSaveSec.Observe(time.Since(start).Seconds())
	return nil
}

// Load returns the newest snapshot that can be read and validated;
// ErrNoSnapshot if none loads successfully.
func (s *Snapshotter) Load() (*raftpb.Snapshot, error) {
	names, err := s.snapNames()
	if err != nil {
		return nil, err
	}
	var snap *raftpb.Snapshot
	// names are newest-first; stop at the first loadable one.
	for _, name := range names {
		if snap, err = loadSnap(s.lg, s.dir, name); err == nil {
			break
		}
	}
	if err != nil {
		return nil, ErrNoSnapshot
	}
	return snap, nil
}

// loadSnap reads one snap file; an unreadable file is renamed to
// "<name>.broken" so it is skipped on future loads.
func loadSnap(lg *zap.Logger, dir, name string) (*raftpb.Snapshot, error) {
	fpath := filepath.Join(dir, name)
	snap, err := Read(lg, fpath)
	if err != nil {
		brokenPath := fpath + ".broken"
		if lg != nil {
			lg.Warn("failed to read a snap file", zap.String("path", fpath), zap.Error(err))
		}
		if rerr := os.Rename(fpath, brokenPath); rerr != nil {
			if lg != nil {
				lg.Warn("failed to rename a broken snap file", zap.String("path", fpath), zap.String("broken-path", brokenPath), zap.Error(rerr))
			} else {
				plog.Warningf("cannot rename broken snapshot file %v to %v: %v", fpath, brokenPath, rerr)
			}
		} else {
			if lg != nil {
				lg.Warn("renamed to a broken snap file", zap.String("path", fpath), zap.String("broken-path", brokenPath))
			}
		}
	}
	return snap, err
}

// Read reads the snapshot named by snapname and returns the snapshot.
// It validates the stored CRC before unmarshalling the raft snapshot.
func Read(lg *zap.Logger, snapname string) (*raftpb.Snapshot, error) {
	b, err := ioutil.ReadFile(snapname)
	if err != nil {
		if lg != nil {
			lg.Warn("failed to read a snap file", zap.String("path", snapname), zap.Error(err))
		} else {
			plog.Errorf("cannot read file %v: %v", snapname, err)
		}
		return nil, err
	}

	if len(b) == 0 {
		if lg != nil {
			lg.Warn("failed to read empty snapshot file", zap.String("path", snapname))
		} else {
			plog.Errorf("unexpected empty snapshot")
		}
		return nil, ErrEmptySnapshot
	}

	var serializedSnap snappb.Snapshot
	if err = serializedSnap.Unmarshal(b); err != nil {
		if lg != nil {
			lg.Warn("failed to unmarshal snappb.Snapshot", zap.String("path", snapname), zap.Error(err))
		} else {
			plog.Errorf("corrupted snapshot file %v: %v", snapname, err)
		}
		return nil, err
	}

	if len(serializedSnap.Data) == 0 || serializedSnap.Crc == 0 {
		if lg != nil {
			lg.Warn("failed to read empty snapshot data", zap.String("path", snapname))
		} else {
			plog.Errorf("unexpected empty snapshot")
		}
		return nil, ErrEmptySnapshot
	}

	crc := crc32.Update(0, crcTable, serializedSnap.Data)
	if crc != serializedSnap.Crc {
		if lg != nil {
			lg.Warn("snap file is corrupt",
				zap.String("path", snapname),
				zap.Uint32("prev-crc", serializedSnap.Crc),
				zap.Uint32("new-crc", crc),
			)
		} else {
			plog.Errorf("corrupted snapshot file %v: crc mismatch", snapname)
		}
		return nil, ErrCRCMismatch
	}

	var snap raftpb.Snapshot
	if err = snap.Unmarshal(serializedSnap.Data); err != nil {
		if lg != nil {
			lg.Warn("failed to unmarshal raftpb.Snapshot", zap.String("path", snapname), zap.Error(err))
		} else {
			plog.Errorf("corrupted snapshot file %v: %v", snapname, err)
		}
		return nil, err
	}
	return &snap, nil
}

// snapNames returns the filename of the snapshots in logical time order (from newest to oldest).
// If there is no available snapshots, an ErrNoSnapshot will be returned.
func (s *Snapshotter) snapNames() ([]string, error) {
	dir, err := os.Open(s.dir)
	if err != nil {
		return nil, err
	}
	defer dir.Close()
	names, err := dir.Readdirnames(-1)
	if err != nil {
		return nil, err
	}
	snaps := checkSuffix(s.lg, names)
	if len(snaps) == 0 {
		return nil, ErrNoSnapshot
	}
	// Reverse-sorted: newest (highest term/index) first.
	sort.Sort(sort.Reverse(sort.StringSlice(snaps)))
	return snaps, nil
}

// checkSuffix filters names down to *.snap files, warning about any file
// that is neither a snapshot nor a known-valid file (e.g. "db").
func checkSuffix(lg *zap.Logger, names []string) []string {
	snaps := []string{}
	for i := range names {
		if strings.HasSuffix(names[i], snapSuffix) {
			snaps = append(snaps, names[i])
		} else {
			// If we find a file which is not a snapshot then check if it's
			// a valid file. If not throw out a warning.
			if _, ok := validFiles[names[i]]; !ok {
				if lg != nil {
					lg.Warn("found unexpected non-snap file; skipping", zap.String("path", names[i]))
				} else {
					plog.Warningf("skipped unexpected non snapshot file %v", names[i])
				}
			}
		}
	}
	return snaps
}
{ "pile_set_name": "Github" }
// Serializers that flatten rule-engine call arguments (held in boost::any)
// into string->string maps (serialized_parameter_t) for the iRODS rule
// engine plugin framework. Each serializer casts the any to one concrete
// type and records its fields; a failed cast yields INVALID_ANY_CAST.
#include "irods_re_serialization.hpp"
#include "irods_plugin_context.hpp"
#include "rodsErrorTable.h"

#include "boost/lexical_cast.hpp"

namespace irods {
    namespace re_serialization {

        // Copy every key/value of a keyValPair_t into _out; missing values
        // become "empty_value", an empty pair list records a nullptr marker.
        static void serialize_keyValPair(
                const keyValPair_t&     _kvp,
                serialized_parameter_t& _out) {
            if(_kvp.len > 0) {
                for(int i = 0; i < _kvp.len; ++i) {
                    if(_kvp.keyWord && _kvp.keyWord[i]) {
                        if(_kvp.value && _kvp.value[i]) {
                            _out[_kvp.keyWord[i]] = _kvp.value[i];
                        }
                        else {
                            _out[_kvp.keyWord[i]] = "empty_value";
                        }
                    }
                }
            }
            else {
                _out["keyValPair_t"] = "nullptr";
            }
        }

        static error serialize_float_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                float* f = boost::any_cast<float*>(_p);
                _out["float_pointer"] = boost::lexical_cast<std::string>(*f);
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast float*" );
            }
            return SUCCESS();
        } // serialize_float_ptr

        static error serialize_const_std_string_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                const std::string* s = boost::any_cast<const std::string*>(_p);
                _out["const_std_string_ptr"] = *s;
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast const_std_string*" );
            }
            return SUCCESS();
        } // serialize_const_std_string_ptr

        static error serialize_std_string_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                std::string* s = boost::any_cast<std::string*>(_p);
                _out["std_string_ptr"] = *s;
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast std_string_ptr" );
            }
            return SUCCESS();
        } // serialize_std_string_ptr

        static error serialize_std_string(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                _out["std_string"] = boost::any_cast<std::string>(_p);
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast std_string*" );
            }
            return SUCCESS();
        } // serialize_std_string

        static error serialize_hierarchy_parser_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                hierarchy_parser* p = boost::any_cast<hierarchy_parser*>(_p);
                std::string hier;
                p->str(hier);
                _out["hierarchy_parser_ptr"] = hier;
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast hierarchy_parser_ptr*" );
            }
            return SUCCESS();
        } // serialize_hierarchy_parser_ptr

        static error serialize_rodslong(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                rodsLong_t l = boost::any_cast<rodsLong_t>(_p);
                _out["rodslong"] = boost::lexical_cast<std::string>(l);
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast rodslong" );
            }
            return SUCCESS();
        } // serialize_rodslong

        static error serialize_rodslong_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                rodsLong_t* l = boost::any_cast<rodsLong_t*>(_p);
                _out["rodslong_ptr"] = boost::lexical_cast<std::string>(*l);
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast rodslong" );
            }
            return SUCCESS();
        } // serialize_rodslong_ptr

        static error serialize_sizet(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                size_t l = boost::any_cast<size_t>(_p);
                _out["sizet"] = boost::lexical_cast<std::string>(l);
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast sizet" );
            }
            return SUCCESS();
        } // serialize_sizet

        static error serialize_int(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                int l = boost::any_cast<int>(_p);
                _out["int"] = boost::lexical_cast<std::string>(l);
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast int" );
            }
            return SUCCESS();
        } // serialize_int

        static error serialize_int_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                int* l = boost::any_cast<int*>(_p);
                _out["int_ptr"] = boost::lexical_cast<std::string>(*l);
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast int ptr" );
            }
            return SUCCESS();
        } // serialize_int_ptr

        static error serialize_char_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                char* l = boost::any_cast<char*>(_p);
                if(l) {
                    _out["char_ptr"] = l;
                }
                else {
                    _out["char_ptr"] = "nullptr";
                }
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast char ptr" );
            }
            return SUCCESS();
        } // serialize_char_ptr

        static error serialize_const_char_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                const char* l = boost::any_cast<const char*>(_p);
                if(l) {
                    _out["const_char_ptr"] = l;
                }
                else {
                    // NOTE(review): null case records under "char_ptr", not
                    // "const_char_ptr" — looks like a copy-paste slip; confirm
                    // no consumer depends on the current key before fixing.
                    _out["char_ptr"] = "nullptr";
                }
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast const char ptr" );
            }
            return SUCCESS();
        } // serialize_const_char_ptr

        // Flatten the identity-relevant parts of an rsComm_t: client address,
        // auth scheme, and both the proxy and client user records.
        static error serialize_rsComm_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                rsComm_t* l = boost::any_cast<rsComm_t*>(_p);
                if (l) {
                    _out["client_addr"] = l->clientAddr;
                    if(l->auth_scheme) {_out["auth_scheme"] = l->auth_scheme;}

                    _out["proxy_user_name"] = l->proxyUser.userName;
                    _out["proxy_rods_zone"] = l->proxyUser.rodsZone;
                    _out["proxy_user_type"] = l->proxyUser.userType;
                    _out["proxy_sys_uid"] = boost::lexical_cast<std::string>(l->proxyUser.sysUid);
                    _out["proxy_auth_info_auth_scheme"] = l->proxyUser.authInfo.authScheme;
                    _out["proxy_auth_info_auth_flag"] = boost::lexical_cast<std::string>(l->proxyUser.authInfo.authFlag);
                    _out["proxy_auth_info_flag"] = boost::lexical_cast<std::string>(l->proxyUser.authInfo.flag);
                    _out["proxy_auth_info_ppid"] = boost::lexical_cast<std::string>(l->proxyUser.authInfo.ppid);
                    _out["proxy_auth_info_host"] = l->proxyUser.authInfo.host;
                    _out["proxy_auth_info_auth_str"] = l->proxyUser.authInfo.authStr;
                    _out["proxy_user_other_info_user_info"] = l->proxyUser.userOtherInfo.userInfo;
                    _out["proxy_user_other_info_user_comments"] = l->proxyUser.userOtherInfo.userComments;
                    _out["proxy_user_other_info_user_create"] = l->proxyUser.userOtherInfo.userCreate;
                    _out["proxy_user_other_info_user_modify"] = l->proxyUser.userOtherInfo.userModify;

                    _out["user_user_name"] = l->clientUser.userName;
                    _out["user_rods_zone"] = l->clientUser.rodsZone;
                    _out["user_user_type"] = l->clientUser.userType;
                    _out["user_sys_uid"] = boost::lexical_cast<std::string>(l->clientUser.sysUid);
                    _out["user_auth_info_auth_scheme"] = l->clientUser.authInfo.authScheme;
                    _out["user_auth_info_auth_flag"] = boost::lexical_cast<std::string>(l->clientUser.authInfo.authFlag);
                    _out["user_auth_info_flag"] = boost::lexical_cast<std::string>(l->clientUser.authInfo.flag);
                    _out["user_auth_info_ppid"] = boost::lexical_cast<std::string>(l->clientUser.authInfo.ppid);
                    _out["user_auth_info_host"] = l->clientUser.authInfo.host;
                    _out["user_auth_info_auth_str"] = l->clientUser.authInfo.authStr;
                    _out["user_user_other_info_user_info"] = l->clientUser.userOtherInfo.userInfo;
                    _out["user_user_other_info_user_comments"] = l->clientUser.userOtherInfo.userComments;
                    _out["user_user_other_info_user_create"] = l->clientUser.userOtherInfo.userCreate;
                    _out["user_user_other_info_user_modify"] = l->clientUser.userOtherInfo.userModify;
                }
                else {
                    _out["rsComm_ptr"] = "nullptr";
                }
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast rsComm ptr" );
            }
            return SUCCESS();
        } // serialize_rsComm_ptr

        // Serialize a plugin_context by delegating to its first-class object's
        // get_re_vars and to serialize_rsComm_ptr for the connection.
        static error serialize_plugin_context(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                plugin_context l = boost::any_cast<plugin_context>(_p);
                if( l.fco().get() ) {
                    l.fco()->get_re_vars( _out );
                }

                if( l.comm() ) {
                    error ret = serialize_rsComm_ptr( l.comm(), _out );
                    if(!ret.ok()) {
                        return PASS(ret);
                    }
                }
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast plugin_context" );
            }
            return SUCCESS();
        } // serialize_plugin_context

        // Helper: record the fields of a special collection, guarding the two
        // numeric conversions against bad_lexical_cast.
        static void serialize_spec_coll_info_ptr(
                specColl_t*             _sc,
                serialized_parameter_t& _out) {
            if( _sc ) {
                _out["coll_class"] = boost::lexical_cast<std::string>(_sc->collClass);
                _out["type"] = boost::lexical_cast<std::string>(_sc->type);
                _out["collection"] = _sc->collection;
                _out["obj_path"] = _sc->objPath;
                _out["resource"] = _sc->resource;
                _out["resc_hier"] = _sc->rescHier;
                _out["phy_path"] = _sc->phyPath;
                _out["cache_dir"] = _sc->cacheDir;
                try {
                    _out["cache_dirty"] = boost::lexical_cast<std::string>(_sc->cacheDirty);
                }
                catch( boost::bad_lexical_cast& ) {
                    _out["cache_dirty"] = "<unconvertable>";
                }
                try {
                    _out["repl_num"] = boost::lexical_cast<std::string>(_sc->replNum);
                }
                catch( boost::bad_lexical_cast& ) {
                    _out["repl_num"] = "<unconvertable>";
                }
            }
        } // serialize_spec_coll_info_ptr

        static error serialize_dataObjInp_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                dataObjInp_t* l = boost::any_cast<dataObjInp_t*>(_p);
                if (l) {
                    _out["obj_path"] = l->objPath;
                    _out["create_mode"] = boost::lexical_cast<std::string>(l->createMode);
                    _out["open_flags"] = boost::lexical_cast<std::string>(l->openFlags);
                    _out["offset"] = boost::lexical_cast<std::string>(l->offset);
                    _out["data_size"] = boost::lexical_cast<std::string>(l->dataSize);
                    _out["num_threads"] = boost::lexical_cast<std::string>(l->numThreads);
                    _out["opr_type"] = boost::lexical_cast<std::string>(l->oprType);
                    if(l->specColl) {
                        serialize_spec_coll_info_ptr( l->specColl, _out );
                    }
                    serialize_keyValPair(l->condInput, _out);
                }
                else {
                    _out["dataObjInp_ptr"] = "nullptr";
                }
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast dataObjInp ptr" );
            }
            return SUCCESS();
        } // serialize_dataObjInp_ptr

        // Serialize a bulk-operation input: one logical_path_<i>/data_size_<i>
        // pair per row, with per-object sizes derived from cumulative offsets.
        static error serialize_bulkOprInp_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                auto* blk = boost::any_cast<bulkOprInp_t*>(_p);
                if(!blk) {
                    _out["bulkOprInp_ptr"] = "nullptr";
                    return SUCCESS();
                }
                serialize_keyValPair(blk->condInput, _out);
                auto obj_path = getSqlResultByInx(&blk->attriArray, COL_DATA_NAME);
                if(!obj_path) {
                    THROW(UNMATCHED_KEY_OR_INDEX, "missing object path");
                }
                auto offset = getSqlResultByInx(&blk->attriArray, OFFSET_INX);
                if(!offset) {
                    THROW(UNMATCHED_KEY_OR_INDEX, "missing offset");
                }
                std::vector<int> offset_int{};
                for (int i = 0; i < blk->attriArray.rowCnt; ++i) {
                    // values are fixed-width strings; row i starts at len*i
                    offset_int.push_back(atoi(&offset->value[offset->len * i]));
                }
                for(auto i = 0; i < blk->attriArray.rowCnt; ++i) {
                    auto lp = std::string{"logical_path_"}+std::to_string(i);
                    auto ds = std::string{"data_size_"}+std::to_string(i);
                    _out[lp] = &obj_path->value[obj_path->len * i];
                    // size of object i = its cumulative offset minus the previous one
                    _out[ds] = std::to_string(i==0 ? offset_int[0] : offset_int[i]-offset_int[i-1]);
                } // for i
            }
            catch ( std::exception& ) {
                // NOTE(review): message says dataObjInp — copy-paste from the
                // serializer above; confirm and correct the text upstream.
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast dataObjInp ptr" );
            }
            return SUCCESS();
        } // serialize_bulkOprInp_ptr

        static error serialize_authResponseInp_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                authResponseInp_t* l = boost::any_cast<authResponseInp_t*>(_p);
                if (l) {
                    if(l->response) {
                        _out["response"] = l->response;
                    }
                    if(l->username) {
                        _out["username"] = l->username;
                    }
                }
                else {
                    _out["authResponseInp_ptr"] = "nullptr";
                }
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast authResponseInp*" );
            }
            return SUCCESS();
        } // serialize_authResponseInp_ptr

        static error serialize_dataObjInfo_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                dataObjInfo_t* l = boost::any_cast<dataObjInfo_t*>(_p);
                if (l) {
                    _out["logical_path"] = l->objPath;
                    _out["resc_hier"] = l->rescHier;
                    _out["data_type"] = l->dataType;
                    _out["data_size"] = boost::lexical_cast<std::string>(l->dataSize);
                    _out["checksum"] = l->chksum;
                    _out["version"] = l->version;
                    _out["physical_path"] = l->filePath;
                    _out["data_owner_name"] = l->dataOwnerName;
                    _out["data_owner_zone"] = l->dataOwnerZone;
                    _out["replica_number"] = boost::lexical_cast<std::string>(l->replNum);
                    _out["replica_status"] = boost::lexical_cast<std::string>(l->replStatus);
                    _out["data_id"] = boost::lexical_cast<std::string>(l->dataId);
                    _out["coll_id"] = boost::lexical_cast<std::string>(l->collId);
                    _out["data_map_id"] = boost::lexical_cast<std::string>(l->dataMapId);
                    _out["flags"] = boost::lexical_cast<std::string>(l->flags);
                    _out["data_comments"] = l->dataComments;
                    _out["data_mode"] = l->dataMode;
                    _out["data_expiry"] = l->dataExpiry;
                    _out["data_create"] = l->dataCreate;
                    _out["data_modify"] = l->dataModify;
                    _out["data_access"] = l->dataAccess;
                    _out["data_access_index"] = boost::lexical_cast<std::string>(l->dataAccessInx);
                    _out["write_flag"] = boost::lexical_cast<std::string>(l->writeFlag);
                    _out["dest_resc_name"] = l->destRescName;
                    _out["backup_resc_name"] = l->backupRescName;
                    _out["sub_path"] = l->subPath;
                    _out["reg_uid"] = boost::lexical_cast<std::string>(l->regUid);
                    // NOTE(review): rescId is assigned directly, unlike the other
                    // numeric fields which go through lexical_cast — verify the
                    // intended string conversion here.
                    _out["resc_id"] = l->rescId;
                    if(l->specColl) {
                        serialize_spec_coll_info_ptr( l->specColl, _out );
                    }
                    serialize_keyValPair(l->condInput, _out);
                }
                else {
                    _out["dataObjInfo_ptr"] = "nullptr";
                }
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast dataObjInfo ptr" );
            }
            return SUCCESS();
        } // serialize_dataObjInfo_ptr

        static error serialize_keyValPair_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                keyValPair_t* l = boost::any_cast<keyValPair_t*>(_p);
                if (l) {
                    for(int i = 0; i < l->len; ++i) {
                        _out[l->keyWord[i]] = l->value[i];
                    }
                }
                else {
                    _out["keyValPair_ptr"] = "nullptr";
                }
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast keyValPair ptr" );
            }
            return SUCCESS();
        } // serialize_keyValPair_ptr

        static error serialize_userInfo_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                userInfo_t* l = boost::any_cast<userInfo_t*>(_p);
                if (l) {
                    _out["user_name"] = l->userName;
                    _out["rods_zone"] = l->rodsZone;
                    _out["user_type"] = l->userType;
                    _out["sys_uid"] = boost::lexical_cast<std::string>(l->sysUid);
                    _out["auth_info_auth_scheme"] = l->authInfo.authScheme;
                    _out["auth_info_auth_flag"] = boost::lexical_cast<std::string>(l->authInfo.authFlag);
                    _out["auth_info_flag"] = boost::lexical_cast<std::string>(l->authInfo.flag);
                    _out["auth_info_ppid"] = boost::lexical_cast<std::string>(l->authInfo.ppid);
                    _out["auth_info_host"] = l->authInfo.host;
                    _out["auth_info_auth_str"] = l->authInfo.authStr;
                    _out["user_other_info_user_info"] = l->userOtherInfo.userInfo;
                    _out["user_other_info_user_comments"] = l->userOtherInfo.userComments;
                    _out["user_other_info_user_create"] = l->userOtherInfo.userCreate;
                    _out["user_other_info_user_modify"] = l->userOtherInfo.userModify;
                }
                else {
                    _out["userInfo_ptr"] = "nullptr";
                }
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast userInfo ptr" );
            }
            return SUCCESS();
        } // serialize_userInfo_ptr

        static error serialize_collInfo_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                collInfo_t* l = boost::any_cast<collInfo_t*>(_p);
                if (l) {
                    _out["coll_id"] = boost::lexical_cast<std::string>(l->collId);
                    _out["coll_name"] = l->collName;
                    _out["coll_parent_name"] = l->collParentName;
                    _out["coll_owner_name"] = l->collOwnerName;
                    _out["coll_owner_zone"] = l->collOwnerZone;
                    _out["coll_map_id"] = boost::lexical_cast<std::string>(l->collMapId);
                    _out["coll_access_index"] = boost::lexical_cast<std::string>(l->collAccessInx);
                    _out["coll_comments"] = l->collComments;
                    _out["coll_inheritance"] = l->collInheritance;
                    _out["coll_expiry"] = l->collExpiry;
                    _out["coll_create"] = l->collCreate;
                    _out["coll_modify"] = l->collModify;
                    _out["coll_access"] = l->collAccess;
                    _out["coll_type"] = l->collType;
                    _out["coll_info1"] = l->collInfo1;
                    _out["coll_info2"] = l->collInfo2;
                    serialize_keyValPair(l->condInput, _out);
                }
                else {
                    _out["collInfo_ptr"] = "nullptr";
                }
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast collInfo ptr" );
            }
            return SUCCESS();
        } // serialize_collInfo_ptr

        static error serialize_collInp_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                collInp_t* l = boost::any_cast<collInp_t*>(_p);
                if (l) {
                    _out["coll_name"] = l->collName;
                    // NOTE(review): flags/oprType are ints assigned straight to
                    // std::string (char conversion), unlike other serializers
                    // which use lexical_cast — likely unintended; confirm.
                    _out["flags"] = l->flags;
                    _out["opr_type"] = l->oprType;
                    serialize_keyValPair(l->condInput, _out);
                }
                else {
                    _out["collInp_ptr"] = "nullptr";
                }
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast collInp ptr" );
            }
            return SUCCESS();
        } // serialize_collInp_ptr

        static error serialize_modAVUMetaInp_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                modAVUMetadataInp_t* l = boost::any_cast<modAVUMetadataInp_t*>(_p);
                if (l) {
                    if( l->arg0 ) { _out["arg0"] = l->arg0; }
                    if( l->arg1 ) { _out["arg1"] = l->arg1; }
                    if( l->arg2 ) { _out["arg2"] = l->arg2; }
                    if( l->arg3 ) { _out["arg3"] = l->arg3; }
                    if( l->arg4 ) { _out["arg4"] = l->arg4; }
                    if( l->arg5 ) { _out["arg5"] = l->arg5; }
                    if( l->arg6 ) { _out["arg6"] = l->arg6; }
                    if( l->arg7 ) { _out["arg7"] = l->arg7; }
                    if( l->arg8 ) { _out["arg8"] = l->arg8; }
                    if( l->arg9 ) { _out["arg9"] = l->arg9; }
                }
                else {
                    _out["modAVUMetaInp_ptr"] = "nullptr";
                }
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast modAVUMetaInp ptr" );
            }
            return SUCCESS();
        } // serialize_modAVUMetaInp_ptr

        static error serialize_modAccessControlInp_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                modAccessControlInp_t* l = boost::any_cast<modAccessControlInp_t*>(_p);
                if (l) {
                    _out["recursive_flag"] = boost::lexical_cast<std::string>(l->recursiveFlag);
                    if( l->accessLevel ) { _out["access_level"] = l->accessLevel; }
                    if( l->userName ) { _out["user_name"] = l->userName; }
                    if( l->zone ) { _out["zone"] = l->zone; }
                    if( l->path ) { _out["path"] = l->path; }
                }
                else {
                    _out["modAccessControlInp_ptr"] = "nullptr";
                }
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast modAccessControlInp ptr" );
            }
            return SUCCESS();
        } // serialize_modAccessControlInp_ptr

        static error serialize_modDataObjMeta_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                modDataObjMeta_t* l = boost::any_cast<modDataObjMeta_t*>(_p);
                if (l) {
                    // serialize the target object, then overlay the reg params
                    error ret = serialize_dataObjInfo_ptr( l->dataObjInfo, _out);
                    if(!ret.ok()) {
                        irods::log(PASS(ret));
                    }
                    if( l->regParam ) {
                        for(int i = 0; i < l->regParam->len; ++i) {
                            _out[l->regParam->keyWord[i]] = l->regParam->value[i];
                        }
                    }
                }
                else {
                    _out["modDataObjMeta_ptr"] = "nullptr";
                }
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast modDataObjMeta ptr" );
            }
            return SUCCESS();
        } // serialize_modDataObjMeta_ptr

        static error serialize_ruleExecSubmitInp_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                ruleExecSubmitInp_t* l = boost::any_cast<ruleExecSubmitInp_t*>(_p);
                if (l) {
                    _out["rule_name"] = l->ruleName;
                    _out["rei_file_path"] = l->reiFilePath;
                    _out["user_name"] = l->userName;
                    _out["exe_address"] = l->exeAddress;
                    _out["exe_time"] = l->exeTime;
                    _out["exe_frequency"] = l->exeFrequency;
                    _out["priority"] = l->priority;
                    _out["last_exec_time"] = l->lastExecTime;
                    _out["exe_status"] = l->exeStatus;
                    _out["estimate_exe_time"] = l->estimateExeTime;
                    _out["notification_addr"] = l->notificationAddr;
                    _out["rule_exec_id"] = l->ruleExecId;
                    for(int i = 0; i < l->condInput.len; ++i) {
                        _out[l->condInput.keyWord[i]] = l->condInput.value[i];
                    }
                }
                else {
                    _out["ruleExecSubmitInp_ptr"] = "nullptr";
                }
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast ruleExecSubmitInp ptr" );
            }
            return SUCCESS();
        } // serialize_ruleExecSubmitInp_ptr

        // Serialize both halves of a copy request, prefixing keys with
        // "src_" / "dst_" to keep them distinct in the flat map.
        static error serialize_dataObjCopyInp_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                dataObjCopyInp_t* l = boost::any_cast<dataObjCopyInp_t*>(_p);
                if (l) {
                    serialized_parameter_t src;
                    error ret = serialize_dataObjInp_ptr( &l->srcDataObjInp, src );
                    if(!ret.ok()) {
                        irods::log(PASS(ret));
                    }
                    else {
                        for( auto p : src ) {
                            _out["src_"+p.first] = p.second;
                        }
                    }

                    serialized_parameter_t dst;
                    ret = serialize_dataObjInp_ptr( &l->destDataObjInp, dst );
                    if(!ret.ok()) {
                        irods::log(PASS(ret));
                    }
                    else {
                        for( auto p : dst ) {
                            _out["dst_"+p.first] = p.second;
                        }
                    }
                }
                else {
                    _out["dataObjCopyInp_ptr"] = "nullptr";
                }
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast dataObjCopyInp ptr" );
            }
            return SUCCESS();
        } // serialize_dataObjCopyInp_ptr

        static error serialize_rodsObjStat_ptr_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                rodsObjStat_t** tmp = boost::any_cast<rodsObjStat_t**>(_p);
                if(tmp && *tmp) {
                    rodsObjStat_t* l = *tmp;
                    _out["obj_size"] = boost::lexical_cast<std::string>(l->objSize);
                    _out["obj_type"] = boost::lexical_cast<std::string>((int)l->objType);
                    _out["data_mode"] = boost::lexical_cast<std::string>(l->dataMode);
                    _out["data_id"] = boost::lexical_cast<std::string>(l->dataId);
                    _out["checksum"] = boost::lexical_cast<std::string>(l->chksum);
                    _out["ownerName"] = boost::lexical_cast<std::string>(l->ownerName);
                    _out["owner_zone"] = boost::lexical_cast<std::string>(l->ownerZone);
                    _out["create_time"] = boost::lexical_cast<std::string>(l->createTime);
                    _out["modify_time"] = boost::lexical_cast<std::string>(l->modifyTime);
                    _out["resc_hier"] = boost::lexical_cast<std::string>(l->rescHier);
                    if(l->specColl) {
                        serialize_spec_coll_info_ptr( l->specColl, _out );
                    }
                }
                else {
                    _out["null_value"] = "null_value";
                }
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast rodsObjStat ptr ptr" );
            }
            return SUCCESS();
        } // serialize_rodsObjStat_ptr_ptr

        // Same field set as serialize_rodsObjStat_ptr_ptr, but for a single
        // indirection; keep the two in sync if fields change.
        static error serialize_rodsObjStat_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                rodsObjStat_t* tmp = boost::any_cast<rodsObjStat_t*>(_p);
                if(tmp) {
                    rodsObjStat_t* l = tmp;
                    _out["obj_size"] = boost::lexical_cast<std::string>(l->objSize);
                    _out["obj_type"] = boost::lexical_cast<std::string>((int)l->objType);
                    _out["data_mode"] = boost::lexical_cast<std::string>(l->dataMode);
                    _out["data_id"] = boost::lexical_cast<std::string>(l->dataId);
                    _out["checksum"] = boost::lexical_cast<std::string>(l->chksum);
                    _out["ownerName"] = boost::lexical_cast<std::string>(l->ownerName);
                    _out["owner_zone"] = boost::lexical_cast<std::string>(l->ownerZone);
                    _out["create_time"] = boost::lexical_cast<std::string>(l->createTime);
                    _out["modify_time"] = boost::lexical_cast<std::string>(l->modifyTime);
                    _out["resc_hier"] = boost::lexical_cast<std::string>(l->rescHier);
                    if(l->specColl) {
                        serialize_spec_coll_info_ptr( l->specColl, _out );
                    }
                }
                else {
                    _out["null_value"] = "null_value";
                }
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast rodsObjStat ptr" );
            }
            return SUCCESS();
        } // serialize_rodsObjStat_ptr

        static error serialize_genQueryInp_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                genQueryInp_t* tmp = boost::any_cast<genQueryInp_t*>(_p);
                if(tmp) {
                    genQueryInp_t* l = tmp;
                    _out["maxRows"] = boost::lexical_cast<std::string>(l->maxRows);
                    _out["continueInx"] = boost::lexical_cast<std::string>(l->continueInx);
                    _out["rowOffset"] = boost::lexical_cast<std::string>(l->rowOffset);
                    _out["options"] = boost::lexical_cast<std::string>(l->options);
                    for(int i = 0; i < l->condInput.len; ++i) {
                        _out[l->condInput.keyWord[i]] = l->condInput.value[i];
                    }
                    // select columns keyed by column index
                    for (int i = 0; i < l->selectInp.len; ++i) {
                        std::string index = boost::lexical_cast<std::string>(l->selectInp.inx[i]);
                        std::string value = boost::lexical_cast<std::string>(l->selectInp.value[i]);
                        _out["select_" + index] = value;
                    }
                    // where conditions keyed by column index
                    for (int i = 0; i < l->sqlCondInp.len; ++i) {
                        std::string index = boost::lexical_cast<std::string>(l->sqlCondInp.inx[i]);
                        _out["where_" + index] = l->sqlCondInp.value[i];
                    }
                }
                else {
                    _out["null_value"] = "null_value";
                }
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast genQueryInp ptr" );
            }
            return SUCCESS();
        } // serialize_genQueryInp_ptr

        static error serialize_genQueryOut_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                genQueryOut_t* tmp = boost::any_cast<genQueryOut_t*>(_p);
                if (tmp) {
                    genQueryOut_t* l = tmp;
                    _out["rowCnt"] = boost::lexical_cast<std::string>(l->rowCnt);
                    _out["attriCnt"] = boost::lexical_cast<std::string>(l->attriCnt);
                    _out["continueInx"] = boost::lexical_cast<std::string>(l->continueInx);
                    _out["totalRowCount"] = boost::lexical_cast<std::string>(l->totalRowCount);
                    for (int i = 0; i < l->attriCnt; ++i) {
                        for (int j = 0; j < l->rowCnt; ++j) {
                            std::string i_str = boost::lexical_cast<std::string>(i);
                            std::string j_str = boost::lexical_cast<std::string>(j);
                            _out["attriInx_" + i_str] = boost::lexical_cast<std::string>(l->sqlResult[i].attriInx);
                            _out["len_" + i_str] = boost::lexical_cast<std::string>(l->sqlResult[i].len);
                            // value is a packed array of fixed-width C strings; row j
                            // starts at value + j*len
                            _out["value_" + j_str + "_" + i_str] = boost::lexical_cast<std::string>(l->sqlResult[i].value+j*l->sqlResult[i].len);
                        }
                    }
                }
                else {
                    _out["null_value"] = "null_value";
                }
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast genQueryOut ptr" );
            }
            return SUCCESS();
        } //serialize_genQueryOut_ptr

        static error serialize_char_ptr_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                char** tmp = boost::any_cast<char**>(_p);
                if(tmp && *tmp ) {
                    _out["value"] = *tmp;
                }
                else {
                    _out["null_value"] = "null_value";
                }
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast char ptr ptr" );
            }
            return SUCCESS();
        } // serialize_char_ptr_ptr

#if 0
        // Template for adding a new serializer — copy, rename, fill in fields.
        static error serialize_XXXX_ptr(
                boost::any               _p,
                serialized_parameter_t& _out) {
            try {
                XXXX_t* l = boost::any_cast<XXXX_t*>(_p);
            }
            catch ( std::exception& ) {
                return ERROR(
                        INVALID_ANY_CAST,
                        "failed to cast XXXX ptr" );
            }
            return SUCCESS();
        } // serialize_XXXX_ptr
#endif

        // Registry mapping a parameter's type_index to its serializer.
        // Function-local static: built once on first use.
        serialization_map_t& get_serialization_map() {
            static serialization_map_t the_map {
                { std::type_index(typeid(float*)), serialize_float_ptr },
                { std::type_index(typeid(const std::string*)), serialize_const_std_string_ptr },
                { std::type_index(typeid(std::string*)), serialize_std_string_ptr },
                { std::type_index(typeid(std::string)), serialize_std_string },
                { std::type_index(typeid(hierarchy_parser*)), serialize_hierarchy_parser_ptr },
                { std::type_index(typeid(rodsLong_t)), serialize_rodslong },
                { std::type_index(typeid(rodsLong_t*)), serialize_rodslong_ptr },
                { std::type_index(typeid(size_t)), serialize_sizet },
                { std::type_index(typeid(int)), serialize_int },
                { std::type_index(typeid(int*)), serialize_int_ptr },
                { std::type_index(typeid(char*)), serialize_char_ptr },
                { std::type_index(typeid(const char*)), serialize_const_char_ptr },
                { std::type_index(typeid(rsComm_t*)), serialize_rsComm_ptr },
                { std::type_index(typeid(plugin_context)), serialize_plugin_context },
                { std::type_index(typeid(dataObjInp_t*)), serialize_dataObjInp_ptr },
                { std::type_index(typeid(bulkOprInp_t*)), serialize_bulkOprInp_ptr },
                { std::type_index(typeid(authResponseInp_t*)), serialize_authResponseInp_ptr },
                { std::type_index(typeid(dataObjInfo_t*)), serialize_dataObjInfo_ptr },
                { std::type_index(typeid(keyValPair_t*)), serialize_keyValPair_ptr },
                { std::type_index(typeid(userInfo_t*)), serialize_userInfo_ptr },
                { std::type_index(typeid(collInfo_t*)), serialize_collInfo_ptr },
                { std::type_index(typeid(collInp_t*)), serialize_collInp_ptr },
                { std::type_index(typeid(modAVUMetadataInp_t*)), serialize_modAVUMetaInp_ptr },
                { std::type_index(typeid(modAccessControlInp_t*)), serialize_modAccessControlInp_ptr },
                { std::type_index(typeid(modDataObjMeta_t*)), serialize_modDataObjMeta_ptr },
                { std::type_index(typeid(ruleExecSubmitInp_t*)), serialize_ruleExecSubmitInp_ptr },
                { std::type_index(typeid(dataObjCopyInp_t*)), serialize_dataObjCopyInp_ptr },
                { std::type_index(typeid(rodsObjStat_t**)), serialize_rodsObjStat_ptr_ptr },
                { std::type_index(typeid(rodsObjStat_t*)), serialize_rodsObjStat_ptr },
                { std::type_index(typeid(genQueryInp_t*)), serialize_genQueryInp_ptr },
                { std::type_index(typeid(genQueryOut_t*)), serialize_genQueryOut_ptr },
                { std::type_index(typeid(char**)), serialize_char_ptr_ptr }
            };
            return the_map;
        } // get_serialization_map

        // Register a serializer for a new type; fails if one already exists.
        error add_operation(
                const index_t& _index,
                operation_t    _operation ) {
            serialization_map_t& the_map = get_serialization_map();
            if(the_map.find(_index) != the_map.end() ) {
                // NOTE(review): returns KEY_NOT_FOUND when the key DOES exist;
                // the message is right but the code reads inverted — confirm
                // whether a "duplicate key" error code is available.
                return ERROR(
                        KEY_NOT_FOUND,
                        "type_index exists");
            }

            the_map[ _index ] = _operation;

            return SUCCESS();
        } // add_operation

        // Demangle a typeid name for human-readable error text; falls back to
        // the mangled name on failure.
        static std::string demangle(const char* name) {
            int status = -4; // some arbitrary value to eliminate the compiler warning
            std::unique_ptr<char, void(*)(void*)> res {
                abi::__cxa_demangle(name, NULL, NULL, &status),
                std::free
            };
            return (status==0) ? res.get() : name ;
        }

        // Dispatch: look up the any's runtime type and run its serializer.
        // An unsupported type is reported via an "ERROR" entry (not a failed
        // error object) so callers still get a usable map.
        error serialize_parameter(
                boost::any               _in_param,
                serialized_parameter_t& _out_param ) {
            index_t idx = std::type_index(_in_param.type());
            serialization_map_t& the_map = get_serialization_map();
            if(the_map.find(idx) == the_map.end() ) {
                std::string err = "[";
                err += demangle( _in_param.type().name() );
                err += "] not supported";
                _out_param["ERROR"] = err;
                return SUCCESS();
            }

            return the_map[idx](_in_param, _out_param);
        } // serialize_parameter

    }; // re_serialization

}; // namespace irods
{ "pile_set_name": "Github" }
<?php
/**
 * Copyright © Magento, Inc. All rights reserved.
 * See COPYING.txt for license details.
 */

/**
 * Renderer for Payflow Link information
 */
namespace Magento\Paypal\Block\Adminhtml\System\Config\Payflowlink;

class Info extends \Magento\Config\Block\System\Config\Form\Field
{
    /**
     * Template path
     *
     * @var string
     */
    protected $_template = 'Magento_Paypal::system/config/payflowlink/info.phtml';

    /**
     * Render fieldset html
     *
     * A fifth column is present whenever configuration is scoped to a
     * website or store, so the info cell must span one extra column then.
     *
     * @param \Magento\Framework\Data\Form\Element\AbstractElement $element
     * @return string
     */
    public function render(\Magento\Framework\Data\Form\Element\AbstractElement $element)
    {
        $scopedView = $this->getRequest()->getParam('website')
            || $this->getRequest()->getParam('store');
        $columns = $scopedView ? 5 : 4;
        $cellHtml = "<td colspan='{$columns}'>" . $this->toHtml() . '</td>';
        return $this->_decorateRowHtml($element, $cellHtml);
    }
}
{ "pile_set_name": "Github" }
# encoding: utf-8 import pytest from rpython.rtyper.lltypesystem import rffi, lltype from pypy.interpreter.error import OperationError from pypy.module.cpyext.test.test_api import BaseApiTest, raises_w from pypy.module.cpyext.test.test_cpyext import AppTestCpythonExtensionBase from pypy.module.cpyext.bytesobject import ( new_empty_str, PyBytesObject, _PyString_Resize, PyString_Concat, PyString_ConcatAndDel, PyString_Format, PyString_InternFromString, PyString_AsEncodedObject, PyString_AsDecodedObject, _PyString_Eq, _PyString_Join) from pypy.module.cpyext.api import PyObjectP, PyObject, Py_ssize_tP, generic_cpy_call from pypy.module.cpyext.pyobject import decref, from_ref, make_ref from pypy.module.cpyext.buffer import PyObject_AsCharBuffer from pypy.module.cpyext.api import PyTypeObjectPtr class AppTestBytesObject(AppTestCpythonExtensionBase): def test_bytesobject(self): module = self.import_extension('foo', [ ("get_hello1", "METH_NOARGS", """ return PyBytes_FromStringAndSize( "Hello world<should not be included>", 11); """), ("get_hello2", "METH_NOARGS", """ return PyBytes_FromString("Hello world"); """), ("test_Size", "METH_NOARGS", """ PyObject* s = PyBytes_FromString("Hello world"); int result = PyBytes_Size(s); Py_DECREF(s); return PyLong_FromLong(result); """), ("test_Size_exception", "METH_NOARGS", """ PyObject* f = PyFloat_FromDouble(1.0); PyBytes_Size(f); Py_DECREF(f); return NULL; """), ("test_is_bytes", "METH_VARARGS", """ return PyBool_FromLong(PyBytes_Check(PyTuple_GetItem(args, 0))); """)], prologue='#include <stdlib.h>') assert module.get_hello1() == b'Hello world' assert module.get_hello2() == b'Hello world' assert module.test_Size() == 11 raises(TypeError, module.test_Size_exception) assert module.test_is_bytes(b"") assert not module.test_is_bytes(()) def test_bytes_buffer_init(self): module = self.import_extension('foo', [ ("getbytes", "METH_NOARGS", """ PyObject *s, *t; char* c; s = PyBytes_FromStringAndSize(NULL, 4); if (s == NULL) return NULL; 
t = PyBytes_FromStringAndSize(NULL, 3); if (t == NULL) return NULL; Py_DECREF(t); c = PyBytes_AS_STRING(s); c[0] = 'a'; c[1] = 'b'; c[2] = 0; c[3] = 'c'; return s; """), ]) s = module.getbytes() assert len(s) == 4 assert s == b'ab\x00c' def test_bytes_tp_alloc(self): module = self.import_extension('foo', [ ("tpalloc", "METH_NOARGS", """ PyObject *base; PyTypeObject * type; PyBytesObject *obj; base = PyBytes_FromString("test"); if (PyBytes_GET_SIZE(base) != 4) return PyLong_FromLong(-PyBytes_GET_SIZE(base)); type = base->ob_type; if (type->tp_itemsize != 1) return PyLong_FromLong(type->tp_itemsize); obj = (PyBytesObject*)type->tp_alloc(type, 10); if (PyBytes_GET_SIZE(obj) != 10) return PyLong_FromLong(PyBytes_GET_SIZE(obj)); /* cannot work, there is only RO access memcpy(PyBytes_AS_STRING(obj), "works", 6); */ Py_INCREF(obj); return (PyObject*)obj; """), ('alloc_rw', "METH_NOARGS", ''' PyObject *obj = (PyObject*)_PyObject_NewVar(&PyBytes_Type, 10); memcpy(PyBytes_AS_STRING(obj), "works", 6); return (PyObject*)obj; '''), ]) s = module.alloc_rw() assert s[:6] == b'works\0' # s[6:10] contains random garbage s = module.tpalloc() assert s == b'\x00' * 10 def test_AsString(self): module = self.import_extension('foo', [ ("getbytes", "METH_NOARGS", """ char *c; PyObject* s2, *s1 = PyBytes_FromStringAndSize("test", 4); c = PyBytes_AsString(s1); s2 = PyBytes_FromStringAndSize(c, 4); Py_DECREF(s1); return s2; """), ]) s = module.getbytes() assert s == b'test' def test_manipulations(self): module = self.import_extension('foo', [ ("bytes_as_string", "METH_VARARGS", ''' return PyBytes_FromStringAndSize(PyBytes_AsString( PyTuple_GetItem(args, 0)), 4); ''' ), ("concat", "METH_VARARGS", """ PyObject ** v; PyObject * left = PyTuple_GetItem(args, 0); Py_INCREF(left); /* the reference will be stolen! 
*/ v = &left; PyBytes_Concat(v, PyTuple_GetItem(args, 1)); return *v; """)]) assert module.bytes_as_string(b"huheduwe") == b"huhe" ret = module.concat(b'abc', b'def') assert ret == b'abcdef' ret = module.concat('abc', u'def') assert not isinstance(ret, str) assert isinstance(ret, unicode) assert ret == 'abcdef' def test_py_bytes_as_string_None(self): module = self.import_extension('foo', [ ("string_None", "METH_VARARGS", ''' if (PyBytes_AsString(Py_None)) { Py_RETURN_NONE; } return NULL; ''' )]) raises(TypeError, module.string_None) def test_AsStringAndSize(self): module = self.import_extension('foo', [ ("getbytes", "METH_NOARGS", """ PyObject* s1 = PyBytes_FromStringAndSize("te\\0st", 5); char *buf; Py_ssize_t len; if (PyBytes_AsStringAndSize(s1, &buf, &len) < 0) return NULL; if (len != 5) { PyErr_SetString(PyExc_AssertionError, "Bad Length"); return NULL; } if (PyBytes_AsStringAndSize(s1, &buf, NULL) >= 0) { PyErr_SetString(PyExc_AssertionError, "Should Have failed"); return NULL; } PyErr_Clear(); Py_DECREF(s1); Py_INCREF(Py_None); return Py_None; """), ("c_only", "METH_NOARGS", """ int ret; char * buf2; PyObject * obj = PyBytes_FromStringAndSize(NULL, 1024); if (!obj) return NULL; buf2 = PyBytes_AsString(obj); if (!buf2) return NULL; /* buf should not have been forced, issue #2395 */ ret = _PyBytes_Resize(&obj, 512); if (ret < 0) return NULL; Py_DECREF(obj); Py_INCREF(Py_None); return Py_None; """), ]) module.getbytes() module.c_only() def test_py_string_as_string_Unicode(self): module = self.import_extension('foo', [ ("getstring_unicode", "METH_NOARGS", """ Py_UNICODE chars[] = {'t', 'e', 's', 't'}; PyObject* u1 = PyUnicode_FromUnicode(chars, 4); char *buf; buf = PyString_AsString(u1); if (buf == NULL) return NULL; if (buf[3] != 't') { PyErr_SetString(PyExc_AssertionError, "Bad conversion"); return NULL; } Py_DECREF(u1); Py_INCREF(Py_None); return Py_None; """), ("getstringandsize_unicode", "METH_NOARGS", """ Py_UNICODE chars[] = {'t', 'e', 's', 't'}; PyObject* 
u1 = PyUnicode_FromUnicode(chars, 4); char *buf; Py_ssize_t len; if (PyString_AsStringAndSize(u1, &buf, &len) < 0) return NULL; if (len != 4) { PyErr_SetString(PyExc_AssertionError, "Bad Length"); return NULL; } Py_DECREF(u1); Py_INCREF(Py_None); return Py_None; """), ]) module.getstring_unicode() module.getstringandsize_unicode() def test_format_v(self): module = self.import_extension('foo', [ ("test_string_format_v", "METH_VARARGS", ''' return helper("bla %d ble %s\\n", PyInt_AsLong(PyTuple_GetItem(args, 0)), PyString_AsString(PyTuple_GetItem(args, 1))); ''' ) ], prologue=''' PyObject* helper(char* fmt, ...) { va_list va; PyObject* res; va_start(va, fmt); res = PyString_FromFormatV(fmt, va); va_end(va); return res; } ''') res = module.test_string_format_v(1, "xyz") assert res == "bla 1 ble xyz\n" def test_format(self): module = self.import_extension('foo', [ ("test_string_format", "METH_VARARGS", ''' return PyString_FromFormat("bla %d ble %s\\n", PyInt_AsLong(PyTuple_GetItem(args, 0)), PyString_AsString(PyTuple_GetItem(args, 1))); ''' ) ]) res = module.test_string_format(1, "xyz") assert res == "bla 1 ble xyz\n" def test_intern_inplace(self): module = self.import_extension('foo', [ ("test_intern_inplace", "METH_O", ''' PyObject *s = args; Py_INCREF(s); PyString_InternInPlace(&s); if (((PyBytesObject*)s)->ob_sstate == SSTATE_NOT_INTERNED) { Py_DECREF(s); s = PyString_FromString("interned error"); } return s; ''' ) ]) # This does not test much, but at least the refcounts are checked. 
assert module.test_intern_inplace('s') == 's' def test_bytes_macros(self): """The PyString_* macros cast, and calls expecting that build.""" module = self.import_extension('foo', [ ("test_macro_invocations", "METH_NOARGS", """ PyObject* o = PyString_FromString(""); PyBytesObject* u = (PyBytesObject*)o; PyString_GET_SIZE(u); PyString_GET_SIZE(o); PyString_AS_STRING(o); PyString_AS_STRING(u); return o; """)]) assert module.test_macro_invocations() == '' def test_hash_and_state(self): module = self.import_extension('foo', [ ("test_hash", "METH_VARARGS", ''' PyObject* obj = (PyTuple_GetItem(args, 0)); long hash = ((PyBytesObject*)obj)->ob_shash; return PyLong_FromLong(hash); ''' ), ("test_sstate", "METH_NOARGS", ''' PyObject *s = PyString_FromString("xyz"); /*int sstate = ((PyBytesObject*)s)->ob_sstate; printf("sstate now %d\\n", sstate);*/ PyString_InternInPlace(&s); /*sstate = ((PyBytesObject*)s)->ob_sstate; printf("sstate now %d\\n", sstate);*/ Py_DECREF(s); return PyBool_FromLong(1); '''), ], prologue='#include <stdlib.h>') res = module.test_hash("xyz") assert res == hash('xyz') # doesn't really test, but if printf is enabled will prove sstate assert module.test_sstate() def test_subclass(self): # taken from PyStringArrType_Type in numpy's scalartypes.c.src module = self.import_extension('bar', [ ("newsubstr", "METH_O", """ PyObject * obj; char * data; int len; data = PyString_AS_STRING(args); len = PyString_GET_SIZE(args); if (data == NULL) Py_RETURN_NONE; obj = PyArray_Scalar(data, len); return obj; """), ("get_len", "METH_O", """ return PyLong_FromLong(PyObject_Size(args)); """), ('has_nb_add', "METH_O", ''' if (args->ob_type->tp_as_number == NULL) { Py_RETURN_FALSE; } if (args->ob_type->tp_as_number->nb_add == NULL) { Py_RETURN_FALSE; } Py_RETURN_TRUE; '''), ], prologue=""" #include <Python.h> PyTypeObject PyStringArrType_Type = { PyObject_HEAD_INIT(NULL) 0, /* ob_size */ "bar.string_", /* tp_name*/ sizeof(PyBytesObject), /* tp_basicsize*/ 0 /* tp_itemsize */ 
}; static PyObject * stringtype_repr(PyObject *self) { const char *dptr, *ip; int len; PyObject *new; ip = dptr = PyString_AS_STRING(self); len = PyString_GET_SIZE(self); dptr += len-1; while(len > 0 && *dptr-- == 0) { len--; } new = PyString_FromStringAndSize(ip, len); if (new == NULL) { return PyString_FromString(""); } return new; } static PyObject * stringtype_str(PyObject *self) { const char *dptr, *ip; int len; PyObject *new; ip = dptr = PyString_AS_STRING(self); len = PyString_GET_SIZE(self); dptr += len-1; while(len > 0 && *dptr-- == 0) { len--; } new = PyString_FromStringAndSize(ip, len); if (new == NULL) { return PyString_FromString(""); } return new; } PyObject * PyArray_Scalar(char *data, int n) { PyTypeObject *type = &PyStringArrType_Type; PyObject *obj; void *destptr; int itemsize = n; obj = type->tp_alloc(type, itemsize); if (obj == NULL) { return NULL; } destptr = PyString_AS_STRING(obj); ((PyBytesObject *)obj)->ob_shash = -1; memcpy(destptr, data, itemsize); return obj; } """, more_init = ''' PyStringArrType_Type.tp_alloc = NULL; PyStringArrType_Type.tp_free = NULL; PyStringArrType_Type.tp_repr = stringtype_repr; PyStringArrType_Type.tp_str = stringtype_str; PyStringArrType_Type.tp_flags = Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE; PyStringArrType_Type.tp_itemsize = sizeof(char); PyStringArrType_Type.tp_base = &PyString_Type; PyStringArrType_Type.tp_hash = PyString_Type.tp_hash; if (PyType_Ready(&PyStringArrType_Type) < 0) INITERROR; ''') a = module.newsubstr('abc') assert module.has_nb_add('a') is False assert module.has_nb_add(a) is False assert type(a).__name__ == 'string_' assert a == 'abc' assert 3 == module.get_len(a) b = module.newsubstr('') assert 0 == module.get_len(b) class TestBytes(BaseApiTest): def test_bytes_resize(self, space): py_str = new_empty_str(space, 10) ar = lltype.malloc(PyObjectP.TO, 1, flavor='raw') py_str.c_ob_sval[0] = 'a' py_str.c_ob_sval[1] = 'b' py_str.c_ob_sval[2] = 'c' ar[0] = rffi.cast(PyObject, py_str) 
_PyString_Resize(space, ar, 3) py_str = rffi.cast(PyBytesObject, ar[0]) assert py_str.c_ob_size == 3 assert py_str.c_ob_sval[1] == 'b' assert py_str.c_ob_sval[3] == '\x00' # the same for growing ar[0] = rffi.cast(PyObject, py_str) _PyString_Resize(space, ar, 10) py_str = rffi.cast(PyBytesObject, ar[0]) assert py_str.c_ob_size == 10 assert py_str.c_ob_sval[1] == 'b' assert py_str.c_ob_sval[10] == '\x00' decref(space, ar[0]) lltype.free(ar, flavor='raw') def test_string_buffer(self, space): py_str = new_empty_str(space, 10) c_buf = py_str.c_ob_type.c_tp_as_buffer assert c_buf py_obj = rffi.cast(PyObject, py_str) assert generic_cpy_call(space, c_buf.c_bf_getsegcount, py_obj, lltype.nullptr(Py_ssize_tP.TO)) == 1 ref = lltype.malloc(Py_ssize_tP.TO, 1, flavor='raw') assert generic_cpy_call(space, c_buf.c_bf_getsegcount, py_obj, ref) == 1 assert ref[0] == 10 lltype.free(ref, flavor='raw') ref = lltype.malloc(rffi.VOIDPP.TO, 1, flavor='raw') assert generic_cpy_call(space, c_buf.c_bf_getreadbuffer, py_obj, 0, ref) == 10 lltype.free(ref, flavor='raw') decref(space, py_obj) def test_Concat(self, space): ref = make_ref(space, space.wrap('abc')) ptr = lltype.malloc(PyObjectP.TO, 1, flavor='raw') ptr[0] = ref prev_refcnt = ref.c_ob_refcnt PyString_Concat(space, ptr, space.wrap('def')) assert ref.c_ob_refcnt == prev_refcnt - 1 assert space.str_w(from_ref(space, ptr[0])) == 'abcdef' with pytest.raises(OperationError): PyString_Concat(space, ptr, space.w_None) assert not ptr[0] ptr[0] = lltype.nullptr(PyObject.TO) PyString_Concat(space, ptr, space.wrap('def')) # should not crash lltype.free(ptr, flavor='raw') def test_ConcatAndDel(self, space): ref1 = make_ref(space, space.wrap('abc')) ref2 = make_ref(space, space.wrap('def')) ptr = lltype.malloc(PyObjectP.TO, 1, flavor='raw') ptr[0] = ref1 prev_refcnf = ref2.c_ob_refcnt PyString_ConcatAndDel(space, ptr, ref2) assert space.str_w(from_ref(space, ptr[0])) == 'abcdef' assert ref2.c_ob_refcnt == prev_refcnf - 1 decref(space, ptr[0]) 
ptr[0] = lltype.nullptr(PyObject.TO) ref2 = make_ref(space, space.wrap('foo')) prev_refcnf = ref2.c_ob_refcnt PyString_ConcatAndDel(space, ptr, ref2) # should not crash assert ref2.c_ob_refcnt == prev_refcnf - 1 lltype.free(ptr, flavor='raw') def test_format(self, space): assert "1 2" == space.unwrap( PyString_Format(space, space.wrap('%s %d'), space.wrap((1, 2)))) def test_asbuffer(self, space): bufp = lltype.malloc(rffi.CCHARPP.TO, 1, flavor='raw') lenp = lltype.malloc(Py_ssize_tP.TO, 1, flavor='raw') w_text = space.wrap("text") ref = make_ref(space, w_text) prev_refcnt = ref.c_ob_refcnt assert PyObject_AsCharBuffer(space, ref, bufp, lenp) == 0 assert ref.c_ob_refcnt == prev_refcnt assert lenp[0] == 4 assert rffi.charp2str(bufp[0]) == 'text' lltype.free(bufp, flavor='raw') lltype.free(lenp, flavor='raw') decref(space, ref) def test_intern(self, space): buf = rffi.str2charp("test") w_s1 = PyString_InternFromString(space, buf) w_s2 = PyString_InternFromString(space, buf) rffi.free_charp(buf) assert w_s1 is w_s2 def test_AsEncodedObject(self, space): ptr = space.wrap('abc') errors = rffi.str2charp("strict") encoding = rffi.str2charp("hex") res = PyString_AsEncodedObject(space, ptr, encoding, errors) assert space.unwrap(res) == "616263" res = PyString_AsEncodedObject(space, ptr, encoding, lltype.nullptr(rffi.CCHARP.TO)) assert space.unwrap(res) == "616263" rffi.free_charp(encoding) encoding = rffi.str2charp("unknown_encoding") with raises_w(space, LookupError): PyString_AsEncodedObject(space, ptr, encoding, errors) rffi.free_charp(encoding) rffi.free_charp(errors) NULL = lltype.nullptr(rffi.CCHARP.TO) res = PyString_AsEncodedObject(space, ptr, NULL, NULL) assert space.unwrap(res) == "abc" with raises_w(space, TypeError): PyString_AsEncodedObject(space, space.wrap(2), NULL, NULL) def test_AsDecodedObject(self, space): w_str = space.wrap('caf\xe9') encoding = rffi.str2charp("latin-1") w_res = PyString_AsDecodedObject(space, w_str, encoding, None) 
rffi.free_charp(encoding) assert space.unwrap(w_res) == u"caf\xe9" def test_eq(self, space): assert 1 == _PyString_Eq( space, space.wrap("hello"), space.wrap("hello")) assert 0 == _PyString_Eq( space, space.wrap("hello"), space.wrap("world")) def test_join(self, space): w_sep = space.wrap('<sep>') w_seq = space.wrap(['a', 'b']) w_joined = _PyString_Join(space, w_sep, w_seq) assert space.unwrap(w_joined) == 'a<sep>b'
{ "pile_set_name": "Github" }
#ifndef UV_HELPERS_H #define UV_HELPERS_H #define uv_fatal(e) { \ assert(0 != e); \ fprintf(stderr, "%s:%d - err:%s: %s\n", \ __FILE__, __LINE__, uv_err_name((e)), uv_strerror((e))); \ exit(1); } /** * Bind a listen socket * Abort if any failure. */ void uv_bind_listen_socket(uv_tcp_t* listen, const char* host, const int port, uv_loop_t* loop); #endif /* UV_HELPERS_H */
{ "pile_set_name": "Github" }
/*globals define, module, Symbol */ /*jshint -W056 */ (function (globals) { 'use strict'; var strings, messages, predicates, functions, assert, not, maybe, collections, slice, neginf, posinf, isArray, haveSymbols; strings = { v: 'value', n: 'number', s: 'string', b: 'boolean', o: 'object', t: 'type', a: 'array', al: 'array-like', i: 'iterable', d: 'date', f: 'function', l: 'length' }; messages = {}; predicates = {}; [ { n: 'equal', f: equal, s: 'v' }, { n: 'undefined', f: isUndefined, s: 'v' }, { n: 'null', f: isNull, s: 'v' }, { n: 'assigned', f: assigned, s: 'v' }, { n: 'primitive', f: primitive, s: 'v' }, { n: 'includes', f: includes, s: 'v' }, { n: 'zero', f: zero }, { n: 'infinity', f: infinity }, { n: 'number', f: number }, { n: 'integer', f: integer }, { n: 'even', f: even }, { n: 'odd', f: odd }, { n: 'greater', f: greater }, { n: 'less', f: less }, { n: 'between', f: between }, { n: 'greaterOrEqual', f: greaterOrEqual }, { n: 'lessOrEqual', f: lessOrEqual }, { n: 'inRange', f: inRange }, { n: 'positive', f: positive }, { n: 'negative', f: negative }, { n: 'string', f: string, s: 's' }, { n: 'emptyString', f: emptyString, s: 's' }, { n: 'nonEmptyString', f: nonEmptyString, s: 's' }, { n: 'contains', f: contains, s: 's' }, { n: 'match', f: match, s: 's' }, { n: 'boolean', f: boolean, s: 'b' }, { n: 'object', f: object, s: 'o' }, { n: 'emptyObject', f: emptyObject, s: 'o' }, { n: 'nonEmptyObject', f: nonEmptyObject, s: 'o' }, { n: 'instanceStrict', f: instanceStrict, s: 't' }, { n: 'instance', f: instance, s: 't' }, { n: 'like', f: like, s: 't' }, { n: 'array', f: array, s: 'a' }, { n: 'emptyArray', f: emptyArray, s: 'a' }, { n: 'nonEmptyArray', f: nonEmptyArray, s: 'a' }, { n: 'arrayLike', f: arrayLike, s: 'al' }, { n: 'iterable', f: iterable, s: 'i' }, { n: 'date', f: date, s: 'd' }, { n: 'function', f: isFunction, s: 'f' }, { n: 'hasLength', f: hasLength, s: 'l' }, ].map(function (data) { var n = data.n; messages[n] = 'Invalid ' + strings[data.s || 'n']; 
predicates[n] = data.f; }); functions = { apply: apply, map: map, all: all, any: any }; collections = [ 'array', 'arrayLike', 'iterable', 'object' ]; slice = Array.prototype.slice; neginf = Number.NEGATIVE_INFINITY; posinf = Number.POSITIVE_INFINITY; isArray = Array.isArray; haveSymbols = typeof Symbol === 'function'; functions = mixin(functions, predicates); assert = createModifiedPredicates(assertModifier, assertImpl); not = createModifiedPredicates(notModifier, notImpl); maybe = createModifiedPredicates(maybeModifier, maybeImpl); assert.not = createModifiedModifier(assertModifier, not); assert.maybe = createModifiedModifier(assertModifier, maybe); collections.forEach(createOfPredicates); createOfModifiers(assert, assertModifier); createOfModifiers(not, notModifier); collections.forEach(createMaybeOfModifiers); exportFunctions(mixin(functions, { assert: assert, not: not, maybe: maybe })); /** * Public function `equal`. * * Returns true if `lhs` and `rhs` are strictly equal, without coercion. * Returns false otherwise. */ function equal (lhs, rhs) { return lhs === rhs; } /** * Public function `undefined`. * * Returns true if `data` is undefined, false otherwise. */ function isUndefined (data) { return data === undefined; } /** * Public function `null`. * * Returns true if `data` is null, false otherwise. */ function isNull (data) { return data === null; } /** * Public function `assigned`. * * Returns true if `data` is not null or undefined, false otherwise. */ function assigned (data) { return data !== undefined && data !== null; } /** * Public function `primitive`. * * Returns true if `data` is a primitive type, false otherwise. */ function primitive (data) { var type; switch (data) { case null: case undefined: case false: case true: return true; } type = typeof data; return type === 'string' || type === 'number' || (haveSymbols && type === 'symbol'); } /** * Public function `zero`. * * Returns true if `data` is zero, false otherwise. 
*/ function zero (data) { return data === 0; } /** * Public function `infinity`. * * Returns true if `data` is positive or negative infinity, false otherwise. */ function infinity (data) { return data === neginf || data === posinf; } /** * Public function `number`. * * Returns true if `data` is a number, false otherwise. */ function number (data) { return typeof data === 'number' && data > neginf && data < posinf; } /** * Public function `integer`. * * Returns true if `data` is an integer, false otherwise. */ function integer (data) { return typeof data === 'number' && data % 1 === 0; } /** * Public function `even`. * * Returns true if `data` is an even number, false otherwise. */ function even (data) { return typeof data === 'number' && data % 2 === 0; } /** * Public function `odd`. * * Returns true if `data` is an odd number, false otherwise. */ function odd (data) { return integer(data) && data % 2 !== 0; } /** * Public function `greater`. * * Returns true if `lhs` is a number greater than `rhs`, false otherwise. */ function greater (lhs, rhs) { return number(lhs) && lhs > rhs; } /** * Public function `less`. * * Returns true if `lhs` is a number less than `rhs`, false otherwise. */ function less (lhs, rhs) { return number(lhs) && lhs < rhs; } /** * Public function `between`. * * Returns true if `data` is a number between `x` and `y`, false otherwise. */ function between (data, x, y) { if (x < y) { return greater(data, x) && data < y; } return less(data, x) && data > y; } /** * Public function `greaterOrEqual`. * * Returns true if `lhs` is a number greater than or equal to `rhs`, false * otherwise. */ function greaterOrEqual (lhs, rhs) { return number(lhs) && lhs >= rhs; } /** * Public function `lessOrEqual`. * * Returns true if `lhs` is a number less than or equal to `rhs`, false * otherwise. */ function lessOrEqual (lhs, rhs) { return number(lhs) && lhs <= rhs; } /** * Public function `inRange`. 
* * Returns true if `data` is a number in the range `x..y`, false otherwise. */ function inRange (data, x, y) { if (x < y) { return greaterOrEqual(data, x) && data <= y; } return lessOrEqual(data, x) && data >= y; } /** * Public function `positive`. * * Returns true if `data` is a positive number, false otherwise. */ function positive (data) { return greater(data, 0); } /** * Public function `negative`. * * Returns true if `data` is a negative number, false otherwise. */ function negative (data) { return less(data, 0); } /** * Public function `string`. * * Returns true if `data` is a string, false otherwise. */ function string (data) { return typeof data === 'string'; } /** * Public function `emptyString`. * * Returns true if `data` is the empty string, false otherwise. */ function emptyString (data) { return data === ''; } /** * Public function `nonEmptyString`. * * Returns true if `data` is a non-empty string, false otherwise. */ function nonEmptyString (data) { return string(data) && data !== ''; } /** * Public function `contains`. * * Returns true if `data` is a string that contains `substring`, false * otherwise. */ function contains (data, substring) { return string(data) && data.indexOf(substring) !== -1; } /** * Public function `match`. * * Returns true if `data` is a string that matches `regex`, false otherwise. */ function match (data, regex) { return string(data) && !! data.match(regex); } /** * Public function `boolean`. * * Returns true if `data` is a boolean value, false otherwise. */ function boolean (data) { return data === false || data === true; } /** * Public function `object`. * * Returns true if `data` is a plain-old JS object, false otherwise. */ function object (data) { return Object.prototype.toString.call(data) === '[object Object]'; } /** * Public function `emptyObject`. * * Returns true if `data` is an empty object, false otherwise. 
*/ function emptyObject (data) { return object(data) && Object.keys(data).length === 0; } /** * Public function `nonEmptyObject`. * * Returns true if `data` is a non-empty object, false otherwise. */ function nonEmptyObject (data) { return object(data) && Object.keys(data).length > 0; } /** * Public function `instanceStrict`. * * Returns true if `data` is an instance of `prototype`, false otherwise. */ function instanceStrict (data, prototype) { try { return data instanceof prototype; } catch (error) { return false; } } /** * Public function `instance`. * * Returns true if `data` is an instance of `prototype`, false otherwise. * Falls back to testing constructor.name and Object.prototype.toString * if the initial instanceof test fails. */ function instance (data, prototype) { try { return instanceStrict(data, prototype) || data.constructor.name === prototype.name || Object.prototype.toString.call(data) === '[object ' + prototype.name + ']'; } catch (error) { return false; } } /** * Public function `like`. * * Tests whether `data` 'quacks like a duck'. Returns true if `data` has all * of the properties of `archetype` (the 'duck'), false otherwise. */ function like (data, archetype) { var name; for (name in archetype) { if (archetype.hasOwnProperty(name)) { if (data.hasOwnProperty(name) === false || typeof data[name] !== typeof archetype[name]) { return false; } if (object(data[name]) && like(data[name], archetype[name]) === false) { return false; } } } return true; } /** * Public function `array`. * * Returns true if `data` is an array, false otherwise. */ function array (data) { return isArray(data); } /** * Public function `emptyArray`. * * Returns true if `data` is an empty array, false otherwise. */ function emptyArray (data) { return array(data) && data.length === 0; } /** * Public function `nonEmptyArray`. * * Returns true if `data` is a non-empty array, false otherwise. 
*/ function nonEmptyArray (data) { return array(data) && greater(data.length, 0); } /** * Public function `arrayLike`. * * Returns true if `data` is an array-like object, false otherwise. */ function arrayLike (data) { return assigned(data) && greaterOrEqual(data.length, 0); } /** * Public function `iterable`. * * Returns true if `data` is an iterable, false otherwise. */ function iterable (data) { if (! haveSymbols) { // Fall back to `arrayLike` predicate in pre-ES6 environments. return arrayLike(data); } return assigned(data) && isFunction(data[Symbol.iterator]); } /** * Public function `includes`. * * Returns true if `data` contains `value`, false otherwise. */ function includes (data, value) { var iterator, iteration, keys, length, i; if (! assigned(data)) { return false; } if (haveSymbols && data[Symbol.iterator] && isFunction(data.values)) { iterator = data.values(); do { iteration = iterator.next(); if (iteration.value === value) { return true; } } while (! iteration.done); return false; } keys = Object.keys(data); length = keys.length; for (i = 0; i < length; ++i) { if (data[keys[i]] === value) { return true; } } return false; } /** * Public function `hasLength`. * * Returns true if `data` has a length property that equals `length`, false * otherwise. */ function hasLength (data, length) { return assigned(data) && data.length === length; } /** * Public function `date`. * * Returns true if `data` is a valid date, false otherwise. */ function date (data) { return instanceStrict(data, Date) && integer(data.getTime()); } /** * Public function `function`. * * Returns true if `data` is a function, false otherwise. */ function isFunction (data) { return typeof data === 'function'; } /** * Public function `apply`. * * Maps each value from the `data` to the corresponding predicate and returns * the result array. If the same function is to be applied across all of the * data, a single predicate function may be passed in. 
* */ function apply (data, predicates) { assert.array(data); if (isFunction(predicates)) { return data.map(function (value) { return predicates(value); }); } assert.array(predicates); assert.hasLength(data, predicates.length); return data.map(function (value, index) { return predicates[index](value); }); } /** * Public function `map`. * * Maps each value from the `data` to the corresponding predicate and returns * the result object. Supports nested objects. If the `data` is not nested and * the same function is to be applied across all of it, a single predicate * function may be passed in. * */ function map (data, predicates) { assert.object(data); if (isFunction(predicates)) { return mapSimple(data, predicates); } assert.object(predicates); return mapComplex(data, predicates); } function mapSimple (data, predicate) { var result = {}; Object.keys(data).forEach(function (key) { result[key] = predicate(data[key]); }); return result; } function mapComplex (data, predicates) { var result = {}; Object.keys(predicates).forEach(function (key) { var predicate = predicates[key]; if (isFunction(predicate)) { if (not.assigned(data)) { result[key] = !!predicate.m; } else { result[key] = predicate(data[key]); } } else if (object(predicate)) { result[key] = mapComplex(data[key], predicate); } }); return result; } /** * Public function `all` * * Check that all boolean values are true * in an array (returned from `apply`) * or object (returned from `map`). 
* */ function all (data) { if (array(data)) { return testArray(data, false); } assert.object(data); return testObject(data, false); } function testArray (data, result) { var i; for (i = 0; i < data.length; i += 1) { if (data[i] === result) { return result; } } return !result; } function testObject (data, result) { var key, value; for (key in data) { if (data.hasOwnProperty(key)) { value = data[key]; if (object(value) && testObject(value, result) === result) { return result; } if (value === result) { return result; } } } return !result; } /** * Public function `any` * * Check that at least one boolean value is true * in an array (returned from `apply`) * or object (returned from `map`). * */ function any (data) { if (array(data)) { return testArray(data, true); } assert.object(data); return testObject(data, true); } function mixin (target, source) { Object.keys(source).forEach(function (key) { target[key] = source[key]; }); return target; } /** * Public modifier `assert`. * * Throws if `predicate` returns false. */ function assertModifier (predicate, defaultMessage) { return function () { return assertPredicate(predicate, arguments, defaultMessage); }; } function assertPredicate (predicate, args, defaultMessage) { var argCount = predicate.l || predicate.length; var message = args[argCount]; var ErrorType = args[argCount + 1]; assertImpl( predicate.apply(null, args), nonEmptyString(message) ? message : defaultMessage, isFunction(ErrorType) ? ErrorType : TypeError ); return args[0]; } function assertImpl (value, message, ErrorType) { if (value) { return value; } throw new (ErrorType || Error)(message || 'Assertion failed'); } /** * Public modifier `not`. * * Negates `predicate`. */ function notModifier (predicate) { var modifiedPredicate = function () { return notImpl(predicate.apply(null, arguments)); }; modifiedPredicate.l = predicate.length; return modifiedPredicate; } function notImpl (value) { return !value; } /** * Public modifier `maybe`. 
* * Returns true if predicate argument is null or undefined, * otherwise propagates the return value from `predicate`. */ function maybeModifier (predicate) { var modifiedPredicate = function () { if (not.assigned(arguments[0])) { return true; } return predicate.apply(null, arguments); }; modifiedPredicate.l = predicate.length; // Hackishly indicate that this is a maybe.xxx predicate. // Without this flag, the alternative would be to iterate // through the maybe predicates or use indexOf to check, // which would be time-consuming. modifiedPredicate.m = true; return modifiedPredicate; } function maybeImpl (value) { if (assigned(value) === false) { return true; } return value; } /** * Public modifier `of`. * * Applies the chained predicate to members of the collection. */ function ofModifier (target, type, predicate) { var modifiedPredicate = function () { var collection, args; collection = arguments[0]; if (target === 'maybe' && not.assigned(collection)) { return true; } if (!type(collection)) { return false; } collection = coerceCollection(type, collection); args = slice.call(arguments, 1); try { collection.forEach(function (item) { if ( (target !== 'maybe' || assigned(item)) && !predicate.apply(null, [ item ].concat(args)) ) { // TODO: Replace with for...of when ES6 is required. 
throw 0; } }); } catch (ignore) { return false; } return true; }; modifiedPredicate.l = predicate.length; return modifiedPredicate; } function coerceCollection (type, collection) { switch (type) { case arrayLike: return slice.call(collection); case object: return Object.keys(collection).map(function (key) { return collection[key]; }); default: return collection; } } function createModifiedPredicates (modifier, object) { return createModifiedFunctions([ modifier, predicates, object ]); } function createModifiedFunctions (args) { var modifier, object, functions, result; modifier = args.shift(); object = args.pop(); functions = args.pop(); result = object || {}; Object.keys(functions).forEach(function (key) { Object.defineProperty(result, key, { configurable: false, enumerable: true, writable: false, value: modifier.apply(null, args.concat(functions[key], messages[key])) }); }); return result; } function createModifiedModifier (modifier, modified) { return createModifiedFunctions([ modifier, modified, null ]); } function createOfPredicates (key) { predicates[key].of = createModifiedFunctions( [ ofModifier.bind(null, null), predicates[key], predicates, null ] ); } function createOfModifiers (base, modifier) { collections.forEach(function (key) { base[key].of = createModifiedModifier(modifier, predicates[key].of); }); } function createMaybeOfModifiers (key) { maybe[key].of = createModifiedFunctions( [ ofModifier.bind(null, 'maybe'), predicates[key], predicates, null ] ); assert.maybe[key].of = createModifiedModifier(assertModifier, maybe[key].of); assert.not[key].of = createModifiedModifier(assertModifier, not[key].of); } function exportFunctions (functions) { if (typeof define === 'function' && define.amd) { define(function () { return functions; }); } else if (typeof module !== 'undefined' && module !== null && module.exports) { module.exports = functions; } else { globals.check = functions; } } }(this));
{ "pile_set_name": "Github" }
*** Variables *** ${VAR_1} variable 1 ${VAR_2} variable 2 ${VAR_3} variable 3
{ "pile_set_name": "Github" }
<?php
/**
 * Deprecated.
 *
 * As of BuddyPress Docs 1.2, all functionality formerly included in the BP_Docs_Integration class
 * is handled by the BP_Docs_Component class, located in includes/component.php
 *
 * @package BuddyPress_Docs
 */

// Emit WordPress's standard _deprecated_file() notice, pointing callers at
// the replacement file (includes/component.php) since version 1.2.
_deprecated_file( wp_basename(__FILE__), '1.2', BP_DOCS_INCLUDES_PATH_ABS . 'component.php' );
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <!-- Copyright (c) 2014-present, Facebook, Inc. All rights reserved. You are hereby granted a non-exclusive, worldwide, royalty-free license to use, copy, modify, and distribute this software in source code or binary form for use in connection with the web services and APIs provided by Facebook. As with any software that integrates with the Facebook platform, your use of this software is subject to the Facebook Developer Principles and Policies [http://developers.facebook.com/policy/]. This copyright notice shall be included in all copies or substantial portions of the software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
--> <resources> <string name="com_facebook_like_button_not_liked">إعجاب</string> <string name="com_facebook_like_button_liked">أعجبني</string> <string name="com_facebook_loginview_log_out_button">تسجيل الخروج</string> <string name="com_facebook_loginview_log_in_button">تسجيل الدخول</string> <string name="com_facebook_loginview_log_in_button_long">تسجيل الدخول بحساب فيسبوك</string> <string name="com_facebook_loginview_logged_in_as">تم تسجيل الدخول باسم: %1$s</string> <string name="com_facebook_loginview_logged_in_using_facebook">تم تسجيل الدخول بحساب فيسبوك</string> <string name="com_facebook_loginview_log_out_action">تسجيل الخروج</string> <string name="com_facebook_loginview_cancel_action">إلغاء</string> <string name="com_facebook_loading">جارٍ التحميل...</string> <string name="com_facebook_internet_permission_error_title">خطأ AndroidManifest</string> <string name="com_facebook_internet_permission_error_message">يتطلب تسجيل الدخول إلى WebView إذن الاتصال بالإنترنت</string> <string name="com_facebook_tooltip_default">جديد! أنت المتحكم - اختر المعلومات التي تريد مشاركتها مع التطبيقات.</string> <string name="com_facebook_image_download_unknown_error">حدث خطأ غير متوقع أثناء تنزيل صورة.</string> <string name="com_facebook_share_button_text">مشاركة</string> <string name="com_facebook_send_button_text">إرسال</string> </resources>
{ "pile_set_name": "Github" }
Images, layout descriptions, binary blobs and string dictionaries can be included in your application as resource files. Various Android APIs are designed to operate on the resource IDs instead of dealing with images, strings or binary blobs directly. For example, a sample Android app that contains a user interface layout (main.xml), an internationalization string table (strings.xml) and some icons (drawable-XXX/icon.png) would keep its resources in the "Resources" directory of the application: Resources/ drawable-hdpi/ icon.png drawable-ldpi/ icon.png drawable-mdpi/ icon.png layout/ main.xml values/ strings.xml In order to get the build system to recognize Android resources, set the build action to "AndroidResource". The native Android APIs do not operate directly with filenames, but instead operate on resource IDs. When you compile an Android application that uses resources, the build system will package the resources for distribution and generate a class called "Resource" that contains the tokens for each one of the resources included. For example, for the above Resources layout, this is what the Resource class would expose: public class Resource { public class drawable { public const int icon = 0x123; } public class layout { public const int main = 0x456; } public class strings { public const int first_string = 0xabc; public const int second_string = 0xbcd; } } You would then use Resource.drawable.icon to reference the drawable/icon.png file, or Resource.layout.main to reference the layout/main.xml file, or Resource.strings.first_string to reference the first string in the dictionary file values/strings.xml.
{ "pile_set_name": "Github" }
package com.enonic.xp.attachment; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; public class AttachmentTest { @Test public void getNameWithoutExtension() { assertEquals( "MyImage", Attachment.create(). mimeType( "image/jpg" ). name( "MyImage.jpg" ). build().getNameWithoutExtension() ); assertEquals( "MyImage.something", Attachment.create(). mimeType( "image/gif" ). name( "MyImage.something.gif" ). build().getNameWithoutExtension() ); } @Test public void getBinaryReference() { assertEquals( "MyImage.jpg", Attachment.create(). mimeType( "image/jpg" ). name( "MyImage.jpg" ). build().getBinaryReference().toString() ); assertEquals( "MyImage.something.gif", Attachment.create(). mimeType( "image/gif" ). name( "MyImage.something.gif" ). build().getBinaryReference().toString() ); } @Test public void getExtension() { assertEquals( "jpg", Attachment.create(). mimeType( "image/jpg" ). name( "MyImage.jpg" ). build().getExtension() ); assertEquals( "gif", Attachment.create(). mimeType( "image/gif" ). name( "MyImage.gif" ). build().getExtension() ); assertEquals( "jpeg", Attachment.create(). mimeType( "image/jpeg" ). name( "MyImage.jpeg" ). build().getExtension() ); assertEquals( "png", Attachment.create(). mimeType( "image/png" ). name( "MyImage.png" ). build().getExtension() ); assertEquals( "jpg", Attachment.create(). mimeType( "image/jpg" ). name( "MyImage.something.jpg" ). build().getExtension() ); } @Test public void serializeAttachment() { Attachment a1 = Attachment.create(). mimeType( "image/jpg" ). size( 1024 ). label( "My Image 1" ). name( "MyImage.jpg" ). build(); assertEquals( "Attachment{name=MyImage.jpg, mimeType=image/jpg, label=My Image 1, size=1024}", a1.toString() ); } @Test public void compareAttachments() { Attachment a1 = Attachment.create(). mimeType( "image/jpg" ). size( 1024 ). 
label( "My Image 1" ). name( "MyImage.jpg" ). build(); Attachment.Builder a2Builder = Attachment.create( a1 ); assertTrue( a1.equals( a1 ) ); assertTrue( a1.equals( a2Builder.build() ) ); assertFalse( a1.equals( new Object() ) ); assertFalse( a1.equals( a2Builder.size( 2048 ).build() ) ); } }
{ "pile_set_name": "Github" }
-- boundary1.test -- -- db eval { -- SELECT a FROM t1 WHERE rowid >= 16383 ORDER BY a -- } SELECT a FROM t1 WHERE rowid >= 16383 ORDER BY a
{ "pile_set_name": "Github" }
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. // See LICENSE.txt for license information. import PropTypes from 'prop-types'; import React from 'react'; import {FormattedMessage, injectIntl} from 'react-intl'; import {Groups} from 'mattermost-redux/constants'; import ConfirmModal from 'components/confirm_modal'; import AddGroupsToTeamModal from 'components/add_groups_to_team_modal'; import {ModalIdentifiers} from 'utils/constants'; import {intlShape} from 'utils/react_intl'; import ListModal, {DEFAULT_NUM_PER_PAGE} from 'components/list_modal.jsx'; import DropdownIcon from 'components/widgets/icons/fa_dropdown_icon'; import groupsAvatar from 'images/groups-avatar.png'; import MenuWrapper from 'components/widgets/menu/menu_wrapper'; import Menu from 'components/widgets/menu/menu'; import * as Utils from 'utils/utils.jsx'; class TeamGroupsManageModal extends React.PureComponent { static propTypes = { intl: intlShape.isRequired, team: PropTypes.object.isRequired, actions: PropTypes.shape({ getGroupsAssociatedToTeam: PropTypes.func.isRequired, unlinkGroupSyncable: PropTypes.func.isRequired, patchGroupSyncable: PropTypes.func.isRequired, getMyTeamMembers: PropTypes.func.isRequired, closeModal: PropTypes.func.isRequired, openModal: PropTypes.func.isRequired, }).isRequired, }; state = { showConfirmModal: false, item: {member_count: 0}, listModal: null, }; loadItems = async (pageNumber, searchTerm) => { const {data} = await this.props.actions.getGroupsAssociatedToTeam(this.props.team.id, searchTerm, pageNumber, DEFAULT_NUM_PER_PAGE, true); return { items: data.groups, totalCount: data.totalGroupCount, }; }; handleDeleteCanceled = () => { this.setState({showConfirmModal: false}); }; handleDeleteConfirmed = () => { this.setState({showConfirmModal: false}); const {item, listModal} = this.state; this.props.actions.unlinkGroupSyncable(item.id, this.props.team.id, Groups.SYNCABLE_TYPE_TEAM).then(async () => { listModal.setState({loading: true}); const 
{items, totalCount} = await listModal.props.loadItems(listModal.setState.page, listModal.state.searchTerm); listModal.setState({loading: false, items, totalCount}); }); }; onClickRemoveGroup = (item, listModal) => { this.setState({showConfirmModal: true, item, listModal}); }; onClickConfirmRemoveGroup = (item, listModal) => this.props.actions.unlinkGroupSyncable(item.id, this.props.team.id, Groups.SYNCABLE_TYPE_TEAM).then(async () => { listModal.setState({loading: true}); const {items, totalCount} = await listModal.props.loadItems(listModal.setState.page, listModal.state.searchTerm); listModal.setState({loading: false, items, totalCount}); }); onHide = () => { this.props.actions.closeModal(ModalIdentifiers.MANAGE_TEAM_GROUPS); }; titleButtonOnClick = () => { this.onHide(); this.props.actions.openModal({modalId: ModalIdentifiers.ADD_GROUPS_TO_TEAM, dialogType: AddGroupsToTeamModal}); }; setTeamMemberStatus = async (item, listModal, isTeamAdmin) => { this.props.actions.patchGroupSyncable(item.id, this.props.team.id, Groups.SYNCABLE_TYPE_TEAM, {scheme_admin: isTeamAdmin}).then(async () => { listModal.setState({loading: true}); const {items, totalCount} = await listModal.props.loadItems(listModal.setState.page, listModal.state.searchTerm); this.props.actions.getMyTeamMembers(); listModal.setState({loading: false, items, totalCount}); }); }; renderRow = (item, listModal) => { let title; if (item.scheme_admin) { title = Utils.localizeMessage('team_members_dropdown.teamAdmins', 'Team Admins'); } else { title = Utils.localizeMessage('team_members_dropdown.teamMembers', 'Team Members'); } return ( <div key={item.id} className='more-modal__row' > <img className='more-modal__image' src={groupsAvatar} alt='group picture' width='32' height='32' /> <div className='more-modal__details'> <div className='more-modal__name'>{item.display_name} {'-'} {'&nbsp;'} <span className='more-modal__name_count'> <FormattedMessage id='numMembers' defaultMessage='{num, number} {num, plural, one 
{member} other {members}}' values={{ num: item.member_count, }} /> </span> </div> </div> <div className='more-modal__actions'> <MenuWrapper> <button id={`teamGroupsDropdown_${item.display_name}`} className='dropdown-toggle theme color--link style--none' type='button' aria-expanded='true' > <span>{title} </span> <DropdownIcon/> </button> <Menu openLeft={true} ariaLabel={Utils.localizeMessage('team_members_dropdown.menuAriaLabel', 'Change the role of a team member')} > <Menu.ItemAction show={!item.scheme_admin} onClick={() => this.setTeamMemberStatus(item, listModal, true)} text={Utils.localizeMessage('team_members_dropdown.makeTeamAdmins', 'Make Team Admins')} /> <Menu.ItemAction show={Boolean(item.scheme_admin)} onClick={() => this.setTeamMemberStatus(item, listModal, false)} text={Utils.localizeMessage('team_members_dropdown.makeTeamMembers', 'Make Team Members')} /> <Menu.ItemAction onClick={() => this.onClickRemoveGroup(item, listModal)} text={Utils.localizeMessage('group_list_modal.removeGroupButton', 'Remove Group')} /> </Menu> </MenuWrapper> </div> </div> ); }; render() { const {formatMessage} = this.props.intl; const memberCount = this.state.item.member_count; return ( <> <ListModal show={!this.state.showConfirmModal} titleText={formatMessage({id: 'groups', defaultMessage: '{team} Groups'}, {team: this.props.team.display_name})} searchPlaceholderText={formatMessage({id: 'manage_team_groups_modal.search_placeholder', defaultMessage: 'Search groups'})} renderRow={this.renderRow} loadItems={this.loadItems} onHide={this.onHide} titleBarButtonText={formatMessage({id: 'group_list_modal.addGroupButton', defaultMessage: 'Add Groups'})} titleBarButtonOnClick={this.titleButtonOnClick} /> <ConfirmModal show={this.state.showConfirmModal} title={formatMessage({id: 'remove_group_confirm_title', defaultMessage: 'Remove Group and {memberCount, number} {memberCount, plural, one {Member} other {Members}}'}, {memberCount})} message={formatMessage({id: 
'remove_group_confirm_message', defaultMessage: '{memberCount, number} {memberCount, plural, one {member} other {members}} associated to this group will be removed from the team. Are you sure you wish to remove this group and {memberCount} {memberCount, plural, one {member} other {members}}?'}, {memberCount})} confirmButtonText={formatMessage({id: 'remove_group_confirm_button', defaultMessage: 'Yes, Remove Group and {memberCount, plural, one {Member} other {Members}}'}, {memberCount})} onConfirm={this.handleDeleteConfirmed} onCancel={this.handleDeleteCanceled} /> </> ); } } export default injectIntl(TeamGroupsManageModal);
{ "pile_set_name": "Github" }
package binary import "testing" func TestInt8(t *testing.T) { b := make([]byte, 1) BigEndian.PutInt8(b, 100) i := BigEndian.Int8(b) if i != 100 { t.FailNow() } } func TestInt16(t *testing.T) { b := make([]byte, 2) BigEndian.PutInt16(b, 100) i := BigEndian.Int16(b) if i != 100 { t.FailNow() } } func TestInt32(t *testing.T) { b := make([]byte, 4) BigEndian.PutInt32(b, 100) i := BigEndian.Int32(b) if i != 100 { t.FailNow() } }
{ "pile_set_name": "Github" }
using System;

namespace HandlebarsDotNet.Compiler.Lexer
{
    /// <summary>
    /// Lexer token representing a literal value in a Handlebars expression,
    /// optionally wrapped in a delimiter (e.g. a quote character).
    /// </summary>
    internal class LiteralExpressionToken : ExpressionToken
    {
        private readonly string _literal;
        private readonly string _delimiterUsed;

        public LiteralExpressionToken(string value, string delimiter = null)
        {
            _literal = value;
            _delimiterUsed = delimiter;
        }

        /// <summary>The literal text of the token.</summary>
        public override string Value
        {
            get { return _literal; }
        }

        /// <summary>Always <see cref="TokenType.Literal"/> for this token.</summary>
        public override TokenType Type
        {
            get { return TokenType.Literal; }
        }

        /// <summary>True when the literal was written with a delimiter.</summary>
        public bool IsDelimitedLiteral
        {
            get { return _delimiterUsed != null; }
        }

        /// <summary>The delimiter that wrapped the literal, or null.</summary>
        public string Delimiter
        {
            get { return _delimiterUsed; }
        }
    }
}
{ "pile_set_name": "Github" }
<testcase> <info> <keywords> curl_easy_escape </keywords> </info> # Server-side # Client-side <client> <server> none </server> <tool> lib543 </tool> <name> curl_easy_escape </name> <command> - </command> </client> # Verify data after the test has been "shot" # # There's no MTDM in the protocol here since this code doesn't ask for the # time/date of the file <verify> <stdout> %9C%26K%3DI%04%A1%01%E0%D8%7C%20%B7%EFS%29%FA%1DW%E1 </stdout> </verify> </testcase>
{ "pile_set_name": "Github" }
/**************************************************************************** ** ** Copyright (C) 2015 The Qt Company Ltd. ** Contact: http://www.qt.io/licensing/ ** ** This file is part of the QtAndroidExtras module of the Qt Toolkit. ** ** $QT_BEGIN_LICENSE:LGPL21$ ** Commercial License Usage ** Licensees holding valid commercial Qt licenses may use this file in ** accordance with the commercial license agreement provided with the ** Software or, alternatively, in accordance with the terms contained in ** a written agreement between you and The Qt Company. For licensing terms ** and conditions see http://www.qt.io/terms-conditions. For further ** information use the contact form at http://www.qt.io/contact-us. ** ** GNU Lesser General Public License Usage ** Alternatively, this file may be used under the terms of the GNU Lesser ** General Public License version 2.1 or version 3 as published by the Free ** Software Foundation and appearing in the file LICENSE.LGPLv21 and ** LICENSE.LGPLv3 included in the packaging of this file. Please review the ** following information to ensure the GNU Lesser General Public License ** requirements will be met: https://www.gnu.org/licenses/lgpl.html and ** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. ** ** As a special exception, The Qt Company gives you certain additional ** rights. These rights are described in The Qt Company LGPL Exception ** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. 
**
** $QT_END_LICENSE$
**
****************************************************************************/

package org.gdpurjyfs.wellchat;

import android.app.Notification;
import android.app.NotificationManager;
import android.content.Context;
import android.app.Activity;
import android.view.Window;
import android.view.View;
import android.view.ViewGroup.LayoutParams;
import android.widget.RelativeLayout;
import android.view.ViewTreeObserver;
import android.graphics.Rect;
import android.view.ViewGroup;
import android.os.Bundle;

//import org.gdpurjyfs.sparrow;

// Main activity: extends Qt's QtActivity and wires up the native bridge.
//
// The bridge used to be initialised in onCreate (kept below, commented out),
// but per the original author's note — translated from Chinese: "You can't
// have other complex operations here; it will crash" — doing extra work in
// onCreate crashed the app, so initialisation moved to the constructor.
public class WellChatActivity extends org.qtproject.qt5.android.bindings.QtActivity
{
    // @Override
    // public void onCreate (Bundle savedInstanceState){
    //     System.out.println("这里竟然不能有其他复杂的函数操作,会闪退的。");
    //     super.onCreate(savedInstanceState);
    //     org.gdpurjyfs.sparrow.QtBridgingAndroid.Init(this);
    // }

    public WellChatActivity() {
        // Hook this activity into the Qt/Java bridge as early as possible.
        org.gdpurjyfs.sparrow.QtBridgingAndroid.Init(this);
    }
}
{ "pile_set_name": "Github" }
// +build windows // Copyright 2013, Örjan Persson. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package logging import ( "bytes" "io" "log" "syscall" ) var ( kernel32DLL = syscall.NewLazyDLL("kernel32.dll") setConsoleTextAttributeProc = kernel32DLL.NewProc("SetConsoleTextAttribute") ) // Character attributes // Note: // -- The attributes are combined to produce various colors (e.g., Blue + Green will create Cyan). // Clearing all foreground or background colors results in black; setting all creates white. // See https://msdn.microsoft.com/en-us/library/windows/desktop/ms682088(v=vs.85).aspx#_win32_character_attributes. const ( fgBlack = 0x0000 fgBlue = 0x0001 fgGreen = 0x0002 fgCyan = 0x0003 fgRed = 0x0004 fgMagenta = 0x0005 fgYellow = 0x0006 fgWhite = 0x0007 fgIntensity = 0x0008 fgMask = 0x000F ) var ( colors = []uint16{ INFO: fgWhite, CRITICAL: fgMagenta, ERROR: fgRed, WARNING: fgYellow, NOTICE: fgGreen, DEBUG: fgCyan, } boldcolors = []uint16{ INFO: fgWhite | fgIntensity, CRITICAL: fgMagenta | fgIntensity, ERROR: fgRed | fgIntensity, WARNING: fgYellow | fgIntensity, NOTICE: fgGreen | fgIntensity, DEBUG: fgCyan | fgIntensity, } ) type file interface { Fd() uintptr } // LogBackend utilizes the standard log module. type LogBackend struct { Logger *log.Logger Color bool // f is set to a non-nil value if the underlying writer which logs writes to // implements the file interface. This makes us able to colorise the output. f file } // NewLogBackend creates a new LogBackend. func NewLogBackend(out io.Writer, prefix string, flag int) *LogBackend { b := &LogBackend{Logger: log.New(out, prefix, flag)} // Unfortunately, the API used only takes an io.Writer where the Windows API // need the actual fd to change colors. 
if f, ok := out.(file); ok { b.f = f } return b } func (b *LogBackend) Log(level Level, calldepth int, rec *Record) error { if b.Color && b.f != nil { buf := &bytes.Buffer{} setConsoleTextAttribute(b.f, colors[level]) buf.Write([]byte(rec.Formatted(calldepth + 1))) err := b.Logger.Output(calldepth+2, buf.String()) setConsoleTextAttribute(b.f, fgWhite) return err } return b.Logger.Output(calldepth+2, rec.Formatted(calldepth+1)) } // setConsoleTextAttribute sets the attributes of characters written to the // console screen buffer by the WriteFile or WriteConsole function. // See http://msdn.microsoft.com/en-us/library/windows/desktop/ms686047(v=vs.85).aspx. func setConsoleTextAttribute(f file, attribute uint16) bool { ok, _, _ := setConsoleTextAttributeProc.Call(f.Fd(), uintptr(attribute), 0) return ok != 0 } func doFmtVerbLevelColor(layout string, level Level, output io.Writer) { // TODO not supported on Windows since the io.Writer here is actually a // bytes.Buffer. }
{ "pile_set_name": "Github" }
// Re-export the placeholder implementation unchanged.
var placeholder = require('./placeholder');

module.exports = placeholder;
{ "pile_set_name": "Github" }
using System; using System.Collections; using System.Collections.Generic; using System.Diagnostics.Contracts; using System.Linq; using System.Runtime.CompilerServices; using System.Threading; using LanguageExt; using LanguageExt.ClassInstances; using LanguageExt.TypeClasses; using static LanguageExt.Prelude; using static LanguageExt.TypeClass; namespace LanguageExt { /// <summary> /// Cons sequence /// Represents a sequence of values in a similar way to IEnumerable, but without the /// issues of multiple evaluation for key LINQ operators like Skip, Count, etc. /// </summary> /// <typeparam name="A">Type of the values in the sequence</typeparam> public struct Seq<A> : #pragma warning disable CS0618 // Remove ISeq complaint ISeq<A>, #pragma warning restore CS0618 IComparable<Seq<A>>, IEquatable<Seq<A>>, IComparable { /// <summary> /// Empty sequence /// </summary> public static readonly Seq<A> Empty = new Seq<A>(SeqEmptyInternal<A>.Default); /// <summary> /// Internal representation of the sequence (SeqStrict|SeqLazy|SeqEmptyInternal) /// </summary> readonly ISeqInternal<A> value; /// <summary> /// Cached hash code /// </summary> int hash; /// <summary> /// Internal value accessor - protects against `default` /// </summary> internal ISeqInternal<A> Value { [MethodImpl(MethodImplOptions.AggressiveInlining)] get => value ?? SeqEmptyInternal<A>.Default; } /// <summary> /// Constructor from lazy sequence /// </summary> [MethodImpl(MethodImplOptions.AggressiveInlining)] public Seq(IEnumerable<A> ma) : this(new SeqLazy<A>(ma)) { } /// <summary> /// Constructor /// </summary> [MethodImpl(MethodImplOptions.AggressiveInlining)] internal Seq(ISeqInternal<A> value) { this.value = value; this.hash = 0; } /// <summary> /// Reference version for use in pattern-matching /// </summary> [Pure] public SeqCase<A> Case => IsEmpty ? EmptyCase<A>.Default : Tail.IsEmpty ? 
HeadCase<A>.New(Head) : HeadTailCase<A>.New(Head, Tail); public void Deconstruct(out A head, out Seq<A> tail) { head = Head; tail = Tail; } /// <summary> /// Head lens /// </summary> public static Lens<Seq<A>, A> head => Lens<Seq<A>, A>.New( Get: la => la.IsEmpty ? throw new IndexOutOfRangeException() : la[0], Set: a => la => la.IsEmpty ? throw new IndexOutOfRangeException() : a.Cons(la.Tail) ); /// <summary> /// Head or none lens /// </summary> public static Lens<Seq<A>, Option<A>> headOrNone => Lens<Seq<A>, Option<A>>.New( Get: la => la.HeadOrNone(), Set: a => la => la.IsEmpty || a.IsNone ? la : a.Value.Cons(la.Tail) ); /// <summary> /// Tail lens /// </summary> public static Lens<Seq<A>, Seq<A>> tail => Lens<Seq<A>, Seq<A>>.New( Get: la => la.IsEmpty ? Seq<A>.Empty : la.Tail, Set: a => la => la.IsEmpty ? a : la.Head.Cons(a) ); /// <summary> /// Last lens /// </summary> public static Lens<Seq<A>, A> last => Lens<Seq<A>, A>.New( Get: la => la.IsEmpty ? throw new IndexOutOfRangeException() : la.Last, Set: a => la => la.IsEmpty ? throw new IndexOutOfRangeException() : la.Take(la.Count - 1).Add(a) ); /// <summary> /// Last or none lens /// </summary> public static Lens<Seq<A>, Option<A>> lastOrNone => Lens<Seq<A>, Option<A>>.New( Get: la => la.IsEmpty ? None : Some(la.Last), Set: a => la => la.IsEmpty || a.IsNone ? 
la : la.Take(la.Count - 1).Add(a.Value) ); /// <summary> /// Lens map /// </summary> [Pure] public static Lens<Seq<A>, Seq<B>> map<B>(Lens<A, B> lens) => Lens<Seq<A>, Seq<B>>.New( Get: la => la.Map(lens.Get), Set: lb => la => la.Zip(lb).Map(ab => lens.Set(ab.Item2, ab.Item1)) ); /// <summary> /// Indexer /// </summary> public A this[int index] { [MethodImpl(MethodImplOptions.AggressiveInlining)] get => Value[index]; } /// <summary> /// Add an item to the end of the sequence /// </summary> /// <remarks> /// Forces evaluation of the entire lazy sequence so the item /// can be appended /// </remarks> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public Seq<A> Add(A value) => new Seq<A>(Value.Add(value)); /// <summary> /// Add a range of items to the end of the sequence /// </summary> /// <remarks> /// Forces evaluation of the entire lazy sequence so the items /// can be appended. /// </remarks> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public Seq<A> Concat(IEnumerable<A> items) => items switch { Lst<A> lst => Concat(lst), Set<A> set => Concat(set), HashSet<A> hset => Concat(hset), Arr<A> arr => Concat(arr), Stck<A> stck => Concat(stck), IReadOnlyList<A> rolist => Concat(rolist), _ => new Seq<A>(EnumerableOptimal.ConcatFast(this, items)) }; /// <summary> /// Add a range of items to the end of the sequence /// </summary> /// <remarks> /// Forces evaluation of the entire lazy sequence so the items /// can be appended. /// </remarks> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public Seq<A> Concat(Lst<A> items) { if (items.Count == 0) { return this; } var arr = items.Value.ToArray(); return Concat(Seq.FromArray(arr)); } /// <summary> /// Add a range of items to the end of the sequence /// </summary> /// <remarks> /// Forces evaluation of the entire lazy sequence so the items /// can be appended. 
/// </remarks> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public Seq<A> Concat(Set<A> items) { if (items.Count == 0) { return this; } var arr = items.Value.ToArray(); return Concat(Seq.FromArray(arr)); } /// <summary> /// Add a range of items to the end of the sequence /// </summary> /// <remarks> /// Forces evaluation of the entire lazy sequence so the items /// can be appended. /// </remarks> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public Seq<A> Concat(HashSet<A> items) { if (items.Count == 0) { return this; } var arr = items.ToArray(); return Concat(Seq.FromArray(arr)); } /// <summary> /// Add a range of items to the end of the sequence /// </summary> /// <remarks> /// Forces evaluation of the entire lazy sequence so the items /// can be appended. /// </remarks> [MethodImpl(MethodImplOptions.AggressiveInlining)] public Seq<A> Concat(Arr<A> items) { if (items.Count == 0) { return this; } return Concat(Seq.FromArray(items.Value)); } /// <summary> /// Add a range of items to the end of the sequence /// </summary> /// <remarks> /// Forces evaluation of the entire lazy sequence so the items /// can be appended. /// </remarks> [MethodImpl(MethodImplOptions.AggressiveInlining)] public Seq<A> Concat(Stck<A> items) { if (items.Count == 0) { return this; } var arr = items.ToArray(); return Concat(Seq.FromArray(arr)); } /// <summary> /// Add a range of items to the end of the sequence /// </summary> /// <remarks> /// Forces evaluation of the entire lazy sequence so the items /// can be appended. /// </remarks> [MethodImpl(MethodImplOptions.AggressiveInlining)] public Seq<A> Concat(IReadOnlyCollection<A> items) { if ((items?.Count ?? 0) == 0) { return this; } var arr = items.ToArray(); return Concat(Seq.FromArray(arr)); } /// <summary> /// Add a range of items to the end of the sequence /// </summary> /// <remarks> /// Forces evaluation of the entire lazy sequence so the items /// can be appended. 
/// </remarks> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public Seq<A> Concat(Seq<A> rhs) { switch(Value.Type) { case SeqType.Empty: // lhs is empty, so just return rhs return rhs; case SeqType.Lazy: switch (rhs.Value.Type) { // lhs lazy, rhs empty // return lhs case SeqType.Empty: return this; // lhs lazy, rhs lazy // return SeqConcat case SeqType.Lazy: return new Seq<A>(new SeqConcat<A>(Seq(value, rhs.value))); // lhs lazy, rhs strict // force lhs to be strict and concat the two case SeqType.Strict: return new Seq<A>(((SeqStrict<A>)value.Strict()).Append((SeqStrict<A>)rhs.value)); // lhs lazy, rhs concat // prepend rhs with lhs case SeqType.Concat: return new Seq<A>(((SeqConcat<A>)rhs.value).ConsSeq(value)); } break; case SeqType.Strict: switch (rhs.Value.Type) { // lhs strict, rhs empty // return lhs case SeqType.Empty: return this; // lhs strict, rhs lazy // return SeqConcat case SeqType.Lazy: return new Seq<A>(new SeqConcat<A>(Seq(value, rhs.value))); // lhs strict, rhs strict // append the two case SeqType.Strict: return new Seq<A>(((SeqStrict<A>)value).Append((SeqStrict<A>)rhs.value)); // lhs strict, rhs concat // prepend rhs with lhs case SeqType.Concat: return new Seq<A>(((SeqConcat<A>)rhs.value).ConsSeq(value)); } break; case SeqType.Concat: switch (rhs.Value.Type) { // lhs concat, rhs empty // return lhs case SeqType.Empty: return this; // lhs concat, rhs lazy || lhs concat, rhs strict // add rhs to concat case SeqType.Lazy: case SeqType.Strict: return new Seq<A>(((SeqConcat<A>)value).AddSeq(rhs.value)); // lhs concat, rhs concat // add rhs to concat case SeqType.Concat: return new Seq<A>(((SeqConcat<A>)value).AddSeqRange(((SeqConcat<A>)rhs.value).ms)); } break; } throw new NotSupportedException(); } /// <summary> /// Prepend an item to the sequence /// </summary> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] internal Seq<A> Cons(A value) => new Seq<A>(Value.Cons(value)); /// <summary> /// Head item in the sequence. 
NOTE: If `IsEmpty` is true then Head /// is undefined. Call HeadOrNone() if for maximum safety. /// </summary> public A Head { [MethodImpl(MethodImplOptions.AggressiveInlining)] get => Value.Head; } /// <summary> /// Tail of the sequence /// </summary> public Seq<A> Tail { [MethodImpl(MethodImplOptions.AggressiveInlining)] get => new Seq<A>(Value.Tail); } /// <summary> /// Get all items except the last one /// </summary> public Seq<A> Init { [MethodImpl(MethodImplOptions.AggressiveInlining)] get => new Seq<A>(Value.Init); } /// <summary> /// Head of the sequence if this node isn't the empty node /// </summary> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public Option<A> HeadOrNone() => IsEmpty ? None : Some(Head); /// <summary> /// Last item in sequence. Throws if no items in sequence /// </summary> public A Last { [MethodImpl(MethodImplOptions.AggressiveInlining)] get => Value.Last; } /// <summary> /// Last item in sequence. /// </summary> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public Option<A> LastOrNone() => IsEmpty ? None : Some(Last); /// <summary> /// Last item in sequence. /// </summary> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public Either<L, A> LastOrLeft<L>(L Left) => IsEmpty ? Either<L, A>.Left(Left) : Either<L, A>.Right(Last); /// <summary> /// Last item in sequence. /// </summary> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public Either<L, A> LastOrLeft<L>(Func<L> Left) => IsEmpty ? Either<L, A>.Left(Left()) : Either<L, A>.Right(Last); /// <summary> /// Last item in sequence. /// </summary> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public Validation<F, A> LastOrInvalid<F>(F Fail) => IsEmpty ? Validation<F, A>.Fail(Seq1(Fail)) : Validation<F, A>.Success(Last); /// <summary> /// Last item in sequence. /// </summary> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public Validation<F, A> LastOrInvalid<F>(Func<F> Fail) => IsEmpty ? 
Validation<F, A>.Fail(Seq1(Fail())) : Validation<F, A>.Success(Last); /// <summary> /// Last item in sequence. /// </summary> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public Validation<MonoidFail, F, A> LastOrInvalid<MonoidFail, F>(F Fail) where MonoidFail : struct, Monoid<F>, Eq<F> => IsEmpty ? Validation<MonoidFail, F, A>.Fail(Fail) : Validation<MonoidFail, F, A>.Success(Last); /// <summary> /// Last item in sequence. /// </summary> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public Validation<MonoidFail, F, A> LastOrInvalid<MonoidFail, F>(Func<F> Fail) where MonoidFail : struct, Monoid<F>, Eq<F> => IsEmpty ? Validation<MonoidFail, F, A>.Fail(Fail()) : Validation<MonoidFail, F, A>.Success(Last); /// <summary> /// Head of the sequence if this node isn't the empty node or fail /// </summary> /// <typeparam name="Fail"></typeparam> /// <param name="fail">Fail case</param> /// <returns>Head of the sequence or fail</returns> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public Validation<Fail, A> HeadOrInvalid<Fail>(Fail fail) => IsEmpty ? Fail<Fail, A>(fail) : Success<Fail, A>(Head); /// <summary> /// Head of the sequence /// </summary> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public Validation<MonoidFail, Fail, A> HeadOrInvalid<MonoidFail, Fail>(Fail fail) where MonoidFail : struct, Monoid<Fail>, Eq<Fail> => IsEmpty ? Fail<MonoidFail, Fail, A>(fail) : Success<MonoidFail, Fail, A>(Head); /// <summary> /// Head of the sequence if this node isn't the empty node or left /// </summary> /// <typeparam name="L"></typeparam> /// <param name="left">Left case</param> /// <returns>Head of the sequence or left</returns> [Pure] public Either<L, A> HeadOrLeft<L>(L left) => IsEmpty ? 
Left<L, A>(left) : Right<L, A>(Head); /// <summary> /// Head of the sequence if this node isn't the empty node /// </summary> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public Either<L, A> HeadOrLeft<L>(Func<L> Left) => IsEmpty ? Left<L, A>(Left()) : Right<L, A>(Head); /// <summary> /// Returns true if the sequence is empty /// </summary> /// <remarks> /// For lazy streams this will have to peek at the first /// item. So, the first item will be consumed. /// </summary> public bool IsEmpty { [MethodImpl(MethodImplOptions.AggressiveInlining)] get => Value.IsEmpty; } /// <summary> /// Returns the number of items in the sequence /// </summary> /// <returns>Number of items in the sequence</returns> public int Count { [MethodImpl(MethodImplOptions.AggressiveInlining)] get => Value.Count; } /// <summary> /// Stream as an enumerable /// </summary> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public IEnumerable<A> AsEnumerable() => Value; /// <summary> /// Match empty sequence, or multi-item sequence /// </summary> /// <typeparam name="B">Return value type</typeparam> /// <param name="Empty">Match for an empty list</param> /// <param name="Tail">Match for a non-empty</param> /// <returns>Result of match function invoked</returns> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public B Match<B>( Func<B> Empty, Func<A, Seq<A>, B> Tail) => IsEmpty ? Empty() : Tail(this.Head, this.Tail); /// <summary> /// Match empty sequence, or one item sequence, or multi-item sequence /// </summary> /// <typeparam name="B">Return value type</typeparam> /// <param name="Empty">Match for an empty list</param> /// <param name="Tail">Match for a non-empty</param> /// <returns>Result of match function invoked</returns> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public B Match<B>( Func<B> Empty, Func<A, B> Head, Func<A, Seq<A>, B> Tail) => IsEmpty ? Empty() : this.Tail.IsEmpty ? 
Head(this.Head) : Tail(this.Head, this.Tail); /// <summary> /// Match empty sequence, or multi-item sequence /// </summary> /// <typeparam name="B">Return value type</typeparam> /// <param name="Empty">Match for an empty list</param> /// <param name="Sequence">Match for a non-empty</param> /// <returns>Result of match function invoked</returns> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public B Match<B>( Func<B> Empty, Func<Seq<A>, B> Seq) => IsEmpty ? Empty() : Seq(this); /// <summary> /// Match empty sequence, or one item sequence, or multi-item sequence /// </summary> /// <typeparam name="B">Return value type</typeparam> /// <param name="Empty">Match for an empty list</param> /// <param name="Tail">Match for a non-empty</param> /// <returns>Result of match function invoked</returns> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public B Match<B>( Func<B> Empty, Func<A, B> Head, Func<Seq<A>, B> Tail) => IsEmpty ? Empty() : this.Tail.IsEmpty ? Head(this.Head) : Tail(this.Tail); /// <summary> /// Impure iteration of the bound values in the structure /// </summary> /// <returns> /// Returns the original unmodified structure /// </returns> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public Seq<A> Do(Action<A> f) { this.Iter(f); return this; } /// <summary> /// Impure iteration of the bound values in the structure /// </summary> /// <returns> /// Returns the original unmodified structure /// </returns> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public Unit Iter(Action<A> f) => Value.Iter(f); /// <summary> /// Map the sequence using the function provided /// </summary> /// <typeparam name="B"></typeparam> /// <param name="f">Mapping function</param> /// <returns>Mapped sequence</returns> [Pure] public Seq<B> Map<B>(Func<A, B> f) { return new Seq<B>(new SeqLazy<B>(Yield(this))); IEnumerable<B> Yield(Seq<A> items) { foreach (var item in items) { yield return f(item); } } } /// <summary> /// Map the sequence using 
the function provided /// </summary> /// <typeparam name="B"></typeparam> /// <param name="f">Mapping function</param> /// <returns>Mapped sequence</returns> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public Seq<B> Select<B>(Func<A, B> f) => Map(f); /// <summary> /// Monadic bind (flatmap) of the sequence /// </summary> /// <typeparam name="B">Bound return value type</typeparam> /// <param name="f">Bind function</param> /// <returns>Flatmapped sequence</returns> [Pure] public Seq<B> Bind<B>(Func<A, Seq<B>> f) { IEnumerable<B> Yield(Seq<A> ma, Func<A, Seq<B>> bnd) { foreach (var a in ma) { foreach (var b in bnd(a)) { yield return b; } } } return new Seq<B>(Yield(this, f)); } /// <summary> /// Monadic bind (flatmap) of the sequence /// </summary> /// <typeparam name="B">Bound return value type</typeparam> /// <param name="bind">Bind function</param> /// <returns>Flatmapped sequence</returns> [Pure] public Seq<C> SelectMany<B, C>(Func<A, Seq<B>> bind, Func<A, B, C> project) { IEnumerable<C> Yield(Seq<A> ma, Func<A, Seq<B>> bnd, Func<A, B, C> prj) { foreach (var a in ma) { foreach (var b in bnd(a)) { yield return prj(a, b); } } } return new Seq<C>(Yield(this, bind, project)); } /// <summary> /// Filter the items in the sequence /// </summary> /// <param name="f">Predicate to apply to the items</param> /// <returns>Filtered sequence</returns> [Pure] public Seq<A> Filter(Func<A, bool> f) { return new Seq<A>(new SeqLazy<A>(Yield(this, f))); IEnumerable<A> Yield(Seq<A> items, Func<A, bool> f) { foreach (var item in items) { if (f(item)) { yield return item; } } } } /// <summary> /// Filter the items in the sequence /// </summary> /// <param name="f">Predicate to apply to the items</param> /// <returns>Filtered sequence</returns> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public Seq<A> Where(Func<A, bool> f) => Filter(f); /// <summary> /// Fold the sequence from the first item to the last /// </summary> /// <typeparam name="S">State 
type</typeparam> /// <param name="state">Initial state</param> /// <param name="f">Fold function</param> /// <returns>Aggregated state</returns> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public S Fold<S>(S state, Func<S, A, S> f) => Value.Fold(state, f); /// <summary> /// Fold the sequence from the last item to the first. For /// sequences that are not lazy and are less than 5000 items /// long, FoldBackRec is called instead, because it is faster. /// </summary> /// <typeparam name="S">State type</typeparam> /// <param name="state">Initial state</param> /// <param name="f">Fold function</param> /// <returns>Aggregated state</returns> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public S FoldBack<S>(S state, Func<S, A, S> f) => Value.FoldBack(state, f); /// <summary> /// Returns true if the supplied predicate returns true for any /// item in the sequence. False otherwise. /// </summary> /// <param name="f">Predicate to apply</param> /// <returns>True if the supplied predicate returns true for any /// item in the sequence. False otherwise.</returns> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public bool Exists(Func<A, bool> f) => Value.Exists(f); /// <summary> /// Returns true if the supplied predicate returns true for all /// items in the sequence. False otherwise. If there is an /// empty sequence then true is returned. /// </summary> /// <param name="f">Predicate to apply</param> /// <returns>True if the supplied predicate returns true for all /// items in the sequence. False otherwise. 
If there is an /// empty sequence then true is returned.</returns> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public bool ForAll(Func<A, bool> f) => Value.ForAll(f); /// <summary> /// Returns true if the sequence has items in it /// </summary> /// <returns>True if the sequence has items in it</returns> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public bool Any() => !IsEmpty; /// <summary> /// Get the hash code for all of the items in the sequence, or 0 if empty /// </summary> /// <returns></returns> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int GetHashCode() => hash == 0 ? (hash = Value.GetHashCode(FNV32.OffsetBasis)) : hash; [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public int CompareTo(object obj) => obj switch { Seq<A> s => CompareTo(s), IEnumerable<A> e => CompareTo(Seq(e)), _ => 1 }; /// <summary> /// Format the collection as `[a, b, c, ...]` /// The elipsis is used for collections over 50 items /// To get a formatted string with all the items, use `ToFullString` /// or `ToFullArrayString`. /// </summary> [Pure] public override string ToString() => Value is SeqLazy<A> ? 
CollectionFormat.ToShortArrayString(this) : CollectionFormat.ToShortArrayString(this, Count); /// <summary> /// Format the collection as `a, b, c, ...` /// </summary> [Pure] public string ToFullString(string separator = ", ") => CollectionFormat.ToFullString(this, separator); /// <summary> /// Format the collection as `[a, b, c, ...]` /// </summary> [Pure] public string ToFullArrayString(string separator = ", ") => CollectionFormat.ToFullArrayString(this, separator); /// <summary> /// Append operator /// </summary> [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Seq<A> operator +(Seq<A> x, Seq<A> y) => x.Concat(y); /// <summary> /// Ordering operator /// </summary> [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool operator >(Seq<A> x, Seq<A> y) => x.CompareTo(y) > 0; /// <summary> /// Ordering operator /// </summary> [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool operator >=(Seq<A> x, Seq<A> y) => x.CompareTo(y) >= 0; /// <summary> /// Ordering operator /// </summary> [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool operator <(Seq<A> x, Seq<A> y) => x.CompareTo(y) < 0; /// <summary> /// Ordering operator /// </summary> [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool operator <=(Seq<A> x, Seq<A> y) => x.CompareTo(y) <= 0; /// <summary> /// Equality operator /// </summary> [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool operator ==(Seq<A> x, Seq<A> y) => x.Equals(y); /// <summary> /// Non-equality operator /// </summary> [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool operator !=(Seq<A> x, Seq<A> y) => !(x == y); /// <summary> /// Equality test /// </summary> [MethodImpl(MethodImplOptions.AggressiveInlining)] public override bool Equals(object obj) => obj switch { Seq<A> s => Equals(s), IEnumerable<A> e => Equals(Seq(e)), _ => false }; /// <summary> /// Equality test /// </summary> [Pure] 
[MethodImpl(MethodImplOptions.AggressiveInlining)] [Obsolete(ISeqObsolete.Message)] public bool Equals(ISeq<A> rhs) => Enumerable.SequenceEqual(this, rhs); /// <summary> /// Equality test /// </summary> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public bool Equals(Seq<A> rhs) => Equals<EqDefault<A>>(rhs); /// <summary> /// Equality test /// </summary> [Pure] public bool Equals<EqA>(Seq<A> rhs) where EqA : struct, Eq<A> { // Differing lengths? if(Count != rhs.Count) return false; // If the hash code has been calculated on both sides then // check for differences if (hash != 0 && rhs.hash != 0 && hash != rhs.hash) { return false; } // Iterate through both sides using (var iterA = GetEnumerator()) { using (var iterB = rhs.GetEnumerator()) { while (iterA.MoveNext() && iterB.MoveNext()) { if (!default(EqA).Equals(iterA.Current, iterB.Current)) { return false; } } } } return true; } /// <summary> /// Skip count items /// </summary> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public Seq<A> Skip(int amount) => amount < 1 ? this : new Seq<A>(Value.Skip(amount)); /// <summary> /// Take count items /// </summary> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public Seq<A> Take(int amount) => amount < 1 ? Empty : new Seq<A>(Value.Take(amount)); /// <summary> /// Iterate the sequence, yielding items if they match the predicate /// provided, and stopping as soon as one doesn't /// </summary> /// <returns>A new sequence with the first items that match the /// predicate</returns> [Pure] public Seq<A> TakeWhile(Func<A, bool> pred) { return new Seq<A>(new SeqLazy<A>(Yield(Value, pred))); IEnumerable<A> Yield(IEnumerable<A> xs, Func<A, bool> f) { foreach (var x in xs) { if (!f(x)) break; yield return x; } } } /// <summary> /// Iterate the sequence, yielding items if they match the predicate /// provided, and stopping as soon as one doesn't. An index value is /// also provided to the predicate function. 
/// </summary> /// <returns>A new sequence with the first items that match the /// predicate</returns> [Pure] public Seq<A> TakeWhile(Func<A, int, bool> pred) { return new Seq<A>(new SeqLazy<A>(Yield(Value, pred))); IEnumerable<A> Yield(IEnumerable<A> xs, Func<A, int, bool> f) { var i = 0; foreach (var x in xs) { if (!f(x, i)) break; yield return x; i++; } } } /// <summary> /// Compare to another sequence /// </summary> [Pure] [Obsolete(ISeqObsolete.Message)] public int CompareTo(ISeq<A> rhs) => CompareTo<OrdDefault<A>>(rhs); /// <summary> /// Compare to another sequence /// </summary> [Pure] [Obsolete(ISeqObsolete.Message)] public int CompareTo<OrdA>(ISeq<A> rhs) where OrdA : struct, Ord<A> { if (rhs == null) return 1; // Differing lengths? var cmp = Count.CompareTo(rhs.Count); if (cmp != 0) return cmp; // Iterate through both sides using (var iterA = GetEnumerator()) { using (var iterB = rhs.GetEnumerator()) { while (iterA.MoveNext() && iterB.MoveNext()) { cmp = default(OrdA).Compare(iterA.Current, iterB.Current); if (cmp != 0) return cmp; } } } return 0; } /// <summary> /// Compare to another sequence /// </summary> [Pure] public int CompareTo(Seq<A> rhs) => CompareTo<OrdDefault<A>>(rhs); /// <summary> /// Compare to another sequence /// </summary> [Pure] public int CompareTo<OrdA>(Seq<A> rhs) where OrdA : struct, Ord<A> { // Differing lengths? 
var cmp = Count.CompareTo(rhs.Count); if (cmp != 0) return cmp; // Iterate through both sides using (var iterA = GetEnumerator()) { using (var iterB = rhs.GetEnumerator()) { while (iterA.MoveNext() && iterB.MoveNext()) { cmp = default(OrdA).Compare(iterA.Current, iterB.Current); if (cmp != 0) return cmp; } } } return 0; } /// <summary> /// Force all items lazy to stream /// </summary> [MethodImpl(MethodImplOptions.AggressiveInlining)] public Seq<A> Strict() => new Seq<A>(Value.Strict()); [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public IEnumerator<A> GetEnumerator() => Value.GetEnumerator(); [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] IEnumerator IEnumerable.GetEnumerator() => Value.GetEnumerator(); /// <summary> /// Implicit conversion from an untyped empty list /// </summary> [Pure] [MethodImpl(MethodImplOptions.AggressiveInlining)] public static implicit operator Seq<A>(SeqEmpty _) => Empty; [Pure] public Seq<B> Cast<B>() { IEnumerable<B> Yield(Seq<A> ma) { foreach (object item in ma) { yield return (B)item; } } return Value is IEnumerable<B> mb ? new Seq<B>(mb) : new Seq<B>(Yield(this)); } } }
{ "pile_set_name": "Github" }
/*
 * Copyright (c) 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

/**
 * JDK-8027933: Add const.as.var option
 *
 * @test
 * @option --const-as-var
 * @run
 */

// Regression test for the --const-as-var engine option: `const` declarations
// must parse and evaluate (presumably treated like `var` — the option name
// suggests so; confirm against the engine docs) rather than raise an error.

// Case 1: `const` at top-level (script) scope.
const THE_ANSWER = 42;
print("Answer to all questions: " + THE_ANSWER);

// Case 2: `const` inside a function scope, exercised via an immediately
// invoked function expression; expected to print 42.
print((function () { const FORTY_TWO = 42; return FORTY_TWO })())
{ "pile_set_name": "Github" }
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// GMock-based test doubles for CloudPolicyClient and its Observer, used by
// policy unit tests to stub out communication with the device management
// service.

#ifndef COMPONENTS_POLICY_CORE_COMMON_CLOUD_MOCK_CLOUD_POLICY_CLIENT_H_
#define COMPONENTS_POLICY_CORE_COMMON_CLOUD_MOCK_CLOUD_POLICY_CLIENT_H_

#include <stdint.h>

#include <string>

#include "base/macros.h"
#include "base/threading/thread_task_runner_handle.h"
#include "components/policy/core/common/cloud/cloud_policy_client.h"
#include "components/policy/core/common/cloud/device_management_service.h"
#include "testing/gmock/include/gmock/gmock.h"

namespace network {
class SharedURLLoaderFactory;
}

namespace policy {

// GMock action: instead of running the StatusCallback (arg0) synchronously,
// posts a task to the current thread's task runner that invokes it with
// |status|.
ACTION_P(ScheduleStatusCallback, status) {
  base::ThreadTaskRunnerHandle::Get()->PostTask(
      FROM_HERE, base::BindOnce(std::move(arg0), status));
}

// Mock of CloudPolicyClient. Methods taking move-only arguments
// (StatusCallback, std::unique_ptr<...>, base::Value) cannot be mocked
// directly, so each such override delegates to a trailing-underscore /
// "...Proxy" MOCK_METHOD that takes lvalue references or raw pointers
// instead.
class MockCloudPolicyClient : public CloudPolicyClient {
 public:
  MockCloudPolicyClient();
  explicit MockCloudPolicyClient(
      scoped_refptr<network::SharedURLLoaderFactory> url_loader_factory);
  explicit MockCloudPolicyClient(DeviceManagementService* service);
  MockCloudPolicyClient(
      scoped_refptr<network::SharedURLLoaderFactory> url_loader_factory,
      DeviceManagementService* service);
  ~MockCloudPolicyClient() override;

  MOCK_METHOD3(SetupRegistration,
               void(const std::string&,
                    const std::string&,
                    const std::vector<std::string>&));
  MOCK_METHOD3(Register,
               void(const RegistrationParameters&,
                    const std::string&,
                    const std::string&));
  MOCK_METHOD0(FetchPolicy, void(void));
  MOCK_METHOD0(Unregister, void(void));
  MOCK_METHOD2(UploadEnterpriseMachineCertificate,
               void(const std::string&, StatusCallback));
  MOCK_METHOD2(UploadEnterpriseEnrollmentCertificate,
               void(const std::string&, StatusCallback));
  MOCK_METHOD2(UploadEnterpriseEnrollmentId,
               void(const std::string&, StatusCallback));

  // Delegates to UploadDeviceStatus_ because StatusCallback is move-only.
  void UploadDeviceStatus(
      const enterprise_management::DeviceStatusReportRequest* device_status,
      const enterprise_management::SessionStatusReportRequest* session_status,
      const enterprise_management::ChildStatusReportRequest* child_status,
      StatusCallback callback) override {
    UploadDeviceStatus_(device_status, session_status, child_status, callback);
  }
  MOCK_METHOD4(UploadDeviceStatus_,
               void(const enterprise_management::DeviceStatusReportRequest*,
                    const enterprise_management::SessionStatusReportRequest*,
                    const enterprise_management::ChildStatusReportRequest*,
                    StatusCallback&));

  MOCK_METHOD0(CancelAppInstallReportUpload, void(void));

  // Delegates to UpdateGcmId_ because StatusCallback is move-only.
  void UpdateGcmId(const std::string& id, StatusCallback callback) override {
    UpdateGcmId_(id, callback);
  }
  MOCK_METHOD2(UpdateGcmId_, void(const std::string&, StatusCallback&));

  MOCK_METHOD4(UploadPolicyValidationReport,
               void(CloudPolicyValidatorBase::Status,
                    const std::vector<ValueValidationIssue>&,
                    const std::string&,
                    const std::string&));

  void UploadChromeDesktopReport(
      std::unique_ptr<enterprise_management::ChromeDesktopReportRequest>
          request,
      StatusCallback callback) override {
    UploadChromeDesktopReportProxy(request.get(), callback);
  }
  // Use Proxy function because unique_ptr can't be used in mock function.
  MOCK_METHOD2(UploadChromeDesktopReportProxy,
               void(enterprise_management::ChromeDesktopReportRequest*,
                    StatusCallback&));

  void UploadChromeOsUserReport(
      std::unique_ptr<enterprise_management::ChromeOsUserReportRequest> request,
      StatusCallback callback) override {
    UploadChromeOsUserReportProxy(request.get(), callback);
  }
  // Use Proxy function because unique_ptr can't be used in mock function.
  MOCK_METHOD2(UploadChromeOsUserReportProxy,
               void(enterprise_management::ChromeOsUserReportRequest*,
                    StatusCallback&));

  // Delegates to UploadRealtimeReport_ (base::Value and StatusCallback are
  // move-only).
  void UploadRealtimeReport(base::Value value,
                            StatusCallback callback) override {
    UploadRealtimeReport_(value, callback);
  }
  MOCK_METHOD2(UploadRealtimeReport_, void(base::Value&, StatusCallback&));

  // Delegates to UploadAppInstallReport_ (base::Value and StatusCallback are
  // move-only).
  void UploadAppInstallReport(base::Value value,
                              StatusCallback callback) override {
    UploadAppInstallReport_(value, callback);
  }
  MOCK_METHOD2(UploadAppInstallReport_, void(base::Value&, StatusCallback&));

  MOCK_METHOD5(ClientCertProvisioningStartCsr,
               void(const std::string& cert_scope,
                    const std::string& cert_profile_id,
                    const std::string& cert_profile_version,
                    const std::string& public_key,
                    ClientCertProvisioningStartCsrCallback callback));

  MOCK_METHOD7(ClientCertProvisioningFinishCsr,
               void(const std::string& cert_scope,
                    const std::string& cert_profile_id,
                    const std::string& cert_profile_version,
                    const std::string& public_key,
                    const std::string& va_challenge_response,
                    const std::string& signature,
                    ClientCertProvisioningFinishCsrCallback callback));

  MOCK_METHOD5(ClientCertProvisioningDownloadCert,
               void(const std::string& cert_scope,
                    const std::string& cert_profile_id,
                    const std::string& cert_profile_version,
                    const std::string& public_key,
                    ClientCertProvisioningDownloadCertCallback callback));

  // Sets the DMToken.
  void SetDMToken(const std::string& token);

  // Injects policy.
  void SetPolicy(const std::string& policy_type,
                 const std::string& settings_entity_id,
                 const enterprise_management::PolicyFetchResponse& policy);

  // Inject invalidation version.
  void SetFetchedInvalidationVersion(int64_t fetched_invalidation_version);

  // Sets the status field.
  void SetStatus(DeviceManagementStatus status);

  // Make the notification helpers public.
  using CloudPolicyClient::NotifyPolicyFetched;
  using CloudPolicyClient::NotifyRegistrationStateChanged;
  using CloudPolicyClient::NotifyClientError;

  // Re-exported protected state of the base class so tests can read/write it
  // directly.
  using CloudPolicyClient::dm_token_;
  using CloudPolicyClient::client_id_;
  using CloudPolicyClient::last_policy_timestamp_;
  using CloudPolicyClient::public_key_version_;
  using CloudPolicyClient::public_key_version_valid_;
  using CloudPolicyClient::types_to_fetch_;
  using CloudPolicyClient::invalidation_version_;
  using CloudPolicyClient::invalidation_payload_;
  using CloudPolicyClient::fetched_invalidation_version_;

 private:
  DISALLOW_COPY_AND_ASSIGN(MockCloudPolicyClient);
};

// Mock observer for CloudPolicyClient notifications.
class MockCloudPolicyClientObserver : public CloudPolicyClient::Observer {
 public:
  MockCloudPolicyClientObserver();
  ~MockCloudPolicyClientObserver() override;

  MOCK_METHOD1(OnPolicyFetched, void(CloudPolicyClient*));
  MOCK_METHOD1(OnRegistrationStateChanged, void(CloudPolicyClient*));
  MOCK_METHOD1(OnClientError, void(CloudPolicyClient*));

 private:
  DISALLOW_COPY_AND_ASSIGN(MockCloudPolicyClientObserver);
};

}  // namespace policy

#endif  // COMPONENTS_POLICY_CORE_COMMON_CLOUD_MOCK_CLOUD_POLICY_CLIENT_H_
{ "pile_set_name": "Github" }
# Microsoft Developer Studio Project File - Name="zlib" - Package Owner=<4> # Microsoft Developer Studio Generated Build File, Format Version 6.00 # ** DO NOT EDIT ** # TARGTYPE "Win32 (x86) Dynamic-Link Library" 0x0102 # TARGTYPE "Win32 (x86) Static Library" 0x0104 CFG=zlib - Win32 LIB Debug !MESSAGE This is not a valid makefile. To build this project using NMAKE, !MESSAGE use the Export Makefile command and run !MESSAGE !MESSAGE NMAKE /f "zlib.mak". !MESSAGE !MESSAGE You can specify a configuration when running NMAKE !MESSAGE by defining the macro CFG on the command line. For example: !MESSAGE !MESSAGE NMAKE /f "zlib.mak" CFG="zlib - Win32 LIB Debug" !MESSAGE !MESSAGE Possible choices for configuration are: !MESSAGE !MESSAGE "zlib - Win32 DLL ASM Release" (based on "Win32 (x86) Dynamic-Link Library") !MESSAGE "zlib - Win32 DLL ASM Debug" (based on "Win32 (x86) Dynamic-Link Library") !MESSAGE "zlib - Win32 DLL Release" (based on "Win32 (x86) Dynamic-Link Library") !MESSAGE "zlib - Win32 DLL Debug" (based on "Win32 (x86) Dynamic-Link Library") !MESSAGE "zlib - Win32 LIB ASM Release" (based on "Win32 (x86) Static Library") !MESSAGE "zlib - Win32 LIB ASM Debug" (based on "Win32 (x86) Static Library") !MESSAGE "zlib - Win32 LIB Release" (based on "Win32 (x86) Static Library") !MESSAGE "zlib - Win32 LIB Debug" (based on "Win32 (x86) Static Library") !MESSAGE # Begin Project # PROP AllowPerConfigDependencies 0 # PROP Scc_ProjName "" # PROP Scc_LocalPath "" !IF "$(CFG)" == "zlib - Win32 DLL ASM Release" # PROP BASE Use_MFC 0 # PROP BASE Use_Debug_Libraries 0 # PROP BASE Output_Dir "zlib___Win32_DLL_ASM_Release" # PROP BASE Intermediate_Dir "zlib___Win32_DLL_ASM_Release" # PROP BASE Target_Dir "" # PROP Use_MFC 0 # PROP Use_Debug_Libraries 0 # PROP Output_Dir "Win32_DLL_ASM_Release" # PROP Intermediate_Dir "Win32_DLL_ASM_Release" # PROP Ignore_Export_Lib 0 # PROP Target_Dir "" CPP=cl.exe # ADD BASE CPP /nologo /MD /W3 /O2 /D "WIN32" /D "NDEBUG" /FD /c # SUBTRACT BASE CPP 
/YX /Yc /Yu # ADD CPP /nologo /MD /W3 /O2 /D "WIN32" /D "_CRT_SECURE_NO_DEPRECATE" /D "_CRT_NONSTDC_NO_DEPRECATE" /D "NDEBUG" /D "ASMV" /D "ASMINF" /FD /c # SUBTRACT CPP /YX /Yc /Yu MTL=midl.exe # ADD BASE MTL /nologo /D "NDEBUG" /mktyplib203 /win32 # ADD MTL /nologo /D "NDEBUG" /mktyplib203 /win32 RSC=rc.exe # ADD BASE RSC /l 0x409 /d "NDEBUG" # ADD RSC /l 0x409 /d "NDEBUG" BSC32=bscmake.exe # ADD BASE BSC32 /nologo # ADD BSC32 /nologo LINK32=link.exe # ADD BASE LINK32 kernel32.lib user32.lib gdi32.lib winspool.lib comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib uuid.lib odbc32.lib odbccp32.lib /nologo /dll /machine:I386 # ADD LINK32 /nologo /dll /machine:I386 /out:"Win32_DLL_ASM_Release\zlib1.dll" !ELSEIF "$(CFG)" == "zlib - Win32 DLL ASM Debug" # PROP BASE Use_MFC 0 # PROP BASE Use_Debug_Libraries 1 # PROP BASE Output_Dir "zlib___Win32_DLL_ASM_Debug" # PROP BASE Intermediate_Dir "zlib___Win32_DLL_ASM_Debug" # PROP BASE Target_Dir "" # PROP Use_MFC 0 # PROP Use_Debug_Libraries 1 # PROP Output_Dir "Win32_DLL_ASM_Debug" # PROP Intermediate_Dir "Win32_DLL_ASM_Debug" # PROP Ignore_Export_Lib 0 # PROP Target_Dir "" CPP=cl.exe # ADD BASE CPP /nologo /MDd /W3 /Gm /ZI /Od /D "WIN32" /D "_DEBUG" /FD /GZ /c # SUBTRACT BASE CPP /YX /Yc /Yu # ADD CPP /nologo /MDd /W3 /Gm /ZI /Od /D "WIN32" /D "_CRT_SECURE_NO_DEPRECATE" /D "_CRT_NONSTDC_NO_DEPRECATE" /D "_DEBUG" /D "ASMV" /D "ASMINF" /FR /FD /GZ /c # SUBTRACT CPP /YX /Yc /Yu MTL=midl.exe # ADD BASE MTL /nologo /D "_DEBUG" /mktyplib203 /win32 # ADD MTL /nologo /D "_DEBUG" /mktyplib203 /win32 RSC=rc.exe # ADD BASE RSC /l 0x409 /d "_DEBUG" # ADD RSC /l 0x409 /d "_DEBUG" BSC32=bscmake.exe # ADD BASE BSC32 /nologo # ADD BSC32 /nologo LINK32=link.exe # ADD BASE LINK32 kernel32.lib user32.lib gdi32.lib winspool.lib comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib uuid.lib odbc32.lib odbccp32.lib /nologo /dll /debug /machine:I386 /pdbtype:sept # ADD LINK32 /nologo /dll /debug /machine:I386 
/out:"Win32_DLL_ASM_Debug\zlib1d.dll" /pdbtype:sept !ELSEIF "$(CFG)" == "zlib - Win32 DLL Release" # PROP BASE Use_MFC 0 # PROP BASE Use_Debug_Libraries 0 # PROP BASE Output_Dir "zlib___Win32_DLL_Release" # PROP BASE Intermediate_Dir "zlib___Win32_DLL_Release" # PROP BASE Target_Dir "" # PROP Use_MFC 0 # PROP Use_Debug_Libraries 0 # PROP Output_Dir "Win32_DLL_Release" # PROP Intermediate_Dir "Win32_DLL_Release" # PROP Ignore_Export_Lib 0 # PROP Target_Dir "" CPP=cl.exe # ADD BASE CPP /nologo /MD /W3 /O2 /D "WIN32" /D "NDEBUG" /FD /c # SUBTRACT BASE CPP /YX /Yc /Yu # ADD CPP /nologo /MD /W3 /O2 /D "WIN32" /D "_CRT_SECURE_NO_DEPRECATE" /D "_CRT_NONSTDC_NO_DEPRECATE" /D "NDEBUG" /FD /c # SUBTRACT CPP /YX /Yc /Yu MTL=midl.exe # ADD BASE MTL /nologo /D "NDEBUG" /mktyplib203 /win32 # ADD MTL /nologo /D "NDEBUG" /mktyplib203 /win32 RSC=rc.exe # ADD BASE RSC /l 0x409 /d "NDEBUG" # ADD RSC /l 0x409 /d "NDEBUG" BSC32=bscmake.exe # ADD BASE BSC32 /nologo # ADD BSC32 /nologo LINK32=link.exe # ADD BASE LINK32 kernel32.lib user32.lib gdi32.lib winspool.lib comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib uuid.lib odbc32.lib odbccp32.lib /nologo /dll /machine:I386 # ADD LINK32 /nologo /dll /machine:I386 /out:"Win32_DLL_Release\zlib1.dll" !ELSEIF "$(CFG)" == "zlib - Win32 DLL Debug" # PROP BASE Use_MFC 0 # PROP BASE Use_Debug_Libraries 1 # PROP BASE Output_Dir "zlib___Win32_DLL_Debug" # PROP BASE Intermediate_Dir "zlib___Win32_DLL_Debug" # PROP BASE Target_Dir "" # PROP Use_MFC 0 # PROP Use_Debug_Libraries 1 # PROP Output_Dir "Win32_DLL_Debug" # PROP Intermediate_Dir "Win32_DLL_Debug" # PROP Ignore_Export_Lib 0 # PROP Target_Dir "" CPP=cl.exe # ADD BASE CPP /nologo /MDd /W3 /Gm /ZI /Od /D "WIN32" /D "_DEBUG" /FD /GZ /c # SUBTRACT BASE CPP /YX /Yc /Yu # ADD CPP /nologo /MDd /W3 /Gm /ZI /Od /D "WIN32" /D "_CRT_SECURE_NO_DEPRECATE" /D "_CRT_NONSTDC_NO_DEPRECATE" /D "_DEBUG" /FR /FD /GZ /c # SUBTRACT CPP /YX /Yc /Yu MTL=midl.exe # ADD BASE MTL /nologo /D "_DEBUG" 
/mktyplib203 /win32 # ADD MTL /nologo /D "_DEBUG" /mktyplib203 /win32 RSC=rc.exe # ADD BASE RSC /l 0x409 /d "_DEBUG" # ADD RSC /l 0x409 /d "_DEBUG" BSC32=bscmake.exe # ADD BASE BSC32 /nologo # ADD BSC32 /nologo LINK32=link.exe # ADD BASE LINK32 kernel32.lib user32.lib gdi32.lib winspool.lib comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib uuid.lib odbc32.lib odbccp32.lib /nologo /dll /debug /machine:I386 /pdbtype:sept # ADD LINK32 /nologo /dll /debug /machine:I386 /out:"Win32_DLL_Debug\zlib1d.dll" /pdbtype:sept !ELSEIF "$(CFG)" == "zlib - Win32 LIB ASM Release" # PROP BASE Use_MFC 0 # PROP BASE Use_Debug_Libraries 0 # PROP BASE Output_Dir "zlib___Win32_LIB_ASM_Release" # PROP BASE Intermediate_Dir "zlib___Win32_LIB_ASM_Release" # PROP BASE Target_Dir "" # PROP Use_MFC 0 # PROP Use_Debug_Libraries 0 # PROP Output_Dir "Win32_LIB_ASM_Release" # PROP Intermediate_Dir "Win32_LIB_ASM_Release" # PROP Target_Dir "" CPP=cl.exe # ADD BASE CPP /nologo /MD /W3 /O2 /D "WIN32" /D "NDEBUG" /FD /c # SUBTRACT BASE CPP /YX /Yc /Yu # ADD CPP /nologo /MD /W3 /O2 /D "WIN32" /D "_CRT_SECURE_NO_DEPRECATE" /D "_CRT_NONSTDC_NO_DEPRECATE" /D "NDEBUG" /D "ASMV" /D "ASMINF" /FD /c # SUBTRACT CPP /YX /Yc /Yu RSC=rc.exe # ADD BASE RSC /l 0x409 /d "NDEBUG" # ADD RSC /l 0x409 /d "NDEBUG" BSC32=bscmake.exe # ADD BASE BSC32 /nologo # ADD BSC32 /nologo LIB32=link.exe -lib # ADD BASE LIB32 /nologo # ADD LIB32 /nologo !ELSEIF "$(CFG)" == "zlib - Win32 LIB ASM Debug" # PROP BASE Use_MFC 0 # PROP BASE Use_Debug_Libraries 1 # PROP BASE Output_Dir "zlib___Win32_LIB_ASM_Debug" # PROP BASE Intermediate_Dir "zlib___Win32_LIB_ASM_Debug" # PROP BASE Target_Dir "" # PROP Use_MFC 0 # PROP Use_Debug_Libraries 1 # PROP Output_Dir "Win32_LIB_ASM_Debug" # PROP Intermediate_Dir "Win32_LIB_ASM_Debug" # PROP Target_Dir "" CPP=cl.exe # ADD BASE CPP /nologo /MDd /W3 /Gm /ZI /Od /D "WIN32" /D "_DEBUG" /FD /GZ /c # SUBTRACT BASE CPP /YX /Yc /Yu # ADD CPP /nologo /MDd /W3 /Gm /ZI /Od /D "WIN32" /D 
"_CRT_SECURE_NO_DEPRECATE" /D "_CRT_NONSTDC_NO_DEPRECATE" /D "_DEBUG" /D "ASMV" /D "ASMINF" /FR /FD /GZ /c # SUBTRACT CPP /YX /Yc /Yu RSC=rc.exe # ADD BASE RSC /l 0x409 /d "_DEBUG" # ADD RSC /l 0x409 /d "_DEBUG" BSC32=bscmake.exe # ADD BASE BSC32 /nologo # ADD BSC32 /nologo LIB32=link.exe -lib # ADD BASE LIB32 /nologo # ADD LIB32 /nologo /out:"Win32_LIB_ASM_Debug\zlibd.lib" !ELSEIF "$(CFG)" == "zlib - Win32 LIB Release" # PROP BASE Use_MFC 0 # PROP BASE Use_Debug_Libraries 0 # PROP BASE Output_Dir "zlib___Win32_LIB_Release" # PROP BASE Intermediate_Dir "zlib___Win32_LIB_Release" # PROP BASE Target_Dir "" # PROP Use_MFC 0 # PROP Use_Debug_Libraries 0 # PROP Output_Dir "Win32_LIB_Release" # PROP Intermediate_Dir "Win32_LIB_Release" # PROP Target_Dir "" CPP=cl.exe # ADD BASE CPP /nologo /MD /W3 /O2 /D "WIN32" /D "NDEBUG" /FD /c # SUBTRACT BASE CPP /YX /Yc /Yu # ADD CPP /nologo /MD /W3 /O2 /D "WIN32" /D "_CRT_SECURE_NO_DEPRECATE" /D "_CRT_NONSTDC_NO_DEPRECATE" /D "NDEBUG" /FD /c # SUBTRACT CPP /YX /Yc /Yu RSC=rc.exe # ADD BASE RSC /l 0x409 /d "NDEBUG" # ADD RSC /l 0x409 /d "NDEBUG" BSC32=bscmake.exe # ADD BASE BSC32 /nologo # ADD BSC32 /nologo LIB32=link.exe -lib # ADD BASE LIB32 /nologo # ADD LIB32 /nologo !ELSEIF "$(CFG)" == "zlib - Win32 LIB Debug" # PROP BASE Use_MFC 0 # PROP BASE Use_Debug_Libraries 1 # PROP BASE Output_Dir "zlib___Win32_LIB_Debug" # PROP BASE Intermediate_Dir "zlib___Win32_LIB_Debug" # PROP BASE Target_Dir "" # PROP Use_MFC 0 # PROP Use_Debug_Libraries 1 # PROP Output_Dir "Win32_LIB_Debug" # PROP Intermediate_Dir "Win32_LIB_Debug" # PROP Target_Dir "" CPP=cl.exe # ADD BASE CPP /nologo /MDd /W3 /Gm /ZI /Od /D "WIN32" /D "_DEBUG" /FD /GZ /c # SUBTRACT BASE CPP /YX /Yc /Yu # ADD CPP /nologo /MDd /W3 /Gm /ZI /Od /D "WIN32" /D "_CRT_SECURE_NO_DEPRECATE" /D "_CRT_NONSTDC_NO_DEPRECATE" /D "_DEBUG" /FR /FD /GZ /c # SUBTRACT CPP /YX /Yc /Yu RSC=rc.exe # ADD BASE RSC /l 0x409 /d "_DEBUG" # ADD RSC /l 0x409 /d "_DEBUG" BSC32=bscmake.exe # ADD BASE BSC32 
/nologo # ADD BSC32 /nologo LIB32=link.exe -lib # ADD BASE LIB32 /nologo # ADD LIB32 /nologo /out:"Win32_LIB_Debug\zlibd.lib" !ENDIF # Begin Target # Name "zlib - Win32 DLL ASM Release" # Name "zlib - Win32 DLL ASM Debug" # Name "zlib - Win32 DLL Release" # Name "zlib - Win32 DLL Debug" # Name "zlib - Win32 LIB ASM Release" # Name "zlib - Win32 LIB ASM Debug" # Name "zlib - Win32 LIB Release" # Name "zlib - Win32 LIB Debug" # Begin Group "Source Files" # PROP Default_Filter "cpp;c;cxx;rc;def;r;odl;idl;hpj;bat" # Begin Source File SOURCE=..\..\adler32.c # End Source File # Begin Source File SOURCE=..\..\compress.c # End Source File # Begin Source File SOURCE=..\..\crc32.c # End Source File # Begin Source File SOURCE=..\..\deflate.c # End Source File # Begin Source File SOURCE=..\..\gzclose.c # End Source File # Begin Source File SOURCE=..\..\gzlib.c # End Source File # Begin Source File SOURCE=..\..\gzread.c # End Source File # Begin Source File SOURCE=..\..\gzwrite.c # End Source File # Begin Source File SOURCE=..\..\infback.c # End Source File # Begin Source File SOURCE=..\..\inffast.c # End Source File # Begin Source File SOURCE=..\..\inflate.c # End Source File # Begin Source File SOURCE=..\..\inftrees.c # End Source File # Begin Source File SOURCE=..\..\trees.c # End Source File # Begin Source File SOURCE=..\..\uncompr.c # End Source File # Begin Source File SOURCE=..\..\win32\zlib.def !IF "$(CFG)" == "zlib - Win32 DLL ASM Release" !ELSEIF "$(CFG)" == "zlib - Win32 DLL ASM Debug" !ELSEIF "$(CFG)" == "zlib - Win32 DLL Release" !ELSEIF "$(CFG)" == "zlib - Win32 DLL Debug" !ELSEIF "$(CFG)" == "zlib - Win32 LIB ASM Release" # PROP Exclude_From_Build 1 !ELSEIF "$(CFG)" == "zlib - Win32 LIB ASM Debug" # PROP Exclude_From_Build 1 !ELSEIF "$(CFG)" == "zlib - Win32 LIB Release" # PROP Exclude_From_Build 1 !ELSEIF "$(CFG)" == "zlib - Win32 LIB Debug" # PROP Exclude_From_Build 1 !ENDIF # End Source File # Begin Source File SOURCE=..\..\zutil.c # End Source File # End 
Group # Begin Group "Header Files" # PROP Default_Filter "h;hpp;hxx;hm;inl" # Begin Source File SOURCE=..\..\crc32.h # End Source File # Begin Source File SOURCE=..\..\deflate.h # End Source File # Begin Source File SOURCE=..\..\inffast.h # End Source File # Begin Source File SOURCE=..\..\inffixed.h # End Source File # Begin Source File SOURCE=..\..\inflate.h # End Source File # Begin Source File SOURCE=..\..\inftrees.h # End Source File # Begin Source File SOURCE=..\..\trees.h # End Source File # Begin Source File SOURCE=..\..\zconf.h # End Source File # Begin Source File SOURCE=..\..\zlib.h # End Source File # Begin Source File SOURCE=..\..\zutil.h # End Source File # End Group # Begin Group "Resource Files" # PROP Default_Filter "ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe" # Begin Source File SOURCE=..\..\win32\zlib1.rc # End Source File # End Group # Begin Group "Assembler Files (Unsupported)" # PROP Default_Filter "asm;obj;c;cpp;cxx;h;hpp;hxx" # Begin Source File SOURCE=..\..\contrib\masmx86\gvmat32.asm !IF "$(CFG)" == "zlib - Win32 DLL ASM Release" # Begin Custom Build - Assembling... IntDir=.\Win32_DLL_ASM_Release InputPath=..\..\contrib\masmx86\gvmat32.asm InputName=gvmat32 "$(IntDir)\$(InputName).obj" : $(SOURCE) "$(INTDIR)" "$(OUTDIR)" ml.exe /nologo /c /coff /Cx /Fo"$(IntDir)\$(InputName).obj" "$(InputPath)" # End Custom Build !ELSEIF "$(CFG)" == "zlib - Win32 DLL ASM Debug" # Begin Custom Build - Assembling... IntDir=.\Win32_DLL_ASM_Debug InputPath=..\..\contrib\masmx86\gvmat32.asm InputName=gvmat32 "$(IntDir)\$(InputName).obj" : $(SOURCE) "$(INTDIR)" "$(OUTDIR)" ml.exe /nologo /c /coff /Cx /Zi /Fo"$(IntDir)\$(InputName).obj" "$(InputPath)" # End Custom Build !ELSEIF "$(CFG)" == "zlib - Win32 DLL Release" # PROP Exclude_From_Build 1 !ELSEIF "$(CFG)" == "zlib - Win32 DLL Debug" # PROP Exclude_From_Build 1 !ELSEIF "$(CFG)" == "zlib - Win32 LIB ASM Release" # Begin Custom Build - Assembling... 
IntDir=.\Win32_LIB_ASM_Release InputPath=..\..\contrib\masmx86\gvmat32.asm InputName=gvmat32 "$(IntDir)\$(InputName).obj" : $(SOURCE) "$(INTDIR)" "$(OUTDIR)" ml.exe /nologo /c /coff /Cx /Fo"$(IntDir)\$(InputName).obj" "$(InputPath)" # End Custom Build !ELSEIF "$(CFG)" == "zlib - Win32 LIB ASM Debug" # Begin Custom Build - Assembling... IntDir=.\Win32_LIB_ASM_Debug InputPath=..\..\contrib\masmx86\gvmat32.asm InputName=gvmat32 "$(IntDir)\$(InputName).obj" : $(SOURCE) "$(INTDIR)" "$(OUTDIR)" ml.exe /nologo /c /coff /Cx /Zi /Fo"$(IntDir)\$(InputName).obj" "$(InputPath)" # End Custom Build !ELSEIF "$(CFG)" == "zlib - Win32 LIB Release" # PROP Exclude_From_Build 1 !ELSEIF "$(CFG)" == "zlib - Win32 LIB Debug" # PROP Exclude_From_Build 1 !ENDIF # End Source File # Begin Source File SOURCE=..\..\contrib\masmx86\gvmat32c.c !IF "$(CFG)" == "zlib - Win32 DLL ASM Release" # ADD CPP /I "..\.." !ELSEIF "$(CFG)" == "zlib - Win32 DLL ASM Debug" # ADD CPP /I "..\.." !ELSEIF "$(CFG)" == "zlib - Win32 DLL Release" # PROP Exclude_From_Build 1 # ADD CPP /I "..\.." !ELSEIF "$(CFG)" == "zlib - Win32 DLL Debug" # PROP Exclude_From_Build 1 # ADD CPP /I "..\.." !ELSEIF "$(CFG)" == "zlib - Win32 LIB ASM Release" # ADD CPP /I "..\.." !ELSEIF "$(CFG)" == "zlib - Win32 LIB ASM Debug" # ADD CPP /I "..\.." !ELSEIF "$(CFG)" == "zlib - Win32 LIB Release" # PROP Exclude_From_Build 1 # ADD CPP /I "..\.." !ELSEIF "$(CFG)" == "zlib - Win32 LIB Debug" # PROP Exclude_From_Build 1 # ADD CPP /I "..\.." !ENDIF # End Source File # Begin Source File SOURCE=..\..\contrib\masmx86\inffas32.asm !IF "$(CFG)" == "zlib - Win32 DLL ASM Release" # Begin Custom Build - Assembling... IntDir=.\Win32_DLL_ASM_Release InputPath=..\..\contrib\masmx86\inffas32.asm InputName=inffas32 "$(IntDir)\$(InputName).obj" : $(SOURCE) "$(INTDIR)" "$(OUTDIR)" ml.exe /nologo /c /coff /Cx /Fo"$(IntDir)\$(InputName).obj" "$(InputPath)" # End Custom Build !ELSEIF "$(CFG)" == "zlib - Win32 DLL ASM Debug" # Begin Custom Build - Assembling... 
IntDir=.\Win32_DLL_ASM_Debug InputPath=..\..\contrib\masmx86\inffas32.asm InputName=inffas32 "$(IntDir)\$(InputName).obj" : $(SOURCE) "$(INTDIR)" "$(OUTDIR)" ml.exe /nologo /c /coff /Cx /Zi /Fo"$(IntDir)\$(InputName).obj" "$(InputPath)" # End Custom Build !ELSEIF "$(CFG)" == "zlib - Win32 DLL Release" # PROP Exclude_From_Build 1 !ELSEIF "$(CFG)" == "zlib - Win32 DLL Debug" # PROP Exclude_From_Build 1 !ELSEIF "$(CFG)" == "zlib - Win32 LIB ASM Release" # Begin Custom Build - Assembling... IntDir=.\Win32_LIB_ASM_Release InputPath=..\..\contrib\masmx86\inffas32.asm InputName=inffas32 "$(IntDir)\$(InputName).obj" : $(SOURCE) "$(INTDIR)" "$(OUTDIR)" ml.exe /nologo /c /coff /Cx /Fo"$(IntDir)\$(InputName).obj" "$(InputPath)" # End Custom Build !ELSEIF "$(CFG)" == "zlib - Win32 LIB ASM Debug" # Begin Custom Build - Assembling... IntDir=.\Win32_LIB_ASM_Debug InputPath=..\..\contrib\masmx86\inffas32.asm InputName=inffas32 "$(IntDir)\$(InputName).obj" : $(SOURCE) "$(INTDIR)" "$(OUTDIR)" ml.exe /nologo /c /coff /Cx /Zi /Fo"$(IntDir)\$(InputName).obj" "$(InputPath)" # End Custom Build !ELSEIF "$(CFG)" == "zlib - Win32 LIB Release" # PROP Exclude_From_Build 1 !ELSEIF "$(CFG)" == "zlib - Win32 LIB Debug" # PROP Exclude_From_Build 1 !ENDIF # End Source File # End Group # Begin Source File SOURCE=.\README.txt # End Source File # End Target # End Project
{ "pile_set_name": "Github" }
{# # This file is part of SolidInvoice package. # # (c) 2013-2015 Pierre du Plessis <info@customscripts.co.za> # # This source file is subject to the MIT license that is bundled # with this source code in the file LICENSE. #} {% extends '@SolidInvoiceCore/Layout/base.html.twig' %} {% block heading %} {% set title = block('title') is defined ? block('title') %} {% if title is not empty %} <fieldset class="page-heading"> <legend> <h3>{{ block('title') }}</h3> </legend> </fieldset> {% endif %} {% endblock %} {% block scripts %} {% set script = block("script") ?? null %} {% if script is not empty %} <script> {{ script|raw }} </script> {% endif %} {% endblock %} {% block header %} {% include "@SolidInvoiceCore/Menu/top.html.twig" with {"title" : app_name, "header_content" : header_content|default('')} %} {% endblock header %} {% block footer %} <small>{{ "powered_by"|trans }} <a href="http://solidinvoice.co">{{ constant('SolidInvoice\\CoreBundle\\SolidInvoiceCoreBundle::APP_NAME') }}</a> - {{ app_version }}</small> {% endblock footer %} {% block body_bottom %} <div id="modal-container" class="modal" tabindex="-1"></div> {% endblock body_bottom %} {% block body %} <aside class="main-sidebar sidebar-dark-primary elevation-4"> <a class="brand-link" href="{{ url('_dashboard') }}"> {{ app_logo() }} <span class="brand-text font-weight-light"> {{ app_name }} </span> </a> <section class="sidebar"> <div class="user-panel mt-3 pb-3 mb-3 d-flex"> <div class="image"> {#<img src="http://t2.tagstat.com/im/people/silhouette_m_300.png" class="img-circle" alt="User Image">#} <img src="https://gravatar.com/avatar/{{ app.user.email|md5 }}?d=mm" class="img-circle" alt="User Image"> </div> <div class="info text-white"> <p>{{ app.user.username }}</p> <a href="{{ path('_profile') }}">{{ icon('envelope') }} {{ app.user.email }}</a> </div> </div> {{ menu('sidebar') }} {% if block('sidebar') is defined %} <nav class="mt-2"> {{ block('sidebar') }} </nav> {% endif %} </section> </aside> <div 
class="content-wrapper"> <section class="content-header"> {{ block('heading') is defined ? block('heading') }} </section> <section class="content"> {% include "@SolidInvoiceCore/flash.html.twig" %} {{ block('content') is defined ? block('content') }} </section> </div> {% endblock body %}
{ "pile_set_name": "Github" }
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> <html xmlns="http://www.w3.org/1999/xhtml"> <head> <meta name="generator" content="HTML Tidy for Linux (vers 25 March 2009), see www.w3.org" /> <title></title> </head> <body> </body> </html>
{ "pile_set_name": "Github" }
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.coprocessor; import java.io.IOException; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.Set; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Row; import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.metrics.MetricRegistry; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.LossyCounting; import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableMap; /** * A coprocessor that collects metrics from meta table. * <p> * These metrics will be available through the regular Hadoop metrics2 sinks (ganglia, opentsdb, * etc) as well as JMX output. 
* </p> * @see MetaTableMetrics */ @InterfaceAudience.Private public class MetaTableMetrics implements RegionCoprocessor { private ExampleRegionObserverMeta observer; private MetricRegistry registry; private LossyCounting<String> clientMetricsLossyCounting, regionMetricsLossyCounting; private boolean active = false; private Set<String> metrics = new HashSet<>(); enum MetaTableOps { GET, PUT, DELETE, } private ImmutableMap<Class<? extends Row>, MetaTableOps> opsNameMap = ImmutableMap.<Class<? extends Row>, MetaTableOps>builder() .put(Put.class, MetaTableOps.PUT) .put(Get.class, MetaTableOps.GET) .put(Delete.class, MetaTableOps.DELETE) .build(); class ExampleRegionObserverMeta implements RegionCoprocessor, RegionObserver { @Override public Optional<RegionObserver> getRegionObserver() { return Optional.of(this); } @Override public void preGetOp(ObserverContext<RegionCoprocessorEnvironment> e, Get get, List<Cell> results) throws IOException { registerAndMarkMetrics(e, get); } @Override public void prePut(ObserverContext<RegionCoprocessorEnvironment> e, Put put, WALEdit edit, Durability durability) throws IOException { registerAndMarkMetrics(e, put); } @Override public void preDelete(ObserverContext<RegionCoprocessorEnvironment> e, Delete delete, WALEdit edit, Durability durability) { registerAndMarkMetrics(e, delete); } private void registerAndMarkMetrics(ObserverContext<RegionCoprocessorEnvironment> e, Row row){ if (!active || !isMetaTableOp(e)) { return; } tableMetricRegisterAndMark(row); clientMetricRegisterAndMark(); regionMetricRegisterAndMark(row); opMetricRegisterAndMark(row); opWithClientMetricRegisterAndMark(row); } /** * Get table name from Ops such as: get, put, delete. * @param op such as get, put or delete. */ private String getTableNameFromOp(Row op) { final String tableRowKey = Bytes.toString(op.getRow()); if (StringUtils.isEmpty(tableRowKey)) { return null; } final String[] splits = tableRowKey.split(","); return splits.length > 0 ? 
splits[0] : null; } /** * Get regionId from Ops such as: get, put, delete. * @param op such as get, put or delete. */ private String getRegionIdFromOp(Row op) { final String tableRowKey = Bytes.toString(op.getRow()); if (StringUtils.isEmpty(tableRowKey)) { return null; } final String[] splits = tableRowKey.split(","); return splits.length > 2 ? splits[2] : null; } private boolean isMetaTableOp(ObserverContext<RegionCoprocessorEnvironment> e) { return TableName.META_TABLE_NAME .equals(e.getEnvironment().getRegionInfo().getTable()); } private void clientMetricRegisterAndMark() { // Mark client metric String clientIP = RpcServer.getRemoteIp() != null ? RpcServer.getRemoteIp().toString() : null; if (clientIP == null || clientIP.isEmpty()) { return; } String clientRequestMeter = clientRequestMeterName(clientIP); clientMetricsLossyCounting.add(clientRequestMeter); registerAndMarkMeter(clientRequestMeter); } private void tableMetricRegisterAndMark(Row op) { // Mark table metric String tableName = getTableNameFromOp(op); if (tableName == null || tableName.isEmpty()) { return; } String tableRequestMeter = tableMeterName(tableName); registerAndMarkMeter(tableRequestMeter); } private void regionMetricRegisterAndMark(Row op) { // Mark region metric String regionId = getRegionIdFromOp(op); if (regionId == null || regionId.isEmpty()) { return; } String regionRequestMeter = regionMeterName(regionId); regionMetricsLossyCounting.add(regionRequestMeter); registerAndMarkMeter(regionRequestMeter); } private void opMetricRegisterAndMark(Row op) { // Mark access type ["get", "put", "delete"] metric String opMeterName = opMeterName(op); if (opMeterName == null || opMeterName.isEmpty()) { return; } registerAndMarkMeter(opMeterName); } private void opWithClientMetricRegisterAndMark(Object op) { // // Mark client + access type metric String opWithClientMeterName = opWithClientMeterName(op); if (opWithClientMeterName == null || opWithClientMeterName.isEmpty()) { return; } 
registerAndMarkMeter(opWithClientMeterName); } // Helper function to register and mark meter if not present private void registerAndMarkMeter(String requestMeter) { if (requestMeter.isEmpty()) { return; } if(!registry.get(requestMeter).isPresent()){ metrics.add(requestMeter); } registry.meter(requestMeter).mark(); } private String opWithClientMeterName(Object op) { // Extract meter name containing the client IP String clientIP = RpcServer.getRemoteIp() != null ? RpcServer.getRemoteIp().toString() : ""; if (clientIP.isEmpty()) { return ""; } MetaTableOps ops = opsNameMap.get(op.getClass()); String opWithClientMeterName = ""; switch (ops) { case GET: opWithClientMeterName = String.format("MetaTable_client_%s_get_request", clientIP); break; case PUT: opWithClientMeterName = String.format("MetaTable_client_%s_put_request", clientIP); break; case DELETE: opWithClientMeterName = String.format("MetaTable_client_%s_delete_request", clientIP); break; default: break; } return opWithClientMeterName; } private String opMeterName(Object op) { // Extract meter name containing the access type MetaTableOps ops = opsNameMap.get(op.getClass()); String opMeterName = ""; switch (ops) { case GET: opMeterName = "MetaTable_get_request"; break; case PUT: opMeterName = "MetaTable_put_request"; break; case DELETE: opMeterName = "MetaTable_delete_request"; break; default: break; } return opMeterName; } private String tableMeterName(String tableName) { // Extract meter name containing the table name return String.format("MetaTable_table_%s_request", tableName); } private String clientRequestMeterName(String clientIP) { // Extract meter name containing the client IP if (clientIP.isEmpty()) { return ""; } return String.format("MetaTable_client_%s_lossy_request", clientIP); } private String regionMeterName(String regionId) { // Extract meter name containing the region ID return String.format("MetaTable_region_%s_lossy_request", regionId); } } @Override public Optional<RegionObserver> 
getRegionObserver() { return Optional.of(observer); } @Override public void start(CoprocessorEnvironment env) throws IOException { observer = new ExampleRegionObserverMeta(); if (env instanceof RegionCoprocessorEnvironment && ((RegionCoprocessorEnvironment) env).getRegionInfo().getTable() != null && ((RegionCoprocessorEnvironment) env).getRegionInfo().getTable() .equals(TableName.META_TABLE_NAME)) { RegionCoprocessorEnvironment regionCoprocessorEnv = (RegionCoprocessorEnvironment) env; registry = regionCoprocessorEnv.getMetricRegistryForRegionServer(); LossyCounting.LossyCountingListener<String> listener = key -> { registry.remove(key); metrics.remove(key); }; final Configuration conf = regionCoprocessorEnv.getConfiguration(); clientMetricsLossyCounting = new LossyCounting<>("clientMetaMetrics", conf, listener); regionMetricsLossyCounting = new LossyCounting<>("regionMetaMetrics", conf, listener); // only be active mode when this region holds meta table. active = true; } } @Override public void stop(CoprocessorEnvironment env) throws IOException { // since meta region can move around, clear stale metrics when stop. for(String metric:metrics){ registry.remove(metric); } } }
{ "pile_set_name": "Github" }
<!DOCTYPE html> <html> <head> <meta http-equiv="Content-Type" content="text/html; charset=utf-8" /> <title>The source code</title> <link href="../resources/prettify/prettify.css" type="text/css" rel="stylesheet" /> <script type="text/javascript" src="../resources/prettify/prettify.js"></script> <style type="text/css"> .highlight { display: block; background-color: #ddd; } </style> <script type="text/javascript"> function highlight() { document.getElementById(location.hash.replace(/#/, "")).className = "highlight"; } </script> </head> <body onload="prettyPrint(); highlight();"> <pre class="prettyprint lang-js"><span id='Ext-chart-TipSurface'>/** </span> * @class Ext.chart.TipSurface * @ignore */ Ext.define('Ext.chart.TipSurface', { /* Begin Definitions */ extend: 'Ext.draw.Component', /* End Definitions */ spriteArray: false, renderFirst: true, constructor: function(config) { this.callParent([config]); if (config.sprites) { this.spriteArray = [].concat(config.sprites); delete config.sprites; } }, onRender: function() { var me = this, i = 0, l = 0, sp, sprites; this.callParent(arguments); sprites = me.spriteArray; if (me.renderFirst &amp;&amp; sprites) { me.renderFirst = false; for (l = sprites.length; i &lt; l; i++) { sp = me.surface.add(sprites[i]); sp.setAttributes({ hidden: false }, true); } } } }); </pre> </body> </html>
{ "pile_set_name": "Github" }
// Pull in the Bootstrap-dark themed button styles from the Syncfusion
// ej2-buttons package (resolved through the project's Sass include path).
@import 'ej2-buttons/styles/button/bootstrap-dark.scss';
{ "pile_set_name": "Github" }
# Event 110 - CustomTrackingRecord ###### Version: 0 ## Description None ## Data Dictionary |Standard Name|Field Name|Type|Description|Sample Value| |---|---|---|---|---| |TBD|InstanceId|GUID|None|`None`| |TBD|RecordNumber|Int64|None|`None`| |TBD|EventTime|FILETIME|None|`None`| |TBD|Name|UnicodeString|None|`None`| |TBD|ActivityName|UnicodeString|None|`None`| |TBD|ActivityId|UnicodeString|None|`None`| |TBD|ActivityInstanceId|UnicodeString|None|`None`| |TBD|ActivityTypeName|UnicodeString|None|`None`| |TBD|Data|UnicodeString|None|`None`| |TBD|Annotations|UnicodeString|None|`None`| |TBD|ProfileName|UnicodeString|None|`None`| |TBD|HostReference|UnicodeString|None|`None`| |TBD|AppDomain|UnicodeString|None|`None`| ## Tags * etw_level_Warning * etw_keywords_WFTracking EndToEndMonitoring HealthMonitoring Troubleshooting UserEvents * etw_task_CustomTrackingRecord
{ "pile_set_name": "Github" }
/************************************************************************ * Copyright(c) 2010, One Unified. All rights reserved. * * email: info@oneunified.net * * * * This file is provided as is WITHOUT ANY WARRANTY * * without even the implied warranty of * * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * * * * This software may not be used nor distributed without proper license * * agreement. * * * * See the file LICENSE.txt for redistribution information. * ************************************************************************/ #pragma once #include <string> //#include <OUSQL/SessionBase.h> // for the enumerations #include <OUSQL/Constants.h> #define SQLITE_DEFAULT_FOREIGN_KEYS 1 #define SQLITE_DEFAULT_FILE_FORMAT 4 #include "sqlite3.h" #include "StatementState.h" #include "Actions.h" namespace ou { namespace db { // needs to be called as db: public ISqlite3, SessionBase<db, ISqlite3> class ISqlite3 { public: typedef sqlite::structStatementState structStatementState; typedef ou::db::sqlite::Action_Assemble_TableDef Action_Assemble_TableDef; typedef ou::db::Action_Compose_Insert Action_Compose_Insert; typedef ou::db::Action_Compose_Update Action_Compose_Update; typedef ou::db::Action_Compose_Delete Action_Compose_Delete; typedef ou::db::sqlite::Action_Extract_Columns Action_Extract_Columns; typedef ou::db::sqlite::Action_Bind_Values Action_Bind_Values; ISqlite3(void); ~ISqlite3(void); void SessionOpen( const std::string& sDbFileName, enumOpenFlags = EOpenFlagsZero ); void SessionClose( void ); void PrepareStatement( structStatementState& statement, std::string& sStatement ); bool ExecuteStatement( structStatementState& statement ); // true when row available void ResetStatement( structStatementState& statement ); void CloseStatement( structStatementState& statement ); boost::int64_t GetLastRowId( void ) { return sqlite3_last_insert_rowid( m_db ); } protected: private: sqlite3* m_db; }; } // db } // ou
{ "pile_set_name": "Github" }
VideoCaptureDevice="Уређај за снимање видеа"
Device="Уређај"
ColorSpace.Default="Подразумевани"
ColorRange.Partial="Делимично"
ColorRange.Full="Потпуно"
ConfigureAudio="Подеси звук"
ConfigureVideo="Подеси видео"
ConfigureCrossbar="Подеси crossbar"
ResFPSType="Резолуција/FPS тип"
ResFPSType.Custom="Прилагођено"
ResFPSType.DevPreferred="Подразумевано за уређај"
FPS.Matching="Прилагоди излазном FPS-у"
FPS.Highest="Највиши FPS"
Resolution="Резолуција"
VideoFormat="Формат видеа"
VideoFormat.Any="Било који"
VideoFormat.Unknown="Непознато (%1)"
AudioOutputMode="Режим звучног излаза"
AudioOutputMode.Capture="Снимај само звук"
AudioOutputMode.DirectSound="Излаз на звук радне површине (DirectSound)"
AudioOutputMode.WaveOut="Излаз на звук радне површине (WaveOut)"
UseCustomAudioDevice="Користи специфичан уређај за звук"
AudioDevice="Уређај за звук"
Buffering="Баферовање"
Buffering.ToolTip="Када је омогућено, баферовање видео/звучних података осигурава течну и најпрецизнију\nмогућу репродукцију, али носи и последицу продуженог кашњења. Када је баферовање у\nупотреби са картицом за хватање видеа, препоручљиво је поставити картицу и\nпрограм на исти фрејмрејт да добијете најбоље резултате.\n\nКада је онемогућено, обезбеђује најмање кашњење репродукције, али уз последицу непрецизности\nрепродукованог фрејма. Ово је идеално за камере које снимају лица, или када желите\nда користите програмски прозор за преглед да бисте играли на конзоли.\n\nАутоматско-откривање (подразумевано) аутоматски омогућава ово ако уређај има кашњење и онемогућава\nако нема кашњења."
Buffering.AutoDetect="Аутоматско-откривање"
Buffering.Enable="Омогући"
Buffering.Disable="Онемогући"
Activate="Активирај"
Deactivate="Деактивирај"
FlipVertically="Преврни вертикално"
DeactivateWhenNotShowing="Деактивирај када се не приказује"
Bitrate="Bitrate"
Encoder.C985="AVerMedia h264 енкодер (c985)"
Encoder.C353="AVerMedia H.264 енкодер"
{ "pile_set_name": "Github" }
import Avatar from "@material-ui/core/Avatar";
import Card from "@material-ui/core/Card";
import CardContent from "@material-ui/core/CardContent";
import * as colors from "@material-ui/core/colors";
import { makeStyles } from "@material-ui/core/styles";
import Typography from "@material-ui/core/Typography";
import PersonIcon from "@material-ui/icons/Person";
import CRC from "crc-32";
import React from "react";

import { DateTime } from "../Date";

// Shade 500 of each hue; the avatar background is picked deterministically
// from the user's email so the same user always gets the same color.
const palette = [
  colors.amber,
  colors.blue,
  colors.cyan,
  colors.deepOrange,
  colors.deepPurple,
  colors.green,
  colors.indigo,
  colors.lightBlue,
  colors.lightGreen,
  colors.lime,
  colors.orange,
  colors.pink,
  colors.purple,
  colors.red,
  colors.teal,
  colors.yellow
].map(color => color[500]);

const useStyles = makeStyles(
  theme => ({
    avatar: {
      left: -45,
      position: "absolute",
      top: 0
    },
    card: {
      marginBottom: theme.spacing(3),
      marginLeft: theme.spacing(3),
      position: "relative"
    },
    cardContent: {
      "&:last-child": {
        padding: 16
      },
      boxShadow: "0px 5px 10px rgba(0, 0, 0, 0.05)"
    },
    root: { position: "relative" },
    title: {
      "& p": { fontSize: "14px" },
      alignItems: "center",
      display: "flex",
      justifyContent: "space-between",
      marginBottom: theme.spacing(),
      paddingLeft: theme.spacing(3)
    }
  }),
  { name: "TimelineNote" }
);

interface TimelineNoteProps {
  date: string;
  message: string | null;
  user: {
    email: string;
  };
}

/**
 * Renders a single timeline note: a colored avatar, the author's email,
 * a timestamp and the note message (newlines rendered as <br />).
 */
export const TimelineNote: React.FC<TimelineNoteProps> = props => {
  const { date, user, message } = props;

  const classes = useStyles(props);

  // CRC.str returns a *signed* 32-bit integer; take the absolute value so the
  // modulo never yields a negative index (palette[-n] would be undefined).
  const avatarColor = user
    ? palette[Math.abs(CRC.str(user.email)) % palette.length]
    : undefined;

  return (
    <div className={classes.root}>
      {user && (
        <Avatar
          className={classes.avatar}
          style={{ background: avatarColor }}
        >
          <PersonIcon />
        </Avatar>
      )}
      <div className={classes.title}>
        <Typography>{user?.email}</Typography>
        <Typography>
          <DateTime date={date} />
        </Typography>
      </div>
      <Card className={classes.card}>
        <CardContent className={classes.cardContent}>
          {/* message is typed string | null — guard against null, and use a
              global regex so *every* newline becomes <br /> (a string pattern
              only replaces the first occurrence).
              FIXME(security): message is injected via dangerouslySetInnerHTML
              without sanitization — XSS risk if notes can contain HTML. */}
          <Typography
            dangerouslySetInnerHTML={{
              __html: (message ?? "").replace(/\n/g, "<br />")
            }}
          />
        </CardContent>
      </Card>
    </div>
  );
};
TimelineNote.displayName = "TimelineNote";
export default TimelineNote;
{ "pile_set_name": "Github" }
describe GridService do it { should be_timestamped_document } it { should be_kind_of(EventStream) } it { should have_fields(:image_name, :name, :user, :entrypoint, :state, :net, :log_driver, :pid, :stop_signal).of_type(String) } it { should have_fields(:container_count, :memory, :memory_swap, :cpu_shares, :revision, :stack_revision, :shm_size).of_type(Integer) } it { should have_fields(:affinity, :cmd, :ports, :env, :volumes_from, :cap_add, :cap_drop).of_type(Array) } it { should have_fields(:labels, :log_opts).of_type(Hash) } it { should have_fields(:deploy_requested_at, :deployed_at).of_type(DateTime) } it { should have_fields(:privileged).of_type(Mongoid::Boolean) } it { should belong_to(:grid) } it { should belong_to(:stack) } it { should embed_many(:grid_service_links) } it { should embed_many(:secrets) } it { should embed_many(:hooks) } it { should embed_many(:service_volumes) } it { should embed_one(:deploy_opts) } it { should have_many(:containers) } it { should have_many(:container_logs) } it { should have_many(:container_stats) } it { should have_many(:audit_logs) } it { should have_many(:grid_service_instances) } it { should have_many(:grid_service_deploys) } it { should have_many(:event_logs) } it { should validate_presence_of(:name) } it { should validate_presence_of(:image_name) } it { should validate_presence_of(:grid_id) } it { should validate_presence_of(:stack_id) } it { should have_index_for(grid_id: 1) } it { should have_index_for(grid_service_ids: 1) } let(:grid) do Grid.create(name: 'test-grid') end let :stack do Stack.create(grid: grid, name: 'stack') end let :stack2 do Stack.create(grid: grid, name: 'stack2') end let(:grid_service) do GridService.create!(grid: grid, name: 'redis', image_name: 'redis:2.8') end let(:stack_service) do GridService.create!(grid: grid, stack: stack, name: 'redis', image_name: 'redis:2.8') end describe '#qualified_name' do it 'returns full path for stack_service service' do expect(stack_service.qualified_name).to 
eq('stack/redis') end it 'returns path without stack for stackless service' do expect(grid_service.qualified_name).to eq('redis') end end describe '#stateful?' do it 'returns true if stateful' do subject.stateful = true expect(subject.stateful?).to be_truthy end it 'returns false if not stateful' do subject.stateful = false expect(subject.stateful?).to be_falsey end end describe '#stateless?' do it 'returns true if stateless' do subject.stateful = false expect(subject.stateless?).to be_truthy end it 'returns false if not stateless' do subject.stateful = true expect(subject.stateless?).to be_falsey end end describe '#daemon?' do it 'returns true if strategy is daemon' do subject.strategy = 'daemon' expect(subject.daemon?).to be_truthy end it 'returns false if strategy is not daemon' do expect(subject.daemon?).to be_falsey end end describe '#running?' do it 'returns true if service is running' do subject.state = 'running' expect(subject.running?).to eq(true) end it 'returns false if service is not running' do subject.state = 'stopped' expect(subject.running?).to eq(false) end end describe '#stopped?' 
do it 'returns true if service is stopped' do subject.state = 'stopped' expect(subject.stopped?).to eq(true) end it 'returns false if service is not stopped' do subject.state = 'running' expect(subject.stopped?).to eq(false) end end describe '#set_state' do it 'sets value of state column' do grid_service.set_state('running') expect(grid_service.state).to eq('running') end it 'does not modify updated_at field' do five_hours_ago = Time.now.utc - 5.hours grid_service.timeless.update_attribute(:updated_at, five_hours_ago) grid_service.clear_timeless_option grid_service.set_state('running') expect(grid_service.updated_at).to eq(five_hours_ago) end it 'publishes update event' do expect(grid_service).to receive(:publish_update_event).once grid_service.set_state('running') end end describe '#container_by_name' do it 'returns related container by name' do container = grid_service.containers.create!(name: 'redis-1') expect(grid_service.container_by_name(container.name)).to eq(container) end it 'returns nil if container is not found' do expect(grid_service.container_by_name('not_found')).to be_nil end end describe '#linked_from_services' do it 'returns Mongoid::Criteria' do expect(grid_service.linked_from_services).to be_instance_of(Mongoid::Criteria) end it 'returns services that service has been linked from' do a = GridService.create!( grid: grid, name: 'aaa', image_name: 'aaa:latest', grid_service_links: [ GridServiceLink.new(linked_grid_service_id: grid_service.id, alias: 'foo') ] ) b = GridService.create!( grid: grid, name: 'bbb', image_name: 'bbb:latest', grid_service_links: [ GridServiceLink.new(linked_grid_service_id: a.id, alias: 'a-foo') ] ) expect(grid_service.linked_from_services.count).to eq(1) expect(grid_service.linked_from_services.first.id).to eq(a.id) expect(a.linked_from_services.count).to eq(1) expect(a.linked_from_services.first.id).to eq(b.id) end end describe '#load_balancer?' 
do it 'returns true if latest official kontena/lb image' do subject.image_name = 'kontena/lb:latest' expect(subject.load_balancer?).to eq(true) end it 'returns true if official kontena/lb image' do subject.image_name = 'kontena/lb:edge' expect(subject.load_balancer?).to eq(true) end it 'returns true if custom image with KONTENA_SERVICE_ROLE=lb env variable' do subject.image_name = 'custom/lb:latest' subject.env << 'KONTENA_SERVICE_ROLE=lb' expect(subject.load_balancer?).to eq(true) end it 'returns false if not official kontena/lb image' do subject.image_name = 'acme/lb:latest' expect(subject.load_balancer?).to eq(false) end it 'returns false by default' do expect(subject.load_balancer?).to eq(false) end end describe '#health_status' do it 'returns health status' do healthy_container = grid_service.containers.create!(name: 'redis-1') healthy_container.update_attribute(:health_status, 'healthy') unhealthy_container = grid_service.containers.create!(name: 'redis-2') unhealthy_container.update_attribute(:health_status, 'unhealthy') expect(grid_service.health_status).to eq({healthy: 1, unhealthy: 1, total: 2}) end it 'returns nil if container is not found' do expect(grid_service.container_by_name('not_found')).to be_nil end end describe '#name_with_stack' do it 'returns stackless service name without stack' do expect(grid_service.name_with_stack).to eq 'redis' end it 'returns stack service name with stack' do expect(stack_service.name_with_stack).to eq 'stack.redis' end end describe '#stack_exposed?' 
do it 'returns true if service is exposed via stack' do stack = Stack.create!(name: 'redis') stack.stack_revisions.create(expose: 'redis') service = GridService.create!(grid: grid, name: 'redis', image_name: 'redis:2.8', stack: stack) expect(service.stack_exposed?).to be_truthy end it 'returns false if service is not exposed via stack' do expect(grid_service.stack_exposed?).to be_falsey end end describe '#affinity' do it 'returns empty array by default' do expect(subject.affinity).to eq([]) end it 'returns service affinity if set' do subject.grid = grid subject.affinity = ['label==foo=bar'] expect(subject.affinity).to eq(['label==foo=bar']) end it 'returns default affinity from grid if service affinity is not set' do grid.default_affinity = ['label!=type=ssd'] subject.grid = grid expect(subject.affinity).to eq(['label!=type=ssd']) end end describe '#env_hash' do it 'should build a valid hash' do expect(subject).to receive(:env).and_return( [ 'FOO=bar', 'FOO=BAR=bar', 'BAR=', 'DOG' ] ) expect(subject.env_hash).to eq( { 'FOO' => 'BAR=bar', 'BAR' => '', 'DOG' => nil } ) end end describe '#deploying?' 
do it 'returns false when no deployments' do expect(subject.deploying?).to be_falsey end it 'returns true when queued deployments' do GridServiceDeploy.create!(grid_service: subject, started_at: nil, finished_at: nil) expect(subject.deploying?).to be_truthy end it 'returns true when un-finished deployments' do GridServiceDeploy.create!(grid_service: subject, started_at: 10.minutes.ago, finished_at: nil) expect(subject.deploying?).to be_truthy end it 'returns false when un-finished stale deployments' do GridServiceDeploy.create!(grid_service: subject, started_at: 32.minutes.ago, finished_at: nil) expect(subject.deploying?).to be_falsey end it 'returns false when finished deployments' do GridServiceDeploy.create!(grid_service: subject, started_at: 32.minutes.ago, finished_at: 26.minutes.ago) expect(subject.deploying?).to be_falsey end it 'returns false when finished deployments with no started_at' do GridServiceDeploy.create!(grid_service: subject, started_at: nil, finished_at: 26.minutes.ago) expect(subject.deploying?).to be_falsey end end end
{ "pile_set_name": "Github" }
#!/bin/bash
# Copyright 2012 Citrix Systems, Inc.  Licensed under the
# Apache License, Version 2.0 (the "License"); you may not use this
# file except in compliance with the License.  Citrix Systems, Inc.
# reserves all rights not expressly granted by the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Automatically generated by addcopyright.py at 04/03/2012

# Ask the management server to stop a router VM via its unauthenticated
# integration API port (8096).
#
# Usage: stopRouter [-h management-server] -i vmid

usage() {
  # Quote expansions so a path containing spaces cannot split the message.
  printf "Stop Router: %s: -h management-server -i vmid\n" "$(basename "$0")" >&2
}

iflag=
hflag=
host="127.0.0.1" # defaults to localhost
vmid=

while getopts 'h:i:' OPTION
do
  case $OPTION in
  h)    hflag=1
        host="$OPTARG"
        ;;
  i)    iflag=1
        vmid="$OPTARG"
        ;;
  ?)    usage
        exit 2
        ;;
  esac
done

# -i is mandatory; -h falls back to localhost.
if [[ $iflag != "1" ]]
then
  usage
  exit 2
fi

# Raw HTTP/1.0 request; \n\n is expanded by `echo -e` below.
stop_vm="GET http://$host:8096/client/?command=stopRouter&id=$vmid HTTP/1.0\n\n"

# Quote "$stop_vm" and "$host" to prevent word splitting / globbing of the
# request line; wait up to 60s for the server to answer.
echo -e "$stop_vm" | nc -v -w 60 "$host" 8096
{ "pile_set_name": "Github" }
// TEST_CONFIG /* TEST_BUILD_OUTPUT .*designatedinit.m:\d+:\d+: warning: designated initializer should only invoke a designated initializer on 'super'.* .*designatedinit.m:\d+:\d+: note: .* .*designatedinit.m:\d+:\d+: warning: method override for the designated initializer of the superclass '-init' not found.* .*NSObject.h:\d+:\d+: note: .* END */ #define NS_ENFORCE_NSOBJECT_DESIGNATED_INITIALIZER 1 #include "test.h" #include <objc/NSObject.h> @interface C : NSObject -(id) initWithInt:(int)i NS_DESIGNATED_INITIALIZER; @end @implementation C -(id) initWithInt:(int)__unused i { return [self init]; } @end int main() { succeed(__FILE__); }
{ "pile_set_name": "Github" }
// Module definition for the AWS SDK for Go.
module github.com/aws/aws-sdk-go

// go-jmespath is the SDK's only direct dependency (JMESPath expression
// evaluation, e.g. for API response filtering).
require github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af
{ "pile_set_name": "Github" }
/******************************************************************************* * Copyright (c) 2016, 2020 Eurotech and/or its affiliates and others * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Eurotech - initial API and implementation *******************************************************************************/ package org.eclipse.kapua.service.datastore.internal.schema; import org.eclipse.kapua.service.datastore.client.DatamodelMappingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import org.eclipse.kapua.commons.util.KapuaDateUtils; import org.eclipse.kapua.service.datastore.client.SchemaKeys; /** * Metric info schema definition * * @since 1.0 */ public class MetricInfoSchema { private MetricInfoSchema() { } /** * Metric information schema name */ public static final String METRIC_TYPE_NAME = "metric"; /** * Metric information - channel */ public static final String METRIC_CHANNEL = "channel"; /** * Metric information - client identifier */ public static final String METRIC_CLIENT_ID = "client_id"; /** * Metric information - scope id */ public static final String METRIC_SCOPE_ID = "scope_id"; /** * Metric information - metric map prefix */ public static final String METRIC_MTR = "metric"; /** * Metric information - name */ public static final String METRIC_MTR_NAME = "name"; /** * Metric information - full name (so with the metric type suffix) */ public static final String METRIC_MTR_NAME_FULL = "metric.name"; /** * Metric information - type */ public static final String METRIC_MTR_TYPE = "type"; /** * Metric information - full type (so with the metric type suffix) */ public static final String METRIC_MTR_TYPE_FULL = "metric.type"; /** * Metric information - value */ public 
static final String METRIC_MTR_VALUE = "value"; /** * Metric information - full value (so with the metric type suffix) */ public static final String METRIC_MTR_VALUE_FULL = "metric.value"; /** * Metric information - message timestamp (of the first message published in this channel) */ public static final String METRIC_MTR_TIMESTAMP = "timestamp"; /** * Metric information - message timestamp (of the first message published in this channel, with the metric type suffix) */ public static final String METRIC_MTR_TIMESTAMP_FULL = "metric.timestamp"; /** * Metric information - message identifier (of the first message published in this channel) */ public static final String METRIC_MTR_MSG_ID = "message_id"; /** * Metric information - full message identifier (of the first message published in this channel, with the metric type suffix) */ public static final String METRIC_MTR_MSG_ID_FULL = "metric.message_id"; /** * Create and return the Json representation of the metric info schema * * @param allEnable * @param sourceEnable * @return * @throws DatamodelMappingException */ public static JsonNode getMetricTypeSchema(boolean allEnable, boolean sourceEnable) throws DatamodelMappingException { ObjectNode rootNode = SchemaUtil.getObjectNode(); ObjectNode metricName = SchemaUtil.getObjectNode(); ObjectNode sourceMetric = SchemaUtil.getField( new KeyValueEntry[] { new KeyValueEntry(SchemaKeys.KEY_ENABLED, sourceEnable) }); metricName.set(SchemaKeys.KEY_SOURCE, sourceMetric); ObjectNode allMetric = SchemaUtil.getField( new KeyValueEntry[] { new KeyValueEntry(SchemaKeys.KEY_ENABLED, allEnable) }); metricName.set(SchemaKeys.KEY_ALL, allMetric); ObjectNode propertiesNode = SchemaUtil.getObjectNode(); ObjectNode metricAccount = SchemaUtil.getField( new KeyValueEntry[] { new KeyValueEntry(SchemaKeys.KEY_TYPE, SchemaKeys.TYPE_KEYWORD), new KeyValueEntry(SchemaKeys.KEY_INDEX, SchemaKeys.VALUE_TRUE) }); propertiesNode.set(METRIC_SCOPE_ID, metricAccount); ObjectNode metricClientId = 
SchemaUtil.getField( new KeyValueEntry[] { new KeyValueEntry(SchemaKeys.KEY_TYPE, SchemaKeys.TYPE_KEYWORD), new KeyValueEntry(SchemaKeys.KEY_INDEX, SchemaKeys.VALUE_TRUE) }); propertiesNode.set(METRIC_CLIENT_ID, metricClientId); ObjectNode metricChannel = SchemaUtil.getField( new KeyValueEntry[] { new KeyValueEntry(SchemaKeys.KEY_TYPE, SchemaKeys.TYPE_KEYWORD), new KeyValueEntry(SchemaKeys.KEY_INDEX, SchemaKeys.VALUE_TRUE) }); propertiesNode.set(METRIC_CHANNEL, metricChannel); ObjectNode metricMtrNode = SchemaUtil.getField( new KeyValueEntry[] { new KeyValueEntry(SchemaKeys.KEY_TYPE, SchemaKeys.TYPE_OBJECT), new KeyValueEntry(SchemaKeys.KEY_ENABLED, true), new KeyValueEntry(SchemaKeys.KEY_DYNAMIC, false), new KeyValueEntry(SchemaKeys.KEY_INCLUDE_IN_ALL, false) }); ObjectNode metricMtrPropertiesNode = SchemaUtil.getObjectNode(); ObjectNode metricMtrNameNode = SchemaUtil.getField( new KeyValueEntry[] { new KeyValueEntry(SchemaKeys.KEY_TYPE, SchemaKeys.TYPE_KEYWORD), new KeyValueEntry(SchemaKeys.KEY_INDEX, SchemaKeys.VALUE_TRUE) }); metricMtrPropertiesNode.set(METRIC_MTR_NAME, metricMtrNameNode); ObjectNode metricMtrTypeNode = SchemaUtil.getField( new KeyValueEntry[] { new KeyValueEntry(SchemaKeys.KEY_TYPE, SchemaKeys.TYPE_KEYWORD), new KeyValueEntry(SchemaKeys.KEY_INDEX, SchemaKeys.VALUE_TRUE) }); metricMtrPropertiesNode.set(METRIC_MTR_TYPE, metricMtrTypeNode); ObjectNode metricMtrValueNode = SchemaUtil.getField( new KeyValueEntry[] { new KeyValueEntry(SchemaKeys.KEY_TYPE, SchemaKeys.TYPE_KEYWORD), new KeyValueEntry(SchemaKeys.KEY_INDEX, SchemaKeys.VALUE_TRUE) }); metricMtrPropertiesNode.set(METRIC_MTR_VALUE, metricMtrValueNode); ObjectNode metricMtrTimestampNode = SchemaUtil.getField( new KeyValueEntry[] { new KeyValueEntry(SchemaKeys.KEY_TYPE, SchemaKeys.TYPE_DATE), new KeyValueEntry(SchemaKeys.KEY_FORMAT, KapuaDateUtils.ISO_DATE_PATTERN) }); metricMtrPropertiesNode.set(METRIC_MTR_TIMESTAMP, metricMtrTimestampNode); ObjectNode metricMtrMsgIdNode = 
SchemaUtil.getField( new KeyValueEntry[] { new KeyValueEntry(SchemaKeys.KEY_TYPE, SchemaKeys.TYPE_KEYWORD), new KeyValueEntry(SchemaKeys.KEY_INDEX, SchemaKeys.VALUE_TRUE) }); metricMtrPropertiesNode.set(METRIC_MTR_MSG_ID, metricMtrMsgIdNode); metricMtrNode.set(SchemaKeys.FIELD_NAME_PROPERTIES, metricMtrPropertiesNode); propertiesNode.set(METRIC_MTR, metricMtrNode); metricName.set(SchemaKeys.FIELD_NAME_PROPERTIES, propertiesNode); rootNode.set(METRIC_TYPE_NAME, metricName); return rootNode; } }
{ "pile_set_name": "Github" }
// // fcntl.h // // Copyright (c) Microsoft Corporation. All rights reserved. // // File control options used by _open(). // #pragma once #ifndef _INC_FCNTL // include guard for 3rd party interop #define _INC_FCNTL #define _O_RDONLY 0x0000 // open for reading only #define _O_WRONLY 0x0001 // open for writing only #define _O_RDWR 0x0002 // open for reading and writing #define _O_APPEND 0x0008 // writes done at eof #define _O_CREAT 0x0100 // create and open file #define _O_TRUNC 0x0200 // open and truncate #define _O_EXCL 0x0400 // open only if file doesn't already exist // O_TEXT files have <cr><lf> sequences translated to <lf> on read()'s and <lf> // sequences translated to <cr><lf> on write()'s #define _O_TEXT 0x4000 // file mode is text (translated) #define _O_BINARY 0x8000 // file mode is binary (untranslated) #define _O_WTEXT 0x10000 // file mode is UTF16 (translated) #define _O_U16TEXT 0x20000 // file mode is UTF16 no BOM (translated) #define _O_U8TEXT 0x40000 // file mode is UTF8 no BOM (translated) // macro to translate the C 2.0 name used to force binary mode for files #define _O_RAW _O_BINARY #define _O_NOINHERIT 0x0080 // child process doesn't inherit file #define _O_TEMPORARY 0x0040 // temporary file bit (file is deleted when last handle is closed) #define _O_SHORT_LIVED 0x1000 // temporary storage file, try not to flush #define _O_OBTAIN_DIR 0x2000 // get information about a directory #define _O_SEQUENTIAL 0x0020 // file access is primarily sequential #define _O_RANDOM 0x0010 // file access is primarily random #if (defined _CRT_DECLARE_NONSTDC_NAMES && _CRT_DECLARE_NONSTDC_NAMES) || (!defined _CRT_DECLARE_NONSTDC_NAMES && !__STDC__) #define O_RDONLY _O_RDONLY #define O_WRONLY _O_WRONLY #define O_RDWR _O_RDWR #define O_APPEND _O_APPEND #define O_CREAT _O_CREAT #define O_TRUNC _O_TRUNC #define O_EXCL _O_EXCL #define O_TEXT _O_TEXT #define O_BINARY _O_BINARY #define O_RAW _O_BINARY #define O_TEMPORARY _O_TEMPORARY #define O_NOINHERIT _O_NOINHERIT #define 
O_SEQUENTIAL _O_SEQUENTIAL #define O_RANDOM _O_RANDOM #endif #endif // _INC_FCNTL
{ "pile_set_name": "Github" }
# -*- coding: utf-8 -*- from django.db import migrations from oscar.core.loading import get_model from ecommerce.core.constants import ENROLLMENT_CODE_PRODUCT_CLASS_NAME ProductAttribute = get_model("catalogue", "ProductAttribute") ProductClass = get_model("catalogue", "ProductClass") def create_idverifyreq_attribute(apps, schema_editor): """Create enrollment code 'id_verification_required' attribute.""" ProductAttribute.skip_history_when_saving = True enrollment_code_class = ProductClass.objects.get(name=ENROLLMENT_CODE_PRODUCT_CLASS_NAME) pa = ProductAttribute( product_class=enrollment_code_class, name='id_verification_required', code='id_verification_required', type='boolean', required=False ) pa.save() def remove_idverifyreq_attribute(apps, schema_editor): """Remove enrollment code 'id_verification_required' attribute.""" ProductAttribute.skip_history_when_saving = True enrollment_code_class = ProductClass.objects.get(name=ENROLLMENT_CODE_PRODUCT_CLASS_NAME) ProductAttribute.objects.get(product_class=enrollment_code_class, name='id_verification_required').delete() class Migration(migrations.Migration): dependencies = [ ('catalogue', '0001_initial'), ('catalogue', '0018_auto_20160530_0134') ] operations = [ migrations.RunPython(create_idverifyreq_attribute, remove_idverifyreq_attribute) ]
{ "pile_set_name": "Github" }
#ifndef _INC_PMCC4_SYSDEP_H_
#define _INC_PMCC4_SYSDEP_H_

/*-----------------------------------------------------------------------------
 * pmcc4_sysdep.h -
 *
 * Copyright (C) 2005 SBE, Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */

/* reduce multiple autoconf entries to a single definition */

#ifdef CONFIG_SBE_PMCC4_HDLC_V7_MODULE
#undef CONFIG_SBE_PMCC4_HDLC_V7
#define CONFIG_SBE_PMCC4_HDLC_V7 1
#endif

#ifdef CONFIG_SBE_PMCC4_NCOMM_MODULE
#undef CONFIG_SBE_PMCC4_NCOMM
#define CONFIG_SBE_PMCC4_NCOMM 1
#endif

/* FLUSH MACROS - if using ioremap_nocache(), then these can be NOOPS,
 * otherwise a memory barrier needs to be inserted.
 */

#define FLUSH_PCI_READ()  rmb()
#define FLUSH_PCI_WRITE() wmb()
#define FLUSH_MEM_READ()  rmb()
#define FLUSH_MEM_WRITE() wmb()

/*
 * System dependent callbacks routines, not inlined...
 * For inlined system dependent routines, see include/sbecom_inlinux_linux.h
 */

/*
 * passes received memory token back to the system, <user> is parameter from
 * sd_new_chan() used to create the channel which the data arrived on
 */
void sd_recv_consume(void *token, size_t len, void *user);

/* Remaining hooks are implemented by the system layer; each takes the
 * per-channel <user> cookie from sd_new_chan().  Exact semantics live with
 * the implementations (not visible here) -- names suggest transmit
 * enable/disable and link state query/notification; confirm against the
 * system-dependent sources before relying on them. */
void sd_disable_xmit (void *user);
void sd_enable_xmit (void *user);
int  sd_line_is_ok (void *user);
void sd_line_is_up (void *user);
void sd_line_is_down (void *user);
int  sd_queue_stopped (void *user);

#endif /*** _INC_PMCC4_SYSDEP_H_ ***/
{ "pile_set_name": "Github" }
# Import ``` import { Facebook } from 'react-social-sharing' ``` # Use ```react showSource: true --- <Facebook link="http://sharingbuttons.io"/> ``` ```react showSource: true --- <Facebook simple link="http://sharingbuttons.io"/> ``` ```react showSource: true --- <Facebook simpleReverse link="http://sharingbuttons.io"/> ``` ```react showSource: true --- <Facebook solidcircle big link="http://sharingbuttons.io"/> ``` ```react showSource: true --- <Facebook solidcircle medium link="http://sharingbuttons.io"/> ``` ```react showSource: true --- <Facebook solidcircle small link="http://sharingbuttons.io"/> ``` ```react showSource: true --- <Facebook solid big link="http://sharingbuttons.io"/> ``` ```react showSource: true --- <Facebook solid medium link="http://sharingbuttons.io"/> ``` ```react showSource: true --- <Facebook solid small link="http://sharingbuttons.io"/> ``` ## Custom Props ```react showSource: true --- const styles = { background: 'black' }; <Facebook style={styles} link="http://sharingbuttons.io"/> ```
{ "pile_set_name": "Github" }
/* ****************************************************************** **
**    OpenSees - Open System for Earthquake Engineering Simulation    **
**          Pacific Earthquake Engineering Research Center            **
**                                                                    **
**                                                                    **
** (C) Copyright 1999, The Regents of the University of California    **
** All Rights Reserved.                                               **
**                                                                    **
** Commercial use of this program without express permission of the   **
** University of California, Berkeley, is strictly prohibited.  See   **
** file 'COPYRIGHT'  in main directory for information on usage and   **
** redistribution,  and for a DISCLAIMER OF ALL WARRANTIES.           **
**                                                                    **
** Developed by:                                                      **
**   Frank McKenna (fmckenna@ce.berkeley.edu)                         **
**   Gregory L. Fenves (fenves@ce.berkeley.edu)                       **
**   Filip C. Filippou (filippou@ce.berkeley.edu)                     **
**                                                                    **
** ****************************************************************** */

// $Revision: 1.5 $
// $Date: 2008-12-03 23:43:16 $
// $Source: /usr/local/cvs/OpenSees/SRC/element/forceBeamColumn/HingeRadauBeamIntegration.cpp,v $

/*
 * Reference
 *   Scott, M. H. and G. L. Fenves. "Plastic Hinge Integration Methods for
 *   Force-Based Beam-Column Elements." Journal of Structural Engineering,
 *   132(2):244-252, February 2006.
 */

#include <HingeRadauBeamIntegration.h>
#include <ElementalLoad.h>

#include <Matrix.h>
#include <Vector.h>
#include <Channel.h>
#include <FEM_ObjectBroker.h>
#include <Information.h>
#include <Parameter.h>
#include <math.h>
#include <elementAPI.h>
#include <ID.h>

// Interpreter hook: parses (integrationTag, secTagI, lpI, secTagJ, lpJ,
// secTagE) and fills secTags with the 6 section tags used by this rule --
// hinge-I section first, four interior sections using secTagE, hinge-J
// section last.  Returns a new HingeRadauBeamIntegration, or 0 on parse error.
void* OPS_HingeRadauBeamIntegration(int& integrationTag, ID& secTags)
{
    if(OPS_GetNumRemainingInputArgs() < 6) {
	opserr<<"insufficient arguments:integrationTag,secTagI,lpI,secTagJ,lpJ,secTagE\n";
	return 0;
    }

    // inputs:
    int iData[4];
    double dData[2];
    int numData = 2;
    // first read pulls both integrationTag and secTagI into iData[0..1]
    if(OPS_GetIntInput(&numData,&iData[0]) < 0) {
	opserr << "WARNING: failed to get tag and secTagI\n";
	return 0;
    }
    numData = 1;
    if(OPS_GetDoubleInput(&numData,&dData[0]) < 0) {
	opserr << "WARNING: failed to get lpI\n";
	return 0;
    }
    if(OPS_GetIntInput(&numData,&iData[2]) < 0) {
	opserr << "WARNING: failed to get secTagJ\n";
	return 0;
    }
    if(OPS_GetDoubleInput(&numData,&dData[1]) < 0) {
	opserr << "WARNING: failed to get lpJ\n";
	return 0;
    }
    if(OPS_GetIntInput(&numData,&iData[3]) < 0) {
	opserr << "WARNING: failed to get secTagE\n";
	return 0;
    }

    integrationTag = iData[0];
    // 6 sections: end I (secTagI), four interior (secTagE), end J (secTagJ)
    secTags.resize(6);
    secTags(0) = iData[1];
    secTags(1) = iData[3];
    secTags(2) = iData[3];
    secTags(3) = iData[3];
    secTags(4) = iData[3];
    secTags(5) = iData[2];

    return new HingeRadauBeamIntegration(dData[0],dData[1]);
}

// lpi/lpj: plastic hinge lengths at ends I and J.
HingeRadauBeamIntegration::HingeRadauBeamIntegration(double lpi,
						     double lpj):
  BeamIntegration(BEAM_INTEGRATION_TAG_HingeRadau), lpI(lpi), lpJ(lpj)
{
  // Nothing to do
}

// Default constructor for use by FEM_ObjectBroker during parallel/DB recv.
HingeRadauBeamIntegration::HingeRadauBeamIntegration():
  BeamIntegration(BEAM_INTEGRATION_TAG_HingeRadau), lpI(0.0), lpJ(0.0)
{

}

HingeRadauBeamIntegration::~HingeRadauBeamIntegration()
{
  // Nothing to do
}

// Fills xi[0..5] with normalized section locations on [0,1]: the two element
// ends, one point 8/3*lp/L inside each hinge region, and the two-point Gauss
// abscissae (+-1/sqrt(3)) mapped onto the interior span via alpha (half-width)
// and beta (midpoint).  Entries beyond 6 are zeroed.
void
HingeRadauBeamIntegration::getSectionLocations(int numSections, double L,
					       double *xi)
{
  double oneOverL = 1.0/L;

  xi[0] = 0.0;
  xi[1] = 8.0/3*lpI*oneOverL;
  xi[4] = 1.0-8.0/3*lpJ*oneOverL;
  xi[5] = 1.0;

  static const double oneRoot3 = 1.0/sqrt(3.0);

  double alpha = 0.5-2*(lpI+lpJ)*oneOverL;   // half-width of interior span
  double beta  = 0.5+2*(lpI-lpJ)*oneOverL;   // midpoint of interior span
  xi[2] = alpha*(-oneRoot3) + beta;
  xi[3] = alpha*(oneRoot3) + beta;

  for (int i = 6; i < numSections; i++)
    xi[i] = 0.0;
}

// Fills wt[0..5] with the matching integration weights (as fractions of L):
// lp/L and 3*lp/L at each end pair, 0.5-2*(lpI+lpJ)/L at each interior Gauss
// point.  Entries beyond 6 are set to 1.0 (as in the original).
void
HingeRadauBeamIntegration::getSectionWeights(int numSections, double L,
					     double *wt)
{
  double oneOverL = 1.0/L;

  wt[0] = lpI*oneOverL;
  wt[1] = 3*lpI*oneOverL;
  wt[4] = 3*lpJ*oneOverL;
  wt[5] = lpJ*oneOverL;

  wt[2] = 0.5-2*(lpI+lpJ)*oneOverL;
  wt[3] = 0.5-2*(lpI+lpJ)*oneOverL;

  for (int i = 6; i < numSections; i++)
    wt[i] = 1.0;
}

BeamIntegration*
HingeRadauBeamIntegration::getCopy(void)
{
  return new HingeRadauBeamIntegration(lpI, lpJ);
}

// Serializes (lpI, lpJ) as a 2-vector through the channel.
int
HingeRadauBeamIntegration::sendSelf(int cTag, Channel &theChannel)
{
  static Vector data(2);

  data(0) = lpI;
  data(1) = lpJ;

  int dbTag = this->getDbTag();

  if (theChannel.sendVector(dbTag, cTag, data) < 0) {
    opserr << "HingeRadauBeamIntegration::sendSelf() - failed to send Vector data\n";
    return -1;
  }

  return 0;
}

// Mirror of sendSelf: restores (lpI, lpJ) from the received 2-vector.
int
HingeRadauBeamIntegration::recvSelf(int cTag, Channel &theChannel,
				    FEM_ObjectBroker &theBroker)
{
  static Vector data(2);

  int dbTag = this->getDbTag();

  if (theChannel.recvVector(dbTag, cTag, data) < 0)  {
    opserr << "HingeRadauBeamIntegration::recvSelf() - failed to receive Vector data\n";
    return -1;
  }

  lpI = data(0);
  lpJ = data(1);

  return 0;
}

// Sensitivity parameter mapping: "lpI" -> id 1, "lpJ" -> id 2,
// "lp" -> id 3 (both hinge lengths together).
int
HingeRadauBeamIntegration::setParameter(const char **argv, int argc,
					Parameter &param)
{
  if (argc < 1)
    return -1;

  if (strcmp(argv[0],"lpI") == 0) {
    param.setValue(lpI);
    return param.addObject(1, this);
  }
  if (strcmp(argv[0],"lpJ") == 0) {
    param.setValue(lpJ);
    return param.addObject(2, this);
  }
  if (strcmp(argv[0],"lp") == 0) {
    param.setValue(lpI);
    return param.addObject(3, this);
  }
  return -1;
}

int
HingeRadauBeamIntegration::updateParameter(int parameterID,
					   Information &info)
{
  switch (parameterID) {
  case 1:
    lpI = info.theDouble;
    return 0;
  case 2:
    lpJ = info.theDouble;
    return 0;
  case 3:
    lpI = lpJ = info.theDouble;   // "lp": both hinge lengths track one value
    return 0;
  default:
    return -1;
  }
}

int
HingeRadauBeamIntegration::activateParameter(int paramID)
{
  parameterID = paramID;   // remembered for the *Deriv methods below

  return 0;
}

void
HingeRadauBeamIntegration::Print(OPS_Stream &s, int flag)
{
  if (flag == OPS_PRINT_PRINTMODEL_JSON) {
    s << "{\"type\": \"HingeRadau\", ";
    s << "\"lpI\": " << lpI << ", ";
    s << "\"lpJ\": " << lpJ << "}";
  }

  else {
    s << "HingeRadau" << endln;
    s << " lpI = " << lpI;
    s << " lpJ = " << lpJ << endln;
  }
}

// Derivatives of the section locations w.r.t. the active hinge-length
// parameter (selected via activateParameter).  Coefficients are the exact
// derivatives of the xi[] formulas in getSectionLocations.
void
HingeRadauBeamIntegration::getLocationsDeriv(int numSections, double L,
					     double dLdh, double *dptsdh)
{
  double oneOverL = 1.0/L;

  for (int i = 0; i < numSections; i++)
    dptsdh[i] = 0.0;

  //return;

  static const double oneRoot3 = 1.0/sqrt(3.0);

  if (parameterID == 1) { // lpI
    dptsdh[1] = 8.0/3*oneOverL;
    dptsdh[2] = 2.0*oneOverL*(1.0+oneRoot3);
    dptsdh[3] = 2.0*oneOverL*(1.0-oneRoot3);
  }

  if (parameterID == 2) { // lpJ
    dptsdh[2] = -2.0*oneOverL*(1.0-oneRoot3);
    dptsdh[3] = -2.0*oneOverL*(1.0+oneRoot3);
    dptsdh[4] = -8.0/3*oneOverL;
  }

  if (parameterID == 3) { // lpI and lpJ
    dptsdh[1] = 8.0/3*oneOverL;
    dptsdh[2] = 4.0*oneOverL*oneRoot3;
    dptsdh[3] = -4.0*oneOverL*oneRoot3;
    dptsdh[4] = -8.0/3*oneOverL;
  }

  return;

  // NOTE: unreachable -- dLdh (element length) sensitivity was left as a
  // TODO in the original and is intentionally kept dead below.
  if (dLdh != 0.0) {
    // STILL TO DO
    //opserr << "getPointsDeriv -- to do" << endln;
  }

  return;
}

// Derivatives of the section weights w.r.t. the active hinge-length
// parameter; mirrors the wt[] formulas in getSectionWeights.
void
HingeRadauBeamIntegration::getWeightsDeriv(int numSections, double L,
					   double dLdh, double *dwtsdh)
{
  double oneOverL = 1.0/L;

  for (int i = 0; i < numSections; i++)
    dwtsdh[i] = 0.0;

  if (parameterID == 1) { // lpI
    dwtsdh[0] = oneOverL;
    dwtsdh[1] = 3*oneOverL;
    dwtsdh[2] = -2*oneOverL;
    dwtsdh[3] = -2*oneOverL;
  }

  if (parameterID == 2) { // lpJ
    dwtsdh[2] = -2*oneOverL;
    dwtsdh[3] = -2*oneOverL;
    dwtsdh[4] = 3*oneOverL;
    dwtsdh[5] = oneOverL;
  }

  if (parameterID == 3) { // lpI and lpJ
    dwtsdh[0] = oneOverL;
    dwtsdh[1] = 3*oneOverL;
    dwtsdh[2] = -4*oneOverL;
    dwtsdh[3] = -4*oneOverL;
    dwtsdh[4] = 3*oneOverL;
    dwtsdh[5] = oneOverL;
  }

  return;

  // NOTE: unreachable -- element-length sensitivity kept dead, as in the
  // original source.
  if (dLdh != 0.0) {
    dwtsdh[0] = -lpI*dLdh/(L*L);
    dwtsdh[1] = -3*lpI*dLdh/(L*L);
    dwtsdh[2] = 2*(lpI+lpJ)*dLdh/(L*L);
    dwtsdh[3] = 2*(lpI+lpJ)*dLdh/(L*L);
    dwtsdh[4] = -3*lpJ*dLdh/(L*L);
    dwtsdh[5] = -lpJ*dLdh/(L*L);
  }

  return;
}
{ "pile_set_name": "Github" }
body {
    /* `font:` shorthand requires at least a size AND a family; `font: 28px`
     * is invalid and the whole declaration is dropped by browsers.
     * `font-size` is what was clearly intended. */
    font-size: 28px;
    padding-left: 5px;
    padding-right: 5px;
}

form {
    padding-top: 10px;
    padding-bottom: 10px;
}
{ "pile_set_name": "Github" }
.nh .TH "X86-RSQRTSS" "7" "May 2019" "TTMO" "Intel x86-64 ISA Manual" .SH NAME RSQRTSS - COMPUTE RECIPROCAL OF SQUARE ROOT OF SCALAR SINGLE-PRECISION FLOATING-POINT VALUE .TS allbox; l l l l l l l l l l . \fB\fCOpcode*/Instruction\fR \fB\fCOp/En\fR \fB\fC64/32 bit Mode Support\fR \fB\fCCPUID Feature Flag\fR \fB\fCDescription\fR F3 0F 52 /xmm2/m32 RM V/V SSE T{ Computes the approximate reciprocal of the square root of the low single\-precision floating\-point value in xmm1. T} T{ VEX.LIG.F3.0F.WIG 52 /r VRSQRTSS xmm1, xmm2, xmm3/m32 T} RVM V/V AVX T{ Computes the approximate reciprocal of the square root of the low single precision floating\-point value in xmm1 T} [ 127:32 ] \&. .TE .SH INSTRUCTION OPERAND ENCODING .TS allbox; l l l l l l l l l l . Op/En Operand 1 Operand 2 Operand 3 Operand 4 RM ModRM:reg (w) ModRM:r/m (r) NA NA RVM ModRM:reg (w) VEX.vvvv (r) ModRM:r/m (r) NA .TE .SH DESCRIPTION .PP Computes an approximate reciprocal of the square root of the low single\-precision floating\-point value in the source operand (second operand) stores the single\-precision floating\-point result in the destination operand. The source operand can be an XMM register or a 32\-bit memory location. The destination operand is an XMM register. The three high\-order doublewords of the destination operand remain unchanged. See Figure 10\-6 in the Intel® 64 and IA\-32 Architectures Software Developer’s Manual, Volume 1, for an illustration of a scalar single\-precision floating\-point operation. .PP The relative error for this approximation is: .PP |Relative Error| ≤ 1.5 ∗ 2−12 .PP The RSQRTSS instruction is not affected by the rounding control bits in the MXCSR register. When a source value is a 0.0, an ∞ of the sign of the source value is returned. A denormal source value is treated as a 0.0 (of the same sign). When a source value is a negative value (other than −0.0), a floating\-point indefinite is returned. 
When a source value is an SNaN or QNaN, the SNaN is converted to a QNaN or the source QNaN is returned. .PP In 64\-bit mode, using a REX prefix in the form of REX.R permits this instruction to access additional registers (XMM8\-XMM15). .PP 128\-bit Legacy SSE version: The first source operand and the destination operand are the same. Bits (MAXVL\-1:32) of the corresponding YMM destination register remain unchanged. .PP VEX.128 encoded version: Bits (MAXVL\-1:128) of the destination YMM register are zeroed. .SH OPERATION .SS RSQRTSS (128\-bit Legacy SSE version) .PP .RS .nf DEST[31:0] ← APPROXIMATE(1/SQRT(SRC2[31:0])) DEST[MAXVL\-1:32] (Unmodified) .fi .RE .SS VRSQRTSS (VEX.128 encoded version) .PP .RS .nf DEST[31:0] ← APPROXIMATE(1/SQRT(SRC2[31:0])) DEST[127:32] ← SRC1[127:32] DEST[MAXVL\-1:128] ← 0 .fi .RE .SH INTEL C/C++ COMPILER INTRINSIC EQUIVALENT .PP .RS .nf RSQRTSS: \_\_m128 \_mm\_rsqrt\_ss(\_\_m128 a) .fi .RE .SH SIMD FLOATING\-POINT EXCEPTIONS .PP None. .SH OTHER EXCEPTIONS .PP See Exceptions Type 5. .SH SEE ALSO .PP x86\-manpages(7) for a list of other x86\-64 man pages. .SH COLOPHON .PP This UNOFFICIAL, mechanically\-separated, non\-verified reference is provided for convenience, but it may be incomplete or broken in various obvious or non\-obvious ways. Refer to Intel® 64 and IA\-32 Architectures Software Developer’s Manual for anything serious. .br This page is generated by scripts; therefore may contain visual or semantical bugs. Please report them (or better, fix them) on https://github.com/ttmo-O/x86-manpages. .br MIT licensed by TTMO 2020 (Turkish Unofficial Chamber of Reverse Engineers - https://ttmo.re).
{ "pile_set_name": "Github" }
# SortRichEditor 支持图片文字混合编辑、排序的富文本编辑器 目前暂时支持的功能: - 支持图片文字混合添加、修改、删除 - 支持文字中间随意插入一张或多张图片 - 支持图片文字任意排序 # Preview 由于 gif 图片较大,网速不好的童鞋请耐心等待... 如果发现图片里面卡,那是还在缓冲。真实项目是很流畅的 <img src="preview/SortRichEditor.gif"/> # Usage 目前没有做很好的封装,如果需要使用SortRichEditor,请复制 editor 包中全部文件到您的项目当中 <br/> 以及以下三张图标资源文件 <br/> - icon_add_text.png (xhdpi) - icon_delete.png (xhdpi) - icon_empty_photo.png (xhdpi) copy完成后,在布局文件中使用 ``` <com.hitomi.sortricheditor.view.editor.SortRichEditor android:id="@+id/richEditor" android:layout_width="match_parent" android:layout_height="match_parent" android:background="#fff" /> ``` SortRichEditor不包含照片墙、选择照片插入照片、拍照插入等功能,SortRichEditor只提供可插入图片的方法 如果需要以上功能,可以参照本项目其他代码。以后会将这些功能组件封装在里面。 # TODO - [x] 图片压缩问题防止OOM - [x] 优化插入图片的速度 - [x] 优化软键盘的显示和隐藏 - [ ] 重构SortRichEditor类 # Thanks - [@xmuSistone][1] - [@张鸿洋][2] [1]: https://github.com/xmuSistone/android-animate-RichEditor [2]: https://github.com/hongyangAndroid
{ "pile_set_name": "Github" }
.pa 1 .he 'QUIT (II)'3/15/72'QUIT (II)' .ti 0 NAME quit -- turn off quit signal .s3 .ti 0 SYNOPSIS sys quit; flag / quit = 26. .s3 quit(flag) .s3 .ti 0 DESCRIPTION When flag____ is 0, this call disables quit signals from the typewriter (ASCII FS). When flag____ is non-zero, quits are re-enabled, and cause execution to cease and a core image to be produced. .s3 Quits should be turned off only with due consideration. .s3 .ti 0 SEE ALSO intr(II) .s3 .ti 0 DIAGNOSTICS -- .s3 .ti 0 BUGS --
{ "pile_set_name": "Github" }
{ "action": { "misuse": { "variety": [ "Data mishandling", "Privilege abuse" ], "vector": [ "Physical access" ] } }, "actor": { "internal": { "motive": [ "Financial" ], "variety": [ "End-user" ] } }, "asset": { "assets": [ { "amount": 1, "variety": "Unknown" } ], "cloud": [ "Unknown" ], "country": [ "US" ], "total_amount": 1 }, "attribute": { "confidentiality": { "data": [ { "amount": 1, "variety": "Personal" } ], "data_disclosure": "Yes", "data_total": 1, "data_victim": [ "Other" ], "state": [ "Stored" ] } }, "confidence": "Low", "discovery_method": { "unknown": true }, "incident_id": "0728b850-a926-11e7-a3c5-dbae2580635b", "plus": { "analysis_status": "Validated", "analyst": "Hylender", "attribute": { "confidentiality": { "credit_monitoring": "Unknown", "data_abuse": "Yes" } }, "created": "2017-10-04T22:59:40.654Z", "dbir_year": 2016, "github": "9907", "master_id": "6b41af51-d3c1-4957-a26b-e782065a7d21", "modified": "2017-10-27T00:33:44.101Z", "sub_source": "phidbr", "timeline": { "notification": { "day": 31, "month": 7, "year": 2015 } } }, "reference": "https://www.reviewjournal.com/crime/courts/las-vegas-lab-tech-gets-2-years-in-prison-for-identity-theft/", "schema_version": "1.3.4", "security_incident": "Confirmed", "source_id": "vcdb", "summary": "pediatric laboratory technician was sentenced Monday to two years in prison for stealing the identity of a patient\u2019s parent and using it to apply for credit cards.", "timeline": { "incident": { "day": 1, "month": 12, "year": 2014 } }, "victim": { "country": [ "US" ], "employee_count": "Unknown", "industry": "622110", "region": [ "019021" ], "state": "Nevada", "victim_id": "Children's Heart Center" } }
{ "pile_set_name": "Github" }
// Copyright (C) 2017 Jérôme Leclercq
// This file is part of the "Nazara Engine - Utility module"
// For conditions of distribution and use, see copyright notice in Config.hpp

#pragma once

#ifndef NAZARA_VERTEXBUFFER_HPP
#define NAZARA_VERTEXBUFFER_HPP

#include <Nazara/Prerequisites.hpp>
#include <Nazara/Core/ObjectRef.hpp>
#include <Nazara/Core/RefCounted.hpp>
#include <Nazara/Core/Signal.hpp>
#include <Nazara/Utility/Buffer.hpp>
#include <Nazara/Utility/VertexDeclaration.hpp>

namespace Nz
{
	class VertexBuffer;

	// NOTE(review): ConstRef aliases the mutable type here (not
	// ObjectRef<const VertexBuffer>) -- confirm this is intended.
	using VertexBufferConstRef = ObjectRef<VertexBuffer>;
	using VertexBufferRef = ObjectRef<VertexBuffer>;

	// Reference-counted view over a (sub-)range of a Buffer, interpreted
	// through a VertexDeclaration.  A VertexBuffer either wraps an existing
	// Buffer (offset/size constructors) or allocates its own storage
	// (length/storage/usage constructor).
	class NAZARA_UTILITY_API VertexBuffer : public RefCounted
	{
		public:
			VertexBuffer() = default;
			// Wrap the whole of an existing buffer.
			VertexBuffer(VertexDeclarationConstRef vertexDeclaration, BufferRef buffer);
			// Wrap a byte sub-range [offset, offset+size) of an existing buffer.
			VertexBuffer(VertexDeclarationConstRef vertexDeclaration, BufferRef buffer, UInt32 offset, UInt32 size);
			// Allocate storage for `length` vertices with the given storage/usage.
			VertexBuffer(VertexDeclarationConstRef vertexDeclaration, UInt32 length, DataStorage storage, BufferUsageFlags usage);
			VertexBuffer(const VertexBuffer& vertexBuffer);
			VertexBuffer(VertexBuffer&&) = delete;   // move is intentionally disabled
			~VertexBuffer();

			// Copy `length` vertices starting at vertex index `startVertex`.
			bool Fill(const void* data, UInt32 startVertex, UInt32 length);
			// Copy `size` raw bytes starting at byte `offset` (relative to this view).
			bool FillRaw(const void* data, UInt32 offset, UInt32 size);

			inline const BufferRef& GetBuffer() const;
			inline UInt32 GetEndOffset() const;
			inline UInt32 GetStartOffset() const;
			inline UInt32 GetStride() const;
			inline UInt32 GetVertexCount() const;
			inline const VertexDeclarationConstRef& GetVertexDeclaration() const;

			inline bool IsValid() const;

			// Map by vertex index/count; the const overloads exist so a const
			// VertexBuffer can still be mapped (e.g. for read access).
			void* Map(BufferAccess access, UInt32 startVertex = 0, UInt32 length = 0);
			void* Map(BufferAccess access, UInt32 startVertex = 0, UInt32 length = 0) const;
			// Map by raw byte offset/size.
			void* MapRaw(BufferAccess access, UInt32 offset = 0, UInt32 size = 0);
			void* MapRaw(BufferAccess access, UInt32 offset = 0, UInt32 size = 0) const;

			// Reset overloads mirror the constructors.
			void Reset();
			void Reset(VertexDeclarationConstRef vertexDeclaration, BufferRef buffer);
			void Reset(VertexDeclarationConstRef vertexDeclaration, BufferRef buffer, UInt32 offset, UInt32 size);
			void Reset(VertexDeclarationConstRef vertexDeclaration, UInt32 length, DataStorage storage, BufferUsageFlags usage);
			void Reset(const VertexBuffer& vertexBuffer);

			void SetVertexDeclaration(VertexDeclarationConstRef vertexDeclaration);

			void Unmap() const;

			VertexBuffer& operator=(const VertexBuffer& vertexBuffer);
			VertexBuffer& operator=(VertexBuffer&&) = delete;

			template<typename... Args> static VertexBufferRef New(Args&&... args);

			// Signals:
			NazaraSignal(OnVertexBufferRelease, const VertexBuffer* /*vertexBuffer*/);

		private:
			BufferRef m_buffer;                            // underlying storage
			UInt32 m_endOffset;                            // one-past-last byte of this view
			UInt32 m_startOffset;                          // first byte of this view
			UInt32 m_vertexCount;                          // vertices in this view
			VertexDeclarationConstRef m_vertexDeclaration; // layout/stride of one vertex
	};
}

#include <Nazara/Utility/VertexBuffer.inl>

#endif // NAZARA_VERTEXBUFFER_HPP
{ "pile_set_name": "Github" }
/* -*-c++-*- */ /* osgEarth - Geospatial SDK for OpenSceneGraph * Copyright 2008-2012 Pelican Mapping * http://osgearth.org * * osgEarth is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS * IN THE SOFTWARE. * * You should have received a copy of the GNU Lesser General Public License * along with this program. If not, see <http://www.gnu.org/licenses/> */ #ifndef OSGEARTH_DATE_TIME_H #define OSGEARTH_DATE_TIME_H #include <osgEarth/Common> #include <ctime> #include <cstring> namespace osgEarth { /** Basic timestamp (seconds from the 1970 epoch) */ typedef ::time_t TimeStamp; /** Time span (in seconds) */ typedef long TimeSpan; /** * General-purpose UTC date/time object. * One second resolution, GMT time zone. 
*/ class OSGEARTH_EXPORT DateTime { public: /** DateTime representing "now" */ DateTime(); /** DateTime copy */ DateTime(const DateTime& rhs); /** DateTime from a tm (in the local time zone) */ DateTime(const ::tm& tm); /** DateTime from UTC seconds since the epoch */ DateTime(TimeStamp utc); /** DateTime from year, month [1-12], date [1-31], hours [0-24) */ DateTime(int year, int month, int day, double hours); /** DateTime from year and fractional day-of-year [1..365] */ DateTime(int year, double dayOfYear); /** DateTime from an ISO 8601 string */ DateTime(const std::string& iso8601); /** As a date/time string in RFC 1123 format (e.g., HTTP) */ const std::string asRFC1123() const; /** As a date/time string in ISO 8601 format (lexigraphic order). */ const std::string asISO8601() const; /** As a date/time string in compact ISO 8601 format (lexigraphic * order with no delimiters). */ const std::string asCompactISO8601() const; /** Julian day (fractional) corresponding to this DateTime */ double getJulianDay() const; /** Seconds since Jan 1, 1970 00:00 UTC */ TimeStamp asTimeStamp() const { return _time_t; } /** Adds hours to return a new DateTime */ DateTime operator + (double hours) const; public: int year() const; int month() const; int day() const; double hours() const; protected: ::tm _tm; ::time_t _time_t; private: // since timegm is not cross-platform ::time_t timegm(const ::tm* tm) const; }; } // namespace osgEarth #endif // OSGEARTH_DATE_TIME_H
{ "pile_set_name": "Github" }
/*-----------------------------------------------------------------------------+ Copyright (c) 2010-2010: Joachim Faulhaber +------------------------------------------------------------------------------+ Distributed under the Boost Software License, Version 1.0. (See accompanying file LICENCE.txt or copy at http://www.boost.org/LICENSE_1_0.txt) +-----------------------------------------------------------------------------*/ #ifndef BOOST_ICL_ASSOCIATIVE_INTERVAL_CONTAINER_HPP_JOFA_101023 #define BOOST_ICL_ASSOCIATIVE_INTERVAL_CONTAINER_HPP_JOFA_101023 #include <boost/icl/impl_config.hpp> #include <boost/icl/concept/comparable.hpp> #include <boost/icl/concept/joinable.hpp> #include <boost/icl/concept/container.hpp> #include <boost/icl/concept/interval_associator_base.hpp> #include <boost/icl/concept/interval_set.hpp> #include <boost/icl/concept/interval_map.hpp> #include <boost/icl/concept/interval_associator.hpp> #include <boost/icl/iterator.hpp> #endif
{ "pile_set_name": "Github" }
^D:\DS_CODE\CHAPTER4\CHAPTER4\DEBUG\ALGORITHM.OBJ|D:\DS_CODE\CHAPTER4\CHAPTER4\DEBUG\CHAPTER4.EXE.EMBED.MANIFEST.RES|D:\DS_CODE\CHAPTER4\CHAPTER4\DEBUG\MAIN.OBJ|D:\DS_CODE\CHAPTER4\CHAPTER4\DEBUG\SSTRING.OBJ C:\PROGRAMDATA\NVIDIA CORPORATION\DRS\NVDRSDB1.BIN C:\WINDOWS\GLOBALIZATION\SORTING\SORTDEFAULT.NLS C:\PROGRAM FILES (X86)\MICROSOFT SDKS\WINDOWS\V7.0A\LIB\KERNEL32.LIB C:\PROGRAM FILES (X86)\MICROSOFT SDKS\WINDOWS\V7.0A\LIB\USER32.LIB C:\PROGRAM FILES (X86)\MICROSOFT SDKS\WINDOWS\V7.0A\LIB\GDI32.LIB C:\PROGRAM FILES (X86)\MICROSOFT SDKS\WINDOWS\V7.0A\LIB\WINSPOOL.LIB C:\PROGRAM FILES (X86)\MICROSOFT SDKS\WINDOWS\V7.0A\LIB\COMDLG32.LIB C:\PROGRAM FILES (X86)\MICROSOFT SDKS\WINDOWS\V7.0A\LIB\ADVAPI32.LIB C:\PROGRAM FILES (X86)\MICROSOFT SDKS\WINDOWS\V7.0A\LIB\SHELL32.LIB C:\PROGRAM FILES (X86)\MICROSOFT SDKS\WINDOWS\V7.0A\LIB\OLE32.LIB C:\PROGRAM FILES (X86)\MICROSOFT SDKS\WINDOWS\V7.0A\LIB\OLEAUT32.LIB C:\PROGRAM FILES (X86)\MICROSOFT SDKS\WINDOWS\V7.0A\LIB\UUID.LIB C:\PROGRAM FILES (X86)\MICROSOFT SDKS\WINDOWS\V7.0A\LIB\ODBC32.LIB C:\PROGRAM FILES (X86)\MICROSOFT SDKS\WINDOWS\V7.0A\LIB\ODBCCP32.LIB D:\DS_CODE\CHAPTER4\CHAPTER4\DEBUG\CHAPTER4.EXE.EMBED.MANIFEST.RES D:\DS_CODE\CHAPTER4\CHAPTER4\DEBUG\ALGORITHM.OBJ D:\DS_CODE\CHAPTER4\CHAPTER4\DEBUG\MAIN.OBJ D:\DS_CODE\CHAPTER4\CHAPTER4\DEBUG\SSTRING.OBJ C:\WINDOWS\SYSTEM32\TZRES.DLL D:\DS_CODE\CHAPTER4\DEBUG\CHAPTER4.EXE D:\DS_CODE\CHAPTER4\DEBUG\CHAPTER4.PDB C:\PROGRAM FILES (X86)\MICROSOFT VISUAL STUDIO 10.0\VC\LIB\MSVCRTD.LIB C:\PROGRAM FILES (X86)\MICROSOFT VISUAL STUDIO 10.0\VC\LIB\OLDNAMES.LIB C:\PROGRAM FILES (X86)\MICROSOFT VISUAL STUDIO 10.0\VC\BIN\LINK.EXE C:\WINDOWS\SYSTEM32\RSAENH.DLL C:\PROGRAM FILES (X86)\MICROSOFT VISUAL STUDIO 10.0\VC\BIN\CVTRES.EXE
{ "pile_set_name": "Github" }
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.management.network.v2017_10_01; import java.util.List; import com.microsoft.azure.SubResource; import com.fasterxml.jackson.annotation.JsonProperty; import com.microsoft.rest.serializer.JsonFlatten; /** * Path rule of URL path map of an application gateway. */ @JsonFlatten public class ApplicationGatewayPathRule extends SubResource { /** * Path rules of URL path map. */ @JsonProperty(value = "properties.paths") private List<String> paths; /** * Backend address pool resource of URL path map path rule. */ @JsonProperty(value = "properties.backendAddressPool") private SubResource backendAddressPool; /** * Backend http settings resource of URL path map path rule. */ @JsonProperty(value = "properties.backendHttpSettings") private SubResource backendHttpSettings; /** * Redirect configuration resource of URL path map path rule. */ @JsonProperty(value = "properties.redirectConfiguration") private SubResource redirectConfiguration; /** * Path rule of URL path map resource. Possible values are: 'Updating', * 'Deleting', and 'Failed'. */ @JsonProperty(value = "properties.provisioningState") private String provisioningState; /** * Name of the resource that is unique within a resource group. This name * can be used to access the resource. */ @JsonProperty(value = "name") private String name; /** * A unique read-only string that changes whenever the resource is updated. */ @JsonProperty(value = "etag") private String etag; /** * Type of the resource. */ @JsonProperty(value = "type") private String type; /** * Get path rules of URL path map. * * @return the paths value */ public List<String> paths() { return this.paths; } /** * Set path rules of URL path map. 
* * @param paths the paths value to set * @return the ApplicationGatewayPathRule object itself. */ public ApplicationGatewayPathRule withPaths(List<String> paths) { this.paths = paths; return this; } /** * Get backend address pool resource of URL path map path rule. * * @return the backendAddressPool value */ public SubResource backendAddressPool() { return this.backendAddressPool; } /** * Set backend address pool resource of URL path map path rule. * * @param backendAddressPool the backendAddressPool value to set * @return the ApplicationGatewayPathRule object itself. */ public ApplicationGatewayPathRule withBackendAddressPool(SubResource backendAddressPool) { this.backendAddressPool = backendAddressPool; return this; } /** * Get backend http settings resource of URL path map path rule. * * @return the backendHttpSettings value */ public SubResource backendHttpSettings() { return this.backendHttpSettings; } /** * Set backend http settings resource of URL path map path rule. * * @param backendHttpSettings the backendHttpSettings value to set * @return the ApplicationGatewayPathRule object itself. */ public ApplicationGatewayPathRule withBackendHttpSettings(SubResource backendHttpSettings) { this.backendHttpSettings = backendHttpSettings; return this; } /** * Get redirect configuration resource of URL path map path rule. * * @return the redirectConfiguration value */ public SubResource redirectConfiguration() { return this.redirectConfiguration; } /** * Set redirect configuration resource of URL path map path rule. * * @param redirectConfiguration the redirectConfiguration value to set * @return the ApplicationGatewayPathRule object itself. */ public ApplicationGatewayPathRule withRedirectConfiguration(SubResource redirectConfiguration) { this.redirectConfiguration = redirectConfiguration; return this; } /** * Get path rule of URL path map resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. 
* * @return the provisioningState value */ public String provisioningState() { return this.provisioningState; } /** * Set path rule of URL path map resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. * * @param provisioningState the provisioningState value to set * @return the ApplicationGatewayPathRule object itself. */ public ApplicationGatewayPathRule withProvisioningState(String provisioningState) { this.provisioningState = provisioningState; return this; } /** * Get name of the resource that is unique within a resource group. This name can be used to access the resource. * * @return the name value */ public String name() { return this.name; } /** * Set name of the resource that is unique within a resource group. This name can be used to access the resource. * * @param name the name value to set * @return the ApplicationGatewayPathRule object itself. */ public ApplicationGatewayPathRule withName(String name) { this.name = name; return this; } /** * Get a unique read-only string that changes whenever the resource is updated. * * @return the etag value */ public String etag() { return this.etag; } /** * Set a unique read-only string that changes whenever the resource is updated. * * @param etag the etag value to set * @return the ApplicationGatewayPathRule object itself. */ public ApplicationGatewayPathRule withEtag(String etag) { this.etag = etag; return this; } /** * Get type of the resource. * * @return the type value */ public String type() { return this.type; } /** * Set type of the resource. * * @param type the type value to set * @return the ApplicationGatewayPathRule object itself. */ public ApplicationGatewayPathRule withType(String type) { this.type = type; return this; } }
{ "pile_set_name": "Github" }
{ "name": "php-mock/php-mock", "type": "library", "description": "PHP-Mock can mock built-in PHP functions (e.g. time()). PHP-Mock relies on PHP's namespace fallback policy. No further extension is needed.", "keywords": ["mock", "stub", "test double", "function", "test", "TDD", "BDD"], "homepage": "https://github.com/php-mock/php-mock", "license": "WTFPL", "authors": [ { "name": "Markus Malkusch", "email": "markus@malkusch.de", "homepage": "http://markus.malkusch.de", "role": "Developer" } ], "autoload": { "files": ["autoload.php"], "psr-4": { "phpmock\\": ["classes/", "tests/"] } }, "autoload-dev": { "files": ["tests/autoload.php"] }, "require": { "php": "^5.6 || ^7.0", "phpunit/php-text-template": "^1 || ^2" }, "require-dev": { "phpunit/phpunit": "^5.7 || ^6.5 || ^7.5 || ^8.0 || ^9.0" }, "replace": { "malkusch/php-mock": "*" }, "suggest": { "php-mock/php-mock-phpunit": "Allows integration into PHPUnit testcase with the trait PHPMock." }, "archive": { "exclude": ["/tests"] } }
{ "pile_set_name": "Github" }
#include <stdio.h> #include <sys/types.h> #include <sys/stat.h> #include <sys/ioctl.h> #include <fcntl.h> #include <errno.h> #include "open_device.h" #include "cmd_handler.h" #include "ms_debug.h" int ms_fd = -1; int start_test() { int result = -1; /* Handle signals */ if (register_signal_handlers() != 0) { printf("Failed to register signal handlers"); goto done; } TRACE("Registered signal handlers\n", 0); /* Open device */ ms_fd = open("/dev/maplesyrup", O_RDWR); if (ms_fd == -1) { printf("Failed to open device\n"); goto done; } TRACE("Device opened: fd: 0x%x\n", ms_fd); result = 0; done: return result; } void stop_test() { if (ms_fd == -1) { TRACE("No valid device fd found\n", 0); goto done; } TRACE("Closing device: 0x%x\n", ms_fd); close(ms_fd); ms_fd = -1; TRACE("Restoring signal handlers\n", 0); restore_signal_handlers(); done: return; }
{ "pile_set_name": "Github" }
/*
Copyright 2018 The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package resmap

import (
	"fmt"

	"github.com/pkg/errors"

	"sigs.k8s.io/kustomize/pkg/ifc"
	internal "sigs.k8s.io/kustomize/pkg/internal/error"
	"sigs.k8s.io/kustomize/pkg/resource"
	"sigs.k8s.io/kustomize/pkg/types"
)

// Factory makes instances of ResMap.
type Factory struct {
	// resF builds the individual resources that populate a ResMap.
	resF *resource.Factory
}

// NewFactory returns a new resmap.Factory.
func NewFactory(rf *resource.Factory) *Factory {
	return &Factory{resF: rf}
}

// RF returns a resource.Factory.
func (rmF *Factory) RF() *resource.Factory {
	return rmF.resF
}

// FromFiles returns a ResMap given a resource path slice.
// Each path is loaded and decoded independently; the per-path maps are
// then merged, with an error on any resource-id collision across paths.
func (rmF *Factory) FromFiles(
	loader ifc.Loader, paths []string) (ResMap, error) {
	var result []ResMap
	for _, path := range paths {
		content, err := loader.Load(path)
		if err != nil {
			return nil, errors.Wrap(err, "Load from path "+path+" failed")
		}
		// internal.Handler attaches the offending path to the decode error.
		res, err := rmF.NewResMapFromBytes(content)
		if err != nil {
			return nil, internal.Handler(err, path)
		}
		result = append(result, res)
	}
	return MergeWithErrorOnIdCollision(result...)
}

// NewResMapFromBytes decodes a list of objects in byte array format.
func (rmF *Factory) NewResMapFromBytes(b []byte) (ResMap, error) { resources, err := rmF.resF.SliceFromBytes(b) if err != nil { return nil, err } result := ResMap{} for _, res := range resources { id := res.Id() if _, found := result[id]; found { return result, fmt.Errorf("GroupVersionKindName: %#v already exists b the map", id) } result[id] = res } return result, nil } // NewResMapFromConfigMapArgs returns a Resource slice given // a configmap metadata slice from kustomization file. func (rmF *Factory) NewResMapFromConfigMapArgs(argList []types.ConfigMapArgs, options *types.GeneratorOptions) (ResMap, error) { var resources []*resource.Resource for _, args := range argList { res, err := rmF.resF.MakeConfigMap(&args, options) if err != nil { return nil, errors.Wrap(err, "NewResMapFromConfigMapArgs") } resources = append(resources, res) } return newResMapFromResourceSlice(resources) } // NewResMapFromSecretArgs takes a SecretArgs slice, generates // secrets from each entry, and accumulates them in a ResMap. func (rmF *Factory) NewResMapFromSecretArgs(argsList []types.SecretArgs, options *types.GeneratorOptions) (ResMap, error) { var resources []*resource.Resource for _, args := range argsList { res, err := rmF.resF.MakeSecret(&args, options) if err != nil { return nil, errors.Wrap(err, "NewResMapFromSecretArgs") } resources = append(resources, res) } return newResMapFromResourceSlice(resources) } // Set sets the loader for the underlying factory func (rmF *Factory) Set(ldr ifc.Loader) { rmF.resF.Set(ldr) } func newResMapFromResourceSlice(resources []*resource.Resource) (ResMap, error) { result := ResMap{} for _, res := range resources { id := res.Id() if _, found := result[id]; found { return nil, fmt.Errorf("duplicated %#v is not allowed", id) } result[id] = res } return result, nil }
{ "pile_set_name": "Github" }
<!-- Full-width (col-xs-12) loading spinner: a 65px SVG circle.
     The spin/stroke animation presumably comes from the "spinner-container"
     and "path" CSS classes defined in a stylesheet not shown here. -->
<div class="spinner-wrapper col-xs-12">
    <svg class="spinner-container" style="width:65px;height:65px" viewBox="0 0 44 44">
        <circle class="path" cx="22" cy="22" r="20" fill="none" stroke-width="4"></circle>
    </svg>
</div>
{ "pile_set_name": "Github" }
from __future__ import division from datetime import datetime from torch.autograd import Variable from model import * import argparse import sys from util import * import time import torch import random import logging import numpy as np import cPickle as pickle from datetime import timedelta from os.path import expanduser from torch.autograd import Variable from torchtext.vocab import load_word_vectors def prepare_data(pairs): batch_list = [] batch_index = 0 while batch_index < len(pairs): try: subset = pairs[batch_index:batch_index + batch_size] except: subset = pairs[batch_index:] tmp_a = np.array([len(item[0]) for item in subset]) tmp_b = np.array([len(item[1]) for item in subset]) batch_index2 = batch_index + min(len(np.where(tmp_a == tmp_a[0])[0]), len(np.where(tmp_b == tmp_b[0])[0])) batch_list.append([batch_index, batch_index2]) batch_index = batch_index2 return batch_list def create_batch(data,from_index, to_index): if to_index>len(data): to_index=len(data) lsize=0 rsize=0 lsize_list=[] rsize_list=[] for i in range(from_index, to_index): length=len(data[i][0])+2 lsize_list.append(length) if length>lsize: lsize=length length=len(data[i][1])+2 rsize_list.append(length) if length>rsize: rsize=length #lsize+=1 #rsize+=1 lsent = data[from_index][0] lsent = ['bos']+lsent + ['oov' for k in range(lsize -1 - len(lsent))] #print(lsent) left_sents = torch.cat((dict[word].view(1, -1) for word in lsent)) left_sents = torch.unsqueeze(left_sents,0) rsent = data[from_index][1] rsent = ['bos']+rsent + ['oov' for k in range(rsize -1 - len(rsent))] #print(rsent) right_sents = torch.cat((dict[word].view(1, -1) for word in rsent)) right_sents = torch.unsqueeze(right_sents,0) labels=[data[from_index][2]] for i in range(from_index+1, to_index): lsent=data[i][0] lsent=['bos']+lsent+['oov' for k in range(lsize -1 - len(lsent))] #print(lsent) left_sent = torch.cat((dict[word].view(1,-1) for word in lsent)) left_sent = torch.unsqueeze(left_sent, 0) left_sents = torch.cat([left_sents, 
left_sent]) rsent=data[i][1] rsent=['bos']+rsent+['oov' for k in range(rsize -1 - len(rsent))] #print(rsent) right_sent = torch.cat((dict[word].view(1,-1) for word in rsent)) right_sent = torch.unsqueeze(right_sent, 0) right_sents = torch.cat((right_sents, right_sent)) labels.append(data[i][2]) left_sents=Variable(left_sents) right_sents=Variable(right_sents) if task=='sts': labels=Variable(torch.Tensor(labels)) else: labels=Variable(torch.LongTensor(labels)) lsize_list=Variable(torch.LongTensor(lsize_list)) rsize_list = Variable(torch.LongTensor(rsize_list)) if torch.cuda.is_available(): left_sents=left_sents.cuda() right_sents=right_sents.cuda() labels=labels.cuda() lsize_list=lsize_list.cuda() rsize_list=rsize_list.cuda() #print(left_sents) #print(right_sents) #print(labels) return left_sents, right_sents, labels, lsize_list, rsize_list if __name__ == '__main__': task='mnli' print('task: '+task) print('model: DecAtt') torch.manual_seed(123456) EMBEDDING_DIM = 300 PROJECTED_EMBEDDING_DIM = 300 parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('--e', dest='num_epochs', default=5000, type=int, help='Number of epochs') parser.add_argument('--b', dest='batch_size', default=32, help='Batch size', type=int) parser.add_argument('--u', dest='num_units', help='Number of hidden units', default=100, type=int) parser.add_argument('--r', help='Learning rate', type=float, default=0.05, dest='rate') parser.add_argument('--lower', help='Lowercase the corpus', default=True, action='store_true') parser.add_argument('--model', help='Model selection', default='DecAtt', type=str) parser.add_argument('--optim', help='Optimizer algorithm', default='adagrad', choices=['adagrad', 'adadelta', 'adam']) parser.add_argument('--max_grad_norm', help='If the norm of the gradient vector exceeds this renormalize it\ to have the norm equal to max_grad_norm', type=float, default=5) if task=='snli': num_class=3 parser.add_argument('--train', help='JSONL or TSV file with 
training corpus', default='/data/snli_1.0_train.jsonl') parser.add_argument('--dev', help='JSONL or TSV file with development corpus', default='/data/snli_1.0_dev.jsonl') parser.add_argument('--test', help='JSONL or TSV file with testing corpus', default='/data/snli_1.0_test.jsonl') if torch.cuda.is_available(): print('CUDA is available!') basepath = expanduser("~") + '/pytorch/DecAtt/data/snli' embedding_path = expanduser("~") + '/pytorch/DeepPairWiseWord/VDPWI-NN-Torch/data/glove' else: basepath = expanduser("~") + '/Documents/research/pytorch/DecAtt/data/snli' embedding_path = expanduser("~") + '/Documents/research/pytorch/DeepPairWiseWord/VDPWI-NN-Torch/data/glove' train_pairs = pickle.load(open(basepath + "/train_pairs.p", "rb")) dev_pairs = pickle.load(open(basepath + "/dev_pairs.p", "rb")) test_pairs = pickle.load(open(basepath + "/test_pairs.p", "rb")) elif task=='mnli': num_class = 3 parser.add_argument('--train', help='JSONL or TSV file with training corpus', default='/data/mnli/multinli_1.0_train.jsonl') parser.add_argument('--dev_m', help='JSONL or TSV file with development corpus', default='/data/mnli/multinli_1.0_dev_matched.jsonl') parser.add_argument('--dev_um', help='JSONL or TSV file with development corpus', default='/data/mnli/multinli_1.0_dev_mismatched.jsonl') parser.add_argument('--test_m', help='JSONL or TSV file with testing corpus', default='/data/mnli/multinli_1.0_test_matched.jsonl') parser.add_argument('--test_um', help='JSONL or TSV file with testing corpus', default='/data/mnli/multinli_1.0_test_mismatched.jsonl') if torch.cuda.is_available(): print('CUDA is available!') basepath = expanduser("~") + '/pytorch/DecAtt/data/mnli' embedding_path = expanduser("~") + '/pytorch/DeepPairWiseWord/VDPWI-NN-Torch/data/glove' else: basepath = expanduser("~") + '/Documents/research/pytorch/DecAtt/data/mnli' embedding_path = expanduser("~") + '/Documents/research/pytorch/DeepPairWiseWord/VDPWI-NN-Torch/data/glove' ''' train_pairs = 
util.read_corpus(expanduser("~")+'/Documents/research/pytorch/DeepPairWiseWord' + args.train, True) dev_pairs_m = util.read_corpus(expanduser("~") + '/Documents/research/pytorch/DeepPairWiseWord' + args.dev_m, True) dev_pairs_um = util.read_corpus(expanduser("~") + '/Documents/research/pytorch/DeepPairWiseWord' + args.dev_um, True) test_pairs_m = util.read_corpus(expanduser("~") + '/Documents/research/pytorch/DeepPairWiseWord' + args.test_m, True) test_pairs_um = util.read_corpus(expanduser("~") + '/Documents/research/pytorch/DeepPairWiseWord' + args.test_um, True) pickle.dump(train_pairs, open("data/mnli/train_pairs.p", "wb")) pickle.dump(dev_pairs_m, open("data/mnli/dev_pairs_m.p", "wb")) pickle.dump(dev_pairs_um, open("data/mnli/dev_pairs_um.p", "wb")) pickle.dump(test_pairs_m, open("data/mnli/test_pairs_m.p", "wb")) pickle.dump(test_pairs_um, open("data/mnli/test_pairs_um.p", "wb")) ''' train_pairs = pickle.load(open(basepath + "/train_pairs.p", "rb")) dev_pairs_m = pickle.load(open(basepath + "/dev_pairs_m.p", "rb")) dev_pairs_um = pickle.load(open(basepath + "/dev_pairs_um.p", "rb")) test_pairs_m = pickle.load(open(basepath + "/test_pairs_m.p", "rb")) test_pairs_um = pickle.load(open(basepath + "/test_pairs_um.p", "rb")) elif task=='quora': num_class = 2 if torch.cuda.is_available(): print('CUDA is available!') basepath = expanduser("~") + '/pytorch/DeepPairWiseWord/data/quora' embedding_path = expanduser("~") + '/pytorch/DeepPairWiseWord/VDPWI-NN-Torch/data/glove' else: basepath = expanduser("~") + '/Documents/research/pytorch/DeepPairWiseWord/data/quora' embedding_path = expanduser("~") + '/Documents/research/pytorch/DeepPairWiseWord/VDPWI-NN-Torch/data/glove' train_pairs = readQuoradata(basepath+'/train/') dev_pairs=readQuoradata(basepath+'/dev/') test_pairs=readQuoradata(basepath+'/test/') elif task=='url': num_class = 2 if torch.cuda.is_available(): print('CUDA is available!') basepath = expanduser("~") + '/pytorch/DeepPairWiseWord/data/url' 
embedding_path = expanduser("~") + '/pytorch/DeepPairWiseWord/VDPWI-NN-Torch/data/glove' else: basepath = expanduser("~") + '/Documents/research/pytorch/DeepPairWiseWord/data/url' embedding_path = expanduser("~") + '/Documents/research/pytorch/DeepPairWiseWord/VDPWI-NN-Torch/data/glove' train_pairs = readQuoradata(basepath + '/train/') dev_pairs = None#readQuoradata(basepath + '/dev/') test_pairs = readQuoradata(basepath + '/test_9324/') if dev_pairs==None: dev_pairs=test_pairs elif task=='pit': num_class = 2 if torch.cuda.is_available(): print('CUDA is available!') basepath = expanduser("~") + '/pytorch/DeepPairWiseWord/data/pit' embedding_path = expanduser("~") + '/pytorch/DeepPairWiseWord/VDPWI-NN-Torch/data/glove' else: basepath = expanduser("~") + '/Documents/research/pytorch/DeepPairWiseWord/data/pit' embedding_path = expanduser("~") + '/Documents/research/pytorch/DeepPairWiseWord/VDPWI-NN-Torch/data/glove' train_pairs = readQuoradata(basepath + '/train/') test_pairs = readQuoradata(basepath + '/test/') dev_pairs=test_pairs elif task=='sts': num_class = 6 if torch.cuda.is_available(): print('CUDA is available!') basepath = expanduser("~") + '/pytorch/DeepPairWiseWord/data/sts' embedding_path = expanduser("~") + '/pytorch/DeepPairWiseWord/VDPWI-NN-Torch/data/glove' else: basepath = expanduser("~") + '/Documents/research/pytorch/DeepPairWiseWord/data/sts' embedding_path = expanduser("~") + '/Documents/research/pytorch/DeepPairWiseWord/VDPWI-NN-Torch/data/glove' train_pairs = readSTSdata(basepath + '/train/') test_pairs = readSTSdata(basepath + '/test/') dev_pairs=test_pairs elif task=='wikiqa': num_class=2 if torch.cuda.is_available(): print('CUDA is available!') basepath = expanduser("~") + '/pytorch/DeepPairWiseWord/data/wikiqa' embedding_path = expanduser("~") + '/pytorch/DeepPairWiseWord/VDPWI-NN-Torch/data/glove' else: basepath = expanduser("~") + '/Documents/research/pytorch/DeepPairWiseWord/data/wikiqa' embedding_path = expanduser("~") + 
'/Documents/research/pytorch/DeepPairWiseWord/VDPWI-NN-Torch/data/glove' train_pairs = readQuoradata(basepath + '/train/') dev_pairs = readQuoradata(basepath + '/dev/') test_pairs = readQuoradata(basepath + '/test/') elif task=='trecqa': num_class = 2 if torch.cuda.is_available(): print('CUDA is available!') basepath = expanduser("~") + '/pytorch/DeepPairWiseWord/data/trecqa' embedding_path = expanduser("~") + '/pytorch/DeepPairWiseWord/VDPWI-NN-Torch/data/glove' else: basepath = expanduser("~") + '/Documents/research/pytorch/DeepPairWiseWord/data/trecqa' embedding_path = expanduser("~") + '/Documents/research/pytorch/DeepPairWiseWord/VDPWI-NN-Torch/data/glove' train_pairs = readQuoradata(basepath + '/train-all/') dev_pairs = readQuoradata(basepath + '/raw-dev/') test_pairs = readQuoradata(basepath + '/raw-test/') #sys.exit() args = parser.parse_args() #print('Model: %s' % args.model) print('Read data ...') print('Number of training pairs: %d' % len(train_pairs)) print('Number of development m pairs: %d' % len(dev_pairs_m)) print('Number of development um pairs: %d' % len(dev_pairs_um)) print('Number of testing m pairs: %d' % len(test_pairs_m)) print('Number of testing um pairs: %d' % len(test_pairs_um)) batch_size = args.batch_size num_epochs = args.num_epochs tokens = [] dict={} word2id={} vocab=set() for pair in train_pairs: left=pair[0] right=pair[1] vocab |= set(left) vocab |= set(right) for pair in dev_pairs_m: left=pair[0] right=pair[1] vocab |= set(left) vocab |= set(right) for pair in dev_pairs_um: left=pair[0] right=pair[1] vocab |= set(left) vocab |= set(right) for pair in test_pairs_m: left=pair[0] right=pair[1] vocab |= set(left) vocab |= set(right) for pair in test_pairs_um: left=pair[0] right=pair[1] vocab |= set(left) vocab |= set(right) tokens=list(vocab) #for line in open(basepath + '/vocab.txt'): # tokens.append(line.strip().decode('utf-8')) wv_dict, wv_arr, wv_size = load_word_vectors(embedding_path, 'glove.840B', EMBEDDING_DIM) #embedding = [] 
tokens.append('oov') tokens.append('bos') #embedding.append(dict[word].numpy()) #print(len(embedding)) #np.save('embedding',np.array(embedding)) #sys.exit() pretrained_emb = np.zeros(shape=(len(tokens), EMBEDDING_DIM)) oov={} for id in range(100): oov[id]=torch.normal(torch.zeros(EMBEDDING_DIM),std=1) id=0 for word in tokens: try: dict[word] = wv_arr[wv_dict[word]]/torch.norm(wv_arr[wv_dict[word]]) except: #if args.model=='DecAtt': # dict[word]=oov[np.random.randint(100)] #else: dict[word] = torch.normal(torch.zeros(EMBEDDING_DIM),std=1) word2id[word]=id pretrained_emb[id] = dict[word].numpy() id+=1 if task=='sts': criterion = nn.KLDivLoss() else: criterion = torch.nn.NLLLoss(size_average=True) #criterion = torch.nn.CrossEntropyLoss() model=DecAtt(200,num_class,len(tokens),EMBEDDING_DIM, PROJECTED_EMBEDDING_DIM, pretrained_emb) if torch.cuda.is_available(): model = model.cuda() criterion = criterion.cuda() optimizer = torch.optim.Adagrad(model.parameters(), lr=0.05, weight_decay=5e-5) print('Start training...') batch_counter = 0 best_dev_acc_m = 0 best_dev_acc_um = 0 accumulated_loss=0 report_interval = 1000 model.train() train_pairs=np.array(train_pairs) rand_idx = np.random.permutation(len(train_pairs)) train_pairs = train_pairs[rand_idx] both_lengths = np.array([(len(train_pairs[i][0]), len(train_pairs[i][1])) for i in range(len(train_pairs))], dtype={'names': ['x', 'y'], 'formats': ['i4', 'i4']}) sorted_lengths = np.argsort(both_lengths, order=('x', 'y')) train_pairs = train_pairs[sorted_lengths] train_batch_list=prepare_data(train_pairs) batch_index=0 for epoch in range(num_epochs): batch_counter=0 accumulated_loss = 0 model.train() print('--' * 20) start_time = time.time() train_rand_i=np.random.permutation(len(train_batch_list)) train_batch_i=0 train_sents_scaned=0 train_num_correct=0 while train_batch_i<len(train_batch_list): batch_index, batch_index2=train_batch_list[train_rand_i[train_batch_i]] train_batch_i+=1 #batch_index2=batch_index+batch_size 
left_sents, right_sents, labels, lsize_list, rsize_list = create_batch(train_pairs, batch_index, batch_index2) train_sents_scaned+=len(labels) #print(lsize_list) #print(rsize_list) #batch_index=batch_index2 optimizer.zero_grad() output = model(left_sents, right_sents, lsize_list, rsize_list) result = output.data.cpu().numpy() a = np.argmax(result, axis=1) b = labels.data.cpu().numpy() train_num_correct += np.sum(a == b) #print('forward finished'+str(datetime.now())) #print(output) #print(labels) #sys.exit() loss = criterion(output, labels) loss.backward() '''''' grad_norm = 0. #para_norm = 0. for m in model.modules(): if isinstance(m, nn.Linear): # print(m) grad_norm += m.weight.grad.data.norm() ** 2 #para_norm += m.weight.data.norm() ** 2 if m.bias is not None: grad_norm += m.bias.grad.data.norm() ** 2 #para_norm += m.bias.data.norm() ** 2 grad_norm ** 0.5 #para_norm ** 0.5 try: shrinkage = args.max_grad_norm / grad_norm except: pass if shrinkage < 1: for m in model.modules(): # print m if isinstance(m, nn.Linear): m.weight.grad.data = m.weight.grad.data * shrinkage if m.bias is not None: m.bias.grad.data = m.bias.grad.data * shrinkage '''''' optimizer.step() #print('backword finished' + str(datetime.now())) batch_counter += 1 #print(batch_counter, loss.data[0]) accumulated_loss += loss.data[0] if batch_counter % report_interval ==0: msg = '%d completed epochs, %d batches' % (epoch, batch_counter) msg += '\t train batch loss: %f' % (accumulated_loss/train_sents_scaned) msg += '\t train accuracy: %f' % (train_num_correct/train_sents_scaned) print(msg) # valid after each epoch model.eval() dev_batch_index=0 dev_num_correct=0 msg = '%d completed epochs, %d batches' % (epoch, batch_counter) accumulated_loss=0 pred=[] while dev_batch_index<len(dev_pairs_m): left_sents, right_sents, labels, lsize_list, rsize_list = create_batch(dev_pairs_m, dev_batch_index, dev_batch_index+1) dev_batch_index += 1 output = model(left_sents, right_sents, lsize_list, rsize_list) 
result=np.exp(output.data.cpu().numpy()) loss = criterion(output, labels) accumulated_loss += loss.data[0] a = np.argmax(result, axis=1) b = labels.data.cpu().numpy() dev_num_correct+=np.sum(a == b) pred.extend(result) msg += '\t dev_m loss: %f' % accumulated_loss dev_acc=dev_num_correct/len(dev_pairs_m) msg += '\t dev_m accuracy: %f' % dev_acc print(msg) if dev_acc > best_dev_acc_m: best_dev_acc_m=dev_acc with open(basepath + '/prob_DecAtt_' + task+'_m', 'w') as f: for item in pred: f.writelines(str(item[0]) + '\t' + str(item[1]) + '\t' + str(item[2]) + '\n') test_batch_index = 0 pred=[] while test_batch_index < len(test_pairs_m): left_sents, right_sents, labels, lsize_list, rsize_list = create_batch(test_pairs_m, test_batch_index, test_batch_index+1) test_batch_index+=1 output = model(left_sents, right_sents, lsize_list, rsize_list) result = output.data.cpu().numpy() a = np.argmax(result, axis=1) pred.extend(a) with open(basepath + '/sub_m.csv', 'w+') as f: index = ['neutral','contradiction','entailment'] f.write("pairID,gold_label\n") for i, k in enumerate(pred): f.write(str(i + 9847) + "," + index[k] + "\n") #torch.save(model, basepath + '/model_DecAtt_m' + '.pkl') dev_batch_index=0 dev_num_correct=0 msg = '%d completed epochs, %d batches' % (epoch, batch_counter) accumulated_loss=0 pred=[] while dev_batch_index<len(dev_pairs_um): left_sents, right_sents, labels, lsize_list, rsize_list = create_batch(dev_pairs_um, dev_batch_index, dev_batch_index+1) dev_batch_index+=1 #left_sents, right_sents, labels = create_batch(dev_pairs, 0, len(dev_pairs)) output = model(left_sents, right_sents, lsize_list, rsize_list) result=np.exp(output.data.cpu().numpy()) loss = criterion(output, labels) accumulated_loss += loss.data[0] a = np.argmax(result, axis=1) b = labels.data.cpu().numpy() dev_num_correct+=np.sum(a == b) pred.extend(result) msg += '\t dev_um loss: %f' % accumulated_loss dev_acc=dev_num_correct/len(dev_pairs_um) msg += '\t dev_um accuracy: %f' % dev_acc print(msg) 
if dev_acc > best_dev_acc_um: best_dev_acc_um=dev_acc with open(basepath + '/prob_DecAtt_' + task+'_um', 'w') as f: for item in pred: f.writelines(str(item[0]) + '\t' + str(item[1]) + '\t' + str(item[2]) + '\n') test_batch_index = 0 pred=[] while test_batch_index < len(test_pairs_um): left_sents, right_sents, labels, lsize_list, rsize_list = create_batch(test_pairs_um, test_batch_index, test_batch_index+1) test_batch_index+=1 output = model(left_sents, right_sents, lsize_list, rsize_list) result = output.data.cpu().numpy() a = np.argmax(result, axis=1) pred.extend(a) with open(basepath + '/sub_um.csv', 'w+') as f: index = ['neutral','contradiction','entailment'] f.write("pairID,gold_label\n") for i, k in enumerate(pred): f.write(str(i) + "," + index[k] + "\n") #torch.save(model, basepath + '/model_DecAtt_um' + '.pkl') elapsed_time = time.time() - start_time print('Epoch ' + str(epoch) + ' finished within ' + str(timedelta(seconds=elapsed_time)))
{ "pile_set_name": "Github" }
#Wed May 29 15:32:16 MSK 2019 distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists distributionUrl=https\://services.gradle.org/distributions/gradle-5.4.1-all.zip
{ "pile_set_name": "Github" }
/** * Copyright 2010-present Facebook. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook; /** * <p> * Identifies the state of a Session. * </p> * <p> * Session objects implement a state machine that controls their lifecycle. This * enum represents the states of the state machine. * </p> */ public enum SessionState { /** * Indicates that the Session has not yet been opened and has no cached * token. Opening a Session in this state will involve user interaction. */ CREATED(Category.CREATED_CATEGORY), /** * <p> * Indicates that the Session has not yet been opened and has a cached * token. Opening a Session in this state will not involve user interaction. * </p> * <p> * If you are using Session from an Android Service, you must provide a * TokenCachingStrategy implementation that contains a valid token to the Session * constructor. The resulting Session will be created in this state, and you * can then safely call open, passing null for the Activity. * </p> */ CREATED_TOKEN_LOADED(Category.CREATED_CATEGORY), /** * Indicates that the Session is in the process of opening. */ OPENING(Category.CREATED_CATEGORY), /** * Indicates that the Session is opened. In this state, the Session may be * used with a {@link Request}. */ OPENED(Category.OPENED_CATEGORY), /** * <p> * Indicates that the Session is opened and that the token has changed. In * this state, the Session may be used with {@link Request}. 
* </p> * <p> * Every time the token is updated, {@link Session.StatusCallback * StatusCallback} is called with this value. * </p> */ OPENED_TOKEN_UPDATED(Category.OPENED_CATEGORY), /** * Indicates that the Session is closed, and that it was not closed * normally. Typically this means that the open call failed, and the * Exception parameter to {@link Session.StatusCallback StatusCallback} will * be non-null. */ CLOSED_LOGIN_FAILED(Category.CLOSED_CATEGORY), /** * Indicates that the Session was closed normally. */ CLOSED(Category.CLOSED_CATEGORY); private final Category category; SessionState(Category category) { this.category = category; } /** * Returns a boolean indicating whether the state represents a successfully * opened state in which the Session can be used with a {@link Request}. * * @return a boolean indicating whether the state represents a successfully * opened state in which the Session can be used with a * {@link Request}. */ public boolean isOpened() { return this.category == Category.OPENED_CATEGORY; } /** * Returns a boolean indicating whether the state represents a closed * Session that can no longer be used with a {@link Request}. * * @return a boolean indicating whether the state represents a closed * Session that can no longer be used with a {@link Request}. */ public boolean isClosed() { return this.category == Category.CLOSED_CATEGORY; } private enum Category { CREATED_CATEGORY, OPENED_CATEGORY, CLOSED_CATEGORY } }
{ "pile_set_name": "Github" }
// Turkish (tr) distance strings keyed by date-fns token. Entries are either
// a plain string or a { one, other } pair with a {{count}} placeholder.
var formatDistanceLocale = {
  lessThanXSeconds: {
    one: 'bir saniyeden az',
    other: '{{count}} saniyeden az'
  },

  xSeconds: {
    one: '1 saniye',
    other: '{{count}} saniye'
  },

  halfAMinute: 'yarım dakika',

  lessThanXMinutes: {
    one: 'bir dakikadan az',
    other: '{{count}} dakikadan az'
  },

  xMinutes: {
    one: '1 dakika',
    other: '{{count}} dakika'
  },

  aboutXHours: {
    one: 'yaklaşık 1 saat',
    other: 'yaklaşık {{count}} saat'
  },

  xHours: {
    one: '1 saat',
    other: '{{count}} saat'
  },

  xDays: {
    one: '1 gün',
    other: '{{count}} gün'
  },

  aboutXWeeks: {
    one: 'yaklaşık 1 hafta',
    other: 'yaklaşık {{count}} hafta'
  },

  xWeeks: {
    one: '1 hafta',
    other: '{{count}} hafta'
  },

  aboutXMonths: {
    one: 'yaklaşık 1 ay',
    other: 'yaklaşık {{count}} ay'
  },

  xMonths: {
    one: '1 ay',
    other: '{{count}} ay'
  },

  aboutXYears: {
    one: 'yaklaşık 1 yıl',
    other: 'yaklaşık {{count}} yıl'
  },

  xYears: {
    one: '1 yıl',
    other: '{{count}} yıl'
  },

  overXYears: {
    one: '1 yıldan fazla',
    other: '{{count}} yıldan fazla'
  },

  almostXYears: {
    one: 'neredeyse 1 yıl',
    other: 'neredeyse {{count}} yıl'
  }
}

// Resolve the localized phrase for a token/count pair, optionally appending
// the Turkish future ("sonra") or past ("önce") suffix per options.comparison.
export default function formatDistance(token, count, options) {
  var opts = options || {}
  var entry = formatDistanceLocale[token]

  var result
  if (typeof entry === 'string') {
    // Invariant phrases (e.g. halfAMinute) ignore the count entirely.
    result = entry
  } else {
    result = count === 1 ? entry.one : entry.other.replace('{{count}}', count)
  }

  if (!opts.addSuffix) {
    return result
  }
  return opts.comparison > 0 ? result + ' sonra' : result + ' önce'
}
{ "pile_set_name": "Github" }
using System;
using Glass.Mapper.Sc.DataMappers;
using NUnit.Framework;
using Sitecore.Data;

namespace Glass.Mapper.Sc.FakeDb.DataMappers
{
    /// <summary>
    /// Tests for <see cref="SitecoreFieldBooleanMapper"/>: Sitecore stores booleans
    /// as the raw string "1" for true; "0", empty, or any other value reads as false.
    /// </summary>
    [TestFixture]
    public class SitecoreFieldBooleanMapperFixture : AbstractMapperFixture
    {
        // Builds a fake item holding the given raw field value and returns the
        // mapper's GetField result for that field.
        private static object ReadField(string fieldValue)
        {
            var fieldId = Guid.NewGuid();
            var item = Helpers.CreateFakeItem(fieldId, fieldValue);
            var field = item.Fields[new ID(fieldId)];
            var mapper = new SitecoreFieldBooleanMapper();
            return mapper.GetField(field, null, null);
        }

        // Writes the given boolean through the mapper into a fresh fake item's
        // field and returns the raw string that ends up stored.
        private static string WriteField(bool objectValue)
        {
            var fieldId = Guid.NewGuid();
            var item = Helpers.CreateFakeItem(fieldId, string.Empty);
            var field = item.Fields[new ID(fieldId)];
            var mapper = new SitecoreFieldBooleanMapper();
            item.Editing.BeginEdit();
            mapper.SetField(field, objectValue, null, null);
            return field.Value;
        }

        #region Method - GetField

        [Test]
        public void GetField_FieldValueZero_ReturnsFalse()
        {
            Assert.AreEqual(false, ReadField("0"));
        }

        [Test]
        public void GetField_FieldValueStringEmpty_ReturnsFalse()
        {
            Assert.AreEqual(false, ReadField(string.Empty));
        }

        [Test]
        public void GetField_FieldValueOne_ReturnsTrue()
        {
            Assert.AreEqual(true, ReadField("1"));
        }

        [Test]
        public void GetField_FieldValueRandom_ReturnsFalse()
        {
            // Arbitrary junk must not be coerced to true.
            Assert.AreEqual(false, ReadField("afaegaeg"));
        }

        #endregion

        #region Method - SetField

        [Test]
        public void SetField_ValueFalse_FieldSetToZero()
        {
            Assert.AreEqual("0", WriteField(false));
        }

        [Test]
        public void SetField_ValueTrue_FieldSetToOne()
        {
            Assert.AreEqual("1", WriteField(true));
        }

        #endregion
    }
}
{ "pile_set_name": "Github" }
/* * Copyright (c) 2014, The Linux Foundation. All rights reserved. * * This software is licensed under the terms of the GNU General Public * License version 2, as published by the Free Software Foundation, and * may be copied, distributed, and modified under those terms. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. */ #include <linux/kernel.h> #include <linux/bitops.h> #include <linux/regmap.h> #include <linux/export.h> #include "clk-regmap-mux.h" static inline struct clk_regmap_mux *to_clk_regmap_mux(struct clk_hw *hw) { return container_of(to_clk_regmap(hw), struct clk_regmap_mux, clkr); } static u8 mux_get_parent(struct clk_hw *hw) { struct clk_regmap_mux *mux = to_clk_regmap_mux(hw); struct clk_regmap *clkr = to_clk_regmap(hw); unsigned int mask = GENMASK(mux->width - 1, 0); unsigned int val; regmap_read(clkr->regmap, mux->reg, &val); val >>= mux->shift; val &= mask; return val; } static int mux_set_parent(struct clk_hw *hw, u8 index) { struct clk_regmap_mux *mux = to_clk_regmap_mux(hw); struct clk_regmap *clkr = to_clk_regmap(hw); unsigned int mask = GENMASK(mux->width + mux->shift - 1, mux->shift); unsigned int val; val = index; val <<= mux->shift; return regmap_update_bits(clkr->regmap, mux->reg, mask, val); } const struct clk_ops clk_regmap_mux_closest_ops = { .get_parent = mux_get_parent, .set_parent = mux_set_parent, .determine_rate = __clk_mux_determine_rate_closest, }; EXPORT_SYMBOL_GPL(clk_regmap_mux_closest_ops);
{ "pile_set_name": "Github" }
// RUN: %clang_analyze_cc1 -triple i386-apple-darwin10 -DI386 -analyzer-checker=core,debug.ExprInspection -fobjc-arc -analyzer-config c++-inlining=constructors -Wno-null-dereference -std=c++11 -verify -analyzer-config eagerly-assume=false %s // RUN: %clang_analyze_cc1 -triple i386-apple-darwin10 -DI386 -analyzer-checker=core,debug.ExprInspection -fobjc-arc -analyzer-config c++-inlining=constructors -Wno-null-dereference -std=c++11 -verify -DTEST_INLINABLE_ALLOCATORS -analyzer-config eagerly-assume=false %s // RUN: %clang_analyze_cc1 -triple x86_64-apple-darwin12 -analyzer-checker=core,debug.ExprInspection -fobjc-arc -analyzer-config c++-inlining=constructors -Wno-null-dereference -std=c++11 -verify -analyzer-config eagerly-assume=false %s // RUN: %clang_analyze_cc1 -triple x86_64-apple-darwin12 -analyzer-checker=core,debug.ExprInspection -fobjc-arc -analyzer-config c++-inlining=constructors -Wno-null-dereference -std=c++11 -verify -DTEST_INLINABLE_ALLOCATORS -analyzer-config eagerly-assume=false %s #include "Inputs/system-header-simulator-cxx.h" void clang_analyzer_eval(bool); void clang_analyzer_checkInlined(bool); // A simplified version of std::move. 
template <typename T> T &&move(T &obj) { return static_cast<T &&>(obj); } struct Wrapper { __strong id obj; }; void test() { Wrapper w; // force a diagnostic *(char *)0 = 1; // expected-warning{{Dereference of null pointer}} } struct IntWrapper { int x; }; void testCopyConstructor() { IntWrapper a; a.x = 42; IntWrapper b(a); clang_analyzer_eval(b.x == 42); // expected-warning{{TRUE}} } struct NonPODIntWrapper { int x; virtual int get(); }; void testNonPODCopyConstructor() { NonPODIntWrapper a; a.x = 42; NonPODIntWrapper b(a); clang_analyzer_eval(b.x == 42); // expected-warning{{TRUE}} } namespace ConstructorVirtualCalls { class A { public: int *out1, *out2, *out3; virtual int get() { return 1; } A(int *out1) { *out1 = get(); } }; class B : public A { public: virtual int get() { return 2; } B(int *out1, int *out2) : A(out1) { *out2 = get(); } }; class C : public B { public: virtual int get() { return 3; } C(int *out1, int *out2, int *out3) : B(out1, out2) { *out3 = get(); } }; void test() { int a, b, c; C obj(&a, &b, &c); clang_analyzer_eval(a == 1); // expected-warning{{TRUE}} clang_analyzer_eval(b == 2); // expected-warning{{TRUE}} clang_analyzer_eval(c == 3); // expected-warning{{TRUE}} clang_analyzer_eval(obj.get() == 3); // expected-warning{{TRUE}} // Sanity check for devirtualization. A *base = &obj; clang_analyzer_eval(base->get() == 3); // expected-warning{{TRUE}} } } namespace TemporaryConstructor { class BoolWrapper { public: BoolWrapper() { clang_analyzer_checkInlined(true); // expected-warning{{TRUE}} value = true; } bool value; }; void test() { // PR13717 - Don't crash when a CXXTemporaryObjectExpr is inlined. 
if (BoolWrapper().value) return; } } namespace ConstructorUsedAsRValue { using TemporaryConstructor::BoolWrapper; bool extractValue(BoolWrapper b) { return b.value; } void test() { bool result = extractValue(BoolWrapper()); clang_analyzer_eval(result); // expected-warning{{TRUE}} } } namespace PODUninitialized { class POD { public: int x, y; }; class PODWrapper { public: POD p; }; class NonPOD { public: int x, y; NonPOD() {} NonPOD(const NonPOD &Other) : x(Other.x), y(Other.y) // expected-warning {{undefined}} { } NonPOD(NonPOD &&Other) : x(Other.x), y(Other.y) // expected-warning {{undefined}} { } NonPOD &operator=(const NonPOD &Other) { x = Other.x; y = Other.y; // expected-warning {{undefined}} return *this; } NonPOD &operator=(NonPOD &&Other) { x = Other.x; y = Other.y; // expected-warning {{undefined}} return *this; } }; class NonPODWrapper { public: class Inner { public: int x, y; Inner() {} Inner(const Inner &Other) : x(Other.x), y(Other.y) // expected-warning {{undefined}} { } Inner(Inner &&Other) : x(Other.x), y(Other.y) // expected-warning {{undefined}} { } Inner &operator=(const Inner &Other) { x = Other.x; // expected-warning {{undefined}} y = Other.y; return *this; } Inner &operator=(Inner &&Other) { x = Other.x; // expected-warning {{undefined}} y = Other.y; return *this; } }; Inner p; }; void testPOD(const POD &pp) { POD p; p.x = 1; POD p2 = p; // no-warning clang_analyzer_eval(p2.x == 1); // expected-warning{{TRUE}} POD p3 = move(p); // no-warning clang_analyzer_eval(p3.x == 1); // expected-warning{{TRUE}} // Use rvalues as well. clang_analyzer_eval(POD(p3).x == 1); // expected-warning{{TRUE}} // Copy from symbolic references correctly. POD p4 = pp; // Make sure that p4.x contains a symbol after copy. if (p4.x > 0) clang_analyzer_eval(p4.x > 0); // expected-warning{{TRUE}} // FIXME: Element region gets in the way, so these aren't the same symbols // as they should be. 
clang_analyzer_eval(pp.x == p4.x); // expected-warning{{UNKNOWN}} PODWrapper w; w.p.y = 1; PODWrapper w2 = w; // no-warning clang_analyzer_eval(w2.p.y == 1); // expected-warning{{TRUE}} PODWrapper w3 = move(w); // no-warning clang_analyzer_eval(w3.p.y == 1); // expected-warning{{TRUE}} // Use rvalues as well. clang_analyzer_eval(PODWrapper(w3).p.y == 1); // expected-warning{{TRUE}} } void testNonPOD() { NonPOD p; p.x = 1; NonPOD p2 = p; } void testNonPODMove() { NonPOD p; p.x = 1; NonPOD p2 = move(p); } void testNonPODWrapper() { NonPODWrapper w; w.p.y = 1; NonPODWrapper w2 = w; } void testNonPODWrapperMove() { NonPODWrapper w; w.p.y = 1; NonPODWrapper w2 = move(w); } // Not strictly about constructors, but trivial assignment operators should // essentially work the same way. namespace AssignmentOperator { void testPOD() { POD p; p.x = 1; POD p2; p2 = p; // no-warning clang_analyzer_eval(p2.x == 1); // expected-warning{{TRUE}} POD p3; p3 = move(p); // no-warning clang_analyzer_eval(p3.x == 1); // expected-warning{{TRUE}} PODWrapper w; w.p.y = 1; PODWrapper w2; w2 = w; // no-warning clang_analyzer_eval(w2.p.y == 1); // expected-warning{{TRUE}} PODWrapper w3; w3 = move(w); // no-warning clang_analyzer_eval(w3.p.y == 1); // expected-warning{{TRUE}} } void testReturnValue() { POD p; p.x = 1; POD p2; clang_analyzer_eval(&(p2 = p) == &p2); // expected-warning{{TRUE}} PODWrapper w; w.p.y = 1; PODWrapper w2; clang_analyzer_eval(&(w2 = w) == &w2); // expected-warning{{TRUE}} } void testNonPOD() { NonPOD p; p.x = 1; NonPOD p2; p2 = p; } void testNonPODMove() { NonPOD p; p.x = 1; NonPOD p2; p2 = move(p); } void testNonPODWrapper() { NonPODWrapper w; w.p.y = 1; NonPODWrapper w2; w2 = w; } void testNonPODWrapperMove() { NonPODWrapper w; w.p.y = 1; NonPODWrapper w2; w2 = move(w); } } } namespace ArrayMembers { struct Primitive { int values[3]; }; void testPrimitive() { Primitive a = { { 1, 2, 3 } }; clang_analyzer_eval(a.values[0] == 1); // expected-warning{{TRUE}} 
clang_analyzer_eval(a.values[1] == 2); // expected-warning{{TRUE}} clang_analyzer_eval(a.values[2] == 3); // expected-warning{{TRUE}} Primitive b = a; clang_analyzer_eval(b.values[0] == 1); // expected-warning{{TRUE}} clang_analyzer_eval(b.values[1] == 2); // expected-warning{{TRUE}} clang_analyzer_eval(b.values[2] == 3); // expected-warning{{TRUE}} Primitive c; c = b; clang_analyzer_eval(c.values[0] == 1); // expected-warning{{TRUE}} clang_analyzer_eval(c.values[1] == 2); // expected-warning{{TRUE}} clang_analyzer_eval(c.values[2] == 3); // expected-warning{{TRUE}} } struct NestedPrimitive { int values[2][3]; }; void testNestedPrimitive() { NestedPrimitive a = { { { 0, 0, 0 }, { 1, 2, 3 } } }; clang_analyzer_eval(a.values[1][0] == 1); // expected-warning{{TRUE}} clang_analyzer_eval(a.values[1][1] == 2); // expected-warning{{TRUE}} clang_analyzer_eval(a.values[1][2] == 3); // expected-warning{{TRUE}} NestedPrimitive b = a; clang_analyzer_eval(b.values[1][0] == 1); // expected-warning{{TRUE}} clang_analyzer_eval(b.values[1][1] == 2); // expected-warning{{TRUE}} clang_analyzer_eval(b.values[1][2] == 3); // expected-warning{{TRUE}} NestedPrimitive c; c = b; clang_analyzer_eval(c.values[1][0] == 1); // expected-warning{{TRUE}} clang_analyzer_eval(c.values[1][1] == 2); // expected-warning{{TRUE}} clang_analyzer_eval(c.values[1][2] == 3); // expected-warning{{TRUE}} } struct POD { IntWrapper values[3]; }; void testPOD() { POD a = { { { 1 }, { 2 }, { 3 } } }; clang_analyzer_eval(a.values[0].x == 1); // expected-warning{{TRUE}} clang_analyzer_eval(a.values[1].x == 2); // expected-warning{{TRUE}} clang_analyzer_eval(a.values[2].x == 3); // expected-warning{{TRUE}} POD b = a; clang_analyzer_eval(b.values[0].x == 1); // expected-warning{{TRUE}} clang_analyzer_eval(b.values[1].x == 2); // expected-warning{{TRUE}} clang_analyzer_eval(b.values[2].x == 3); // expected-warning{{TRUE}} POD c; c = b; clang_analyzer_eval(c.values[0].x == 1); // expected-warning{{TRUE}} 
clang_analyzer_eval(c.values[1].x == 2); // expected-warning{{TRUE}} clang_analyzer_eval(c.values[2].x == 3); // expected-warning{{TRUE}} } struct NestedPOD { IntWrapper values[2][3]; }; void testNestedPOD() { NestedPOD a = { { { { 0 }, { 0 }, { 0 } }, { { 1 }, { 2 }, { 3 } } } }; clang_analyzer_eval(a.values[1][0].x == 1); // expected-warning{{TRUE}} clang_analyzer_eval(a.values[1][1].x == 2); // expected-warning{{TRUE}} clang_analyzer_eval(a.values[1][2].x == 3); // expected-warning{{TRUE}} NestedPOD b = a; clang_analyzer_eval(b.values[1][0].x == 1); // expected-warning{{TRUE}} clang_analyzer_eval(b.values[1][1].x == 2); // expected-warning{{TRUE}} clang_analyzer_eval(b.values[1][2].x == 3); // expected-warning{{TRUE}} NestedPOD c; c = b; clang_analyzer_eval(c.values[1][0].x == 1); // expected-warning{{TRUE}} clang_analyzer_eval(c.values[1][1].x == 2); // expected-warning{{TRUE}} clang_analyzer_eval(c.values[1][2].x == 3); // expected-warning{{TRUE}} } struct NonPOD { NonPODIntWrapper values[3]; }; void testNonPOD() { NonPOD a; a.values[0].x = 1; a.values[1].x = 2; a.values[2].x = 3; clang_analyzer_eval(a.values[0].x == 1); // expected-warning{{TRUE}} clang_analyzer_eval(a.values[1].x == 2); // expected-warning{{TRUE}} clang_analyzer_eval(a.values[2].x == 3); // expected-warning{{TRUE}} NonPOD b = a; clang_analyzer_eval(b.values[0].x == 1); // expected-warning{{UNKNOWN}} clang_analyzer_eval(b.values[1].x == 2); // expected-warning{{UNKNOWN}} clang_analyzer_eval(b.values[2].x == 3); // expected-warning{{UNKNOWN}} NonPOD c; c = b; clang_analyzer_eval(c.values[0].x == 1); // expected-warning{{UNKNOWN}} clang_analyzer_eval(c.values[1].x == 2); // expected-warning{{UNKNOWN}} clang_analyzer_eval(c.values[2].x == 3); // expected-warning{{UNKNOWN}} } struct NestedNonPOD { NonPODIntWrapper values[2][3]; }; void testNestedNonPOD() { NestedNonPOD a; a.values[0][0].x = 0; a.values[0][1].x = 0; a.values[0][2].x = 0; a.values[1][0].x = 1; a.values[1][1].x = 2; a.values[1][2].x 
= 3; clang_analyzer_eval(a.values[1][0].x == 1); // expected-warning{{TRUE}} clang_analyzer_eval(a.values[1][1].x == 2); // expected-warning{{TRUE}} clang_analyzer_eval(a.values[1][2].x == 3); // expected-warning{{TRUE}} NestedNonPOD b = a; clang_analyzer_eval(b.values[1][0].x == 1); // expected-warning{{UNKNOWN}} clang_analyzer_eval(b.values[1][1].x == 2); // expected-warning{{UNKNOWN}} clang_analyzer_eval(b.values[1][2].x == 3); // expected-warning{{UNKNOWN}} NestedNonPOD c; c = b; clang_analyzer_eval(c.values[1][0].x == 1); // expected-warning{{UNKNOWN}} clang_analyzer_eval(c.values[1][1].x == 2); // expected-warning{{UNKNOWN}} clang_analyzer_eval(c.values[1][2].x == 3); // expected-warning{{UNKNOWN}} } struct NonPODDefaulted { NonPODIntWrapper values[3]; NonPODDefaulted() = default; NonPODDefaulted(const NonPODDefaulted &) = default; NonPODDefaulted &operator=(const NonPODDefaulted &) = default; }; void testNonPODDefaulted() { NonPODDefaulted a; a.values[0].x = 1; a.values[1].x = 2; a.values[2].x = 3; clang_analyzer_eval(a.values[0].x == 1); // expected-warning{{TRUE}} clang_analyzer_eval(a.values[1].x == 2); // expected-warning{{TRUE}} clang_analyzer_eval(a.values[2].x == 3); // expected-warning{{TRUE}} NonPODDefaulted b = a; clang_analyzer_eval(b.values[0].x == 1); // expected-warning{{UNKNOWN}} clang_analyzer_eval(b.values[1].x == 2); // expected-warning{{UNKNOWN}} clang_analyzer_eval(b.values[2].x == 3); // expected-warning{{UNKNOWN}} NonPODDefaulted c; c = b; clang_analyzer_eval(c.values[0].x == 1); // expected-warning{{UNKNOWN}} clang_analyzer_eval(c.values[1].x == 2); // expected-warning{{UNKNOWN}} clang_analyzer_eval(c.values[2].x == 3); // expected-warning{{UNKNOWN}} } }; namespace VirtualInheritance { int counter; struct base { base() { ++counter; } }; struct virtual_subclass : public virtual base { virtual_subclass() {} }; struct double_subclass : public virtual_subclass { double_subclass() {} }; void test() { counter = 0; double_subclass obj; 
clang_analyzer_eval(counter == 1); // expected-warning{{TRUE}} } struct double_virtual_subclass : public virtual virtual_subclass { double_virtual_subclass() {} }; void testVirtual() { counter = 0; double_virtual_subclass obj; clang_analyzer_eval(counter == 1); // expected-warning{{TRUE}} } } namespace ZeroInitialization { struct raw_pair { int p1; int p2; }; void testVarDecl() { raw_pair p{}; clang_analyzer_eval(p.p1 == 0); // expected-warning{{TRUE}} clang_analyzer_eval(p.p2 == 0); // expected-warning{{TRUE}} } void testTemporary() { clang_analyzer_eval(raw_pair().p1 == 0); // expected-warning{{TRUE}} clang_analyzer_eval(raw_pair().p2 == 0); // expected-warning{{TRUE}} } void testArray() { raw_pair p[2] = {}; clang_analyzer_eval(p[0].p1 == 0); // expected-warning{{TRUE}} clang_analyzer_eval(p[0].p2 == 0); // expected-warning{{TRUE}} clang_analyzer_eval(p[1].p1 == 0); // expected-warning{{TRUE}} clang_analyzer_eval(p[1].p2 == 0); // expected-warning{{TRUE}} } void testNew() { raw_pair *pp = new raw_pair(); clang_analyzer_eval(pp->p1 == 0); // expected-warning{{TRUE}} clang_analyzer_eval(pp->p2 == 0); // expected-warning{{TRUE}} } void testArrayNew() { // FIXME: Pending proper implementation of constructors for 'new[]'. 
raw_pair *p = new raw_pair[2](); clang_analyzer_eval(p[0].p1 == 0); // expected-warning{{UNKNOWN}} clang_analyzer_eval(p[0].p2 == 0); // expected-warning{{UNKNOWN}} clang_analyzer_eval(p[1].p1 == 0); // expected-warning{{UNKNOWN}} clang_analyzer_eval(p[1].p2 == 0); // expected-warning{{UNKNOWN}} } struct initializing_pair { public: int x; raw_pair y; initializing_pair() : x(), y() {} }; void testFieldInitializers() { initializing_pair p; clang_analyzer_eval(p.x == 0); // expected-warning{{TRUE}} clang_analyzer_eval(p.y.p1 == 0); // expected-warning{{TRUE}} clang_analyzer_eval(p.y.p2 == 0); // expected-warning{{TRUE}} } struct subclass : public raw_pair { subclass() = default; }; void testSubclass() { subclass p; clang_analyzer_eval(p.p1 == 0); // expected-warning{{garbage}} } struct initializing_subclass : public raw_pair { initializing_subclass() : raw_pair() {} }; void testInitializingSubclass() { initializing_subclass p; clang_analyzer_eval(p.p1 == 0); // expected-warning{{TRUE}} clang_analyzer_eval(p.p2 == 0); // expected-warning{{TRUE}} } struct pair_wrapper { pair_wrapper() : p() {} raw_pair p; }; struct virtual_subclass : public virtual pair_wrapper { virtual_subclass() {} }; struct double_virtual_subclass : public virtual_subclass { double_virtual_subclass() { // This previously caused a crash because the pair_wrapper subobject was // initialized twice. } }; class Empty { public: static int glob; Empty(); // No body. Empty(int x); // Body below. }; class PairContainer : public Empty { public: raw_pair p; int q; PairContainer() : Empty(), p() { // This previously caused a crash because the empty base class looked // like an initialization of 'p'. } PairContainer(int) : Empty(), p() { // Test inlining something else here. 
} PairContainer(double): Empty(1), p() { clang_analyzer_eval(p.p1 == 0); // expected-warning{{TRUE}} clang_analyzer_eval(p.p2 == 0); // expected-warning{{TRUE}} clang_analyzer_eval(q == 1); // expected-warning{{TRUE}} // This one's indeed UNKNOWN. Definitely not TRUE. clang_analyzer_eval(p.p2 == glob); // expected-warning{{UNKNOWN}} } }; Empty::Empty(int x) { static_cast<PairContainer *>(this)->p.p1 = x; static_cast<PairContainer *>(this)->q = x; // Our static member will store the old garbage values of fields that aren't // yet initialized. It's not certainly garbage though (i.e. the constructor // could have been called on an initialized piece of memory), so no // uninitialized value warning here, and it should be a symbol, not // undefined value, for later comparison. glob = static_cast<PairContainer *>(this)->p.p2; } class Empty2 { public: static int glob_p1, glob_p2; Empty2(); // Body below. }; class PairDoubleEmptyContainer: public Empty, public Empty2 { public: raw_pair p; PairDoubleEmptyContainer(): Empty(), Empty2(), p() { clang_analyzer_eval(p.p1 == 0); // expected-warning{{TRUE}} clang_analyzer_eval(p.p2 == 0); // expected-warning{{TRUE}} // This is indeed UNKNOWN. 
clang_analyzer_eval(p.p1 == glob_p1); // expected-warning{{UNKNOWN}} clang_analyzer_eval(p.p2 == glob_p2); // expected-warning{{UNKNOWN}} } }; Empty2::Empty2() { glob_p1 = static_cast<PairDoubleEmptyContainer *>(this)->p.p1; glob_p2 = static_cast<PairDoubleEmptyContainer *>(this)->p.p2; } class PairContainerContainer { int padding; PairContainer pc; public: PairContainerContainer() : pc(1) {} }; } namespace InitializerList { struct List { bool usedInitializerList; List() : usedInitializerList(false) {} List(std::initializer_list<int>) : usedInitializerList(true) {} }; void testStatic() { List defaultCtor; clang_analyzer_eval(!defaultCtor.usedInitializerList); // expected-warning{{TRUE}} List list{1, 2}; clang_analyzer_eval(list.usedInitializerList); // expected-warning{{TRUE}} } void testDynamic() { List *list = new List{1, 2}; clang_analyzer_eval(list->usedInitializerList); // expected-warning{{TRUE}} } } namespace PR19579 { class C {}; void f() { C(); int a; extern void use(int); use(a); // expected-warning{{uninitialized}} } void g() { struct S { C c; int i; }; // This order triggers the initialization of the inner "a" after the // constructor for "C" is run, which used to confuse the analyzer // (is "C()" the initialization of "a"?). 
struct S s = { C(), ({ int a, b = 0; 0; }) }; } } namespace NoCrashOnEmptyBaseOptimization { struct NonEmptyBase { int X; explicit NonEmptyBase(int X) : X(X) {} }; struct EmptyBase {}; struct S : NonEmptyBase, EmptyBase { S() : NonEmptyBase(0), EmptyBase() {} }; void testSCtorNoCrash() { S s; } } namespace EmptyBaseAssign { struct B1 {}; struct B2 { int x; }; struct D: public B1, public B2 { const D &operator=(const D &d) { *((B2 *)this) = d; *((B1 *)this) = d; return *this; } }; void test() { D d1; d1.x = 1; D d2; d2 = d1; clang_analyzer_eval(d2.x == 1); // expected-warning{{TRUE}} } } namespace vbase_zero_init { class A { virtual void foo(); }; class B { virtual void bar(); public: static int glob_y, glob_z, glob_w; int x; B(); // Body below. }; class C : virtual public A { public: int y; }; class D : public B, public C { public: // 'z', unlike 'w', resides in an area that would have been within padding of // base class 'C' if it wasn't part of 'D', but only on 64-bit systems. int z, w; // Initialization order: A(), B(), C(). D() : A(), C() { clang_analyzer_eval(x == 1); // expected-warning{{TRUE}} clang_analyzer_eval(y == 0); // expected-warning{{TRUE}} #ifdef I386 clang_analyzer_eval(z == 3); // expected-warning{{TRUE}} #else // FIXME: Should be TRUE. Initialized in B(). clang_analyzer_eval(z == 3); // expected-warning{{UNKNOWN}} #endif clang_analyzer_eval(w == 4); // expected-warning{{TRUE}} // FIXME: Should be UNKNOWN. Changed in B() since glob_y was assigned. clang_analyzer_eval(y == glob_y); // expected-warning{{TRUE}} #ifdef I386 clang_analyzer_eval(z == glob_z); // expected-warning{{UNKNOWN}} #else // FIXME: Should be UNKNOWN. Changed in B() since glob_z was assigned. clang_analyzer_eval(z == glob_z); // expected-warning{{TRUE}} #endif clang_analyzer_eval(w == glob_w); // expected-warning{{UNKNOWN}} } // no-crash }; B::B() : x(1) { // Our static members will store the old garbage values of fields that aren't // yet initialized. 
These aren't certainly garbage though (i.e. the // constructor could have been called on an initialized piece of memory), // so no uninitialized value warning here, and these should be symbols, not // undefined values, for later comparison. glob_y = static_cast<D *>(this)->y; glob_z = static_cast<D *>(this)->z; glob_w = static_cast<D *>(this)->w; static_cast<D *>(this)->y = 2; static_cast<D *>(this)->z = 3; static_cast<D *>(this)->w = 4; } }
{ "pile_set_name": "Github" }
{ "comments": [ "Single Host connected to two devices (Dual-Homed)." ], "title": "Dual-Homed Scenario", "params": { "lastAuto": 0 }, "description": [ "Simple sequence of events...", "", "1. add instance", "2. add device [205]", "3. add device [206]", "4. add link [205] --> [206]", "5. add link [206] --> [205]", "6. add host (to [206])", "7. update device [205]", "8. update device [206]", "9. move host (to include [205])", "10. remove host", "11. update device [206]", "12. add host (to [206])", "13. update host" ] }
{ "pile_set_name": "Github" }
#!/usr/bin/env node --allow-natives-syntax
'use strict'

// Exercises V8 element-kind transitions for arrays that grow on demand
// versus arrays pre-allocated past the fast-array limit.
const test = require('tape')
const {
    assertKind
  , FAST_SMI_ONLY
  , FAST_DOUBLE
  , FAST
  , DICTIONARY
} = require('./util/element-kind')

// https://cs.chromium.org/chromium/src/v8/src/objects/js-array.h?type=cs&q=kmaxFast&l=90
const kMaxFastArrayLength = 32 * 1024 * 1024

test('\narray that was not pre-allocated but grown on demand', function(t) {
  const arr = []
  let len = kMaxFastArrayLength + 1
  while (len--) {
    arr.push(len)
  }
  // Grown arrays stay fast (Smi-only) even beyond kMaxFastArrayLength.
  assertKind(t, arr, FAST_SMI_ONLY, `to ${kMaxFastArrayLength + 1} elements, is fast`)

  // Storing a non-Smi, non-double value widens the kind from FAST_SMI_ONLY to FAST.
  arr[1] = undefined
  const msg = (
      `to ${kMaxFastArrayLength + 1} elements, becomes fast ` +
      `(no longer Smi only) when assigning a slot to 'undefined'`
  )
  assertKind(t, arr, FAST, msg)
  t.end()
})

// Fill arr[0..max) with small integers (Smis). max defaults to the full length.
function fillSmis(arr, max = arr.length) {
  for (let i = 0; max > i && i < arr.length; i++) arr[i] = i
}

// Fill arr[0..max) with non-integer doubles. max defaults to the full length.
// BUGFIX: this previously ignored its second argument and always filled the
// entire array, so the `fillDoubles(a, 10)` call below ("filled partially")
// actually wrote every one of the 32M slots.
function fillDoubles(arr, max = arr.length) {
  for (let i = 0; max > i && i < arr.length; i++) arr[i] = i * 0.1
}

test('\narrays that were pre-allocated to hold a specific number of elements', function(t) {
  const a = new Array(kMaxFastArrayLength)
  const b = new Array(kMaxFastArrayLength + 1)
  // At exactly kMaxFastArrayLength the pre-allocation is still fast;
  // one element more and V8 falls back to dictionary (slow) elements.
  assertKind(t, a, FAST_SMI_ONLY, `to ${a.length}, is initially fast smis`)
  assertKind(t, b, DICTIONARY, `to ${b.length}, is initially slow`)

  fillSmis(b, 1E6)
  assertKind(t, b, DICTIONARY, `to ${b.length}, and filled partially with Smis, is still slow`)

  fillSmis(b, b.length)
  assertKind(t, b, FAST_SMI_ONLY, `to ${b.length} and filled completely with Smis, becomes fast smis`)

  // Even a partial fill with doubles transitions the elements kind.
  fillDoubles(a, 10)
  assertKind(t, a, FAST_DOUBLE, `to ${a.length}, and filled partially with Doubles becomes fast doubles`)
  t.end()
})
{ "pile_set_name": "Github" }
(ns plastic.worker.editor.layout.spatial
  (:require [plastic.logging :refer-macros [log info warn error group group-end]]
            [plastic.worker.editor.toolkit.id :as id]
            [plastic.util.helpers :as helpers]
            [plastic.worker.editor.layout.utils :as utils]
            [meld.zip :as zip]))

; -------------------------------------------------------------------------------------------------------------------

;; Builds the "spatial web": per visual :section, a map of selectable layout
;; items grouped by their :spatial-index, so navigation code can find
;; neighbouring items.

;; Annotate a spatial-index->items map with :min and :max keys taken over its
;; existing keys, so consumers can clamp movement without rescanning.
;; NOTE(review): assumes helpers/best-val returns the best key under the given
;; comparator -- confirm against plastic.util.helpers.
(defn add-min-max [spatial-graph]
  ;; local `keys` shadows clojure.core/keys after this binding (RHS still
  ;; calls the core fn)
  (let [keys (keys spatial-graph)]
    (-> spatial-graph
      (assoc :min (helpers/best-val keys <))
      (assoc :max (helpers/best-val keys >)))))

;; Build the spatial web for the subtree at root-loc: collect every spatial
;; selectable (by node id and by its companion "spot" id), group by :section,
;; then within each section group by :spatial-index with :min/:max bounds.
(defn build-spatial-web [root-loc selectables]
  (let [ids (zip/descendants root-loc) ; guaranteed to be in left-to-right/top-down order
        ;; a node may contribute two selectables: itself and its spot
        fetch-selectables (fn [id]
                            (let [spot-id (id/make-spot id)]
                              [(get selectables id) (get selectables spot-id)]))
        spatial-selectables (filter utils/spatial? (mapcat fetch-selectables ids))]
    (helpers/process-map (group-by :section spatial-selectables)
      (fn [section-items]
        (add-min-max (group-by :spatial-index section-items))))))
{ "pile_set_name": "Github" }
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build linux
// +build ppc64le ppc64
// +build !gccgo

package unix

import "syscall"

// On linux/ppc64 and linux/ppc64le (gc toolchain only, per the build tags),
// this package needs no assembly stubs of its own: each entry point simply
// delegates to the corresponding function in the standard syscall package.

// Syscall invokes a system call with up to three arguments by delegating to
// syscall.Syscall.
func Syscall(trap, a1, a2, a3 uintptr) (r1, r2 uintptr, err syscall.Errno) {
	return syscall.Syscall(trap, a1, a2, a3)
}

// Syscall6 invokes a system call with up to six arguments by delegating to
// syscall.Syscall6.
func Syscall6(trap, a1, a2, a3, a4, a5, a6 uintptr) (r1, r2 uintptr, err syscall.Errno) {
	return syscall.Syscall6(trap, a1, a2, a3, a4, a5, a6)
}

// RawSyscall invokes a system call with up to three arguments by delegating
// to syscall.RawSyscall. Semantics of the "raw" variant (e.g. scheduler
// interaction) are defined by the syscall package.
func RawSyscall(trap, a1, a2, a3 uintptr) (r1, r2 uintptr, err syscall.Errno) {
	return syscall.RawSyscall(trap, a1, a2, a3)
}

// RawSyscall6 invokes a system call with up to six arguments by delegating
// to syscall.RawSyscall6.
func RawSyscall6(trap, a1, a2, a3, a4, a5, a6 uintptr) (r1, r2 uintptr, err syscall.Errno) {
	return syscall.RawSyscall6(trap, a1, a2, a3, a4, a5, a6)
}
{ "pile_set_name": "Github" }
/*
 * Copyright 2015 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package ratpack.exec.internal;

import ratpack.func.Block;

/**
 * A suspended point of execution that can later be resumed with more work.
 * <p>
 * NOTE(review): this is an internal type; the precise execution/threading
 * semantics of resumption are defined by the implementing class -- confirm
 * against the {@code ratpack.exec} implementation before relying on them.
 */
public interface Continuation {

  /**
   * Resumes this continuation, supplying the block of work to run next.
   *
   * @param rest the remaining work to execute upon resumption
   */
  void resume(Block rest);

}
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <project version="4"> <component name="RunConfigurationProducerService"> <option name="ignoredProducers"> <set> <option value="org.jetbrains.plugins.gradle.execution.test.runner.AllInPackageGradleConfigurationProducer" /> <option value="org.jetbrains.plugins.gradle.execution.test.runner.TestClassGradleConfigurationProducer" /> <option value="org.jetbrains.plugins.gradle.execution.test.runner.TestMethodGradleConfigurationProducer" /> </set> </option> </component> </project>
{ "pile_set_name": "Github" }
# All this catalog "translates" are quotation characters. # The msgids must be ASCII and therefore cannot contain real quotation # characters, only substitutes like grave accent (0x60), apostrophe (0x27) # and double quote (0x22). These substitutes look strange; see # http://www.cl.cam.ac.uk/~mgk25/ucs/quotes.html # # This catalog translates grave accent (0x60) and apostrophe (0x27) to # left single quotation mark (U+2018) and right single quotation mark (U+2019). # It also translates pairs of apostrophe (0x27) to # left single quotation mark (U+2018) and right single quotation mark (U+2019) # and pairs of quotation mark (0x22) to # left double quotation mark (U+201C) and right double quotation mark (U+201D). # # When output to an UTF-8 terminal, the quotation characters appear perfectly. # When output to an ISO-8859-1 terminal, the single quotation marks are # transliterated to apostrophes (by iconv in glibc 2.2 or newer) or to # grave/acute accent (by libiconv), and the double quotation marks are # transliterated to 0x22. # When output to an ASCII terminal, the single quotation marks are # transliterated to apostrophes, and the double quotation marks are # transliterated to 0x22. # # This catalog furthermore displays the text between the quotation marks in # bold face, assuming the VT100/XTerm escape sequences. #
{ "pile_set_name": "Github" }
fileFormatVersion: 2 guid: 4b06650fb3b6d4b738815827309fb1aa folderAsset: yes DefaultImporter: externalObjects: {} userData: assetBundleName: assetBundleVariant:
{ "pile_set_name": "Github" }
namespace FluentAssertions.Formatting
{
    /// <summary>
    /// Formatter that renders a <c>null</c> reference as the literal text <c>&lt;null&gt;</c>.
    /// </summary>
    public class NullValueFormatter : IValueFormatter
    {
        /// <summary>
        /// Determines whether this <see cref="IValueFormatter"/> applies to <paramref name="value"/>.
        /// </summary>
        /// <param name="value">The value for which to create a <see cref="string"/>.</param>
        /// <returns>
        /// <c>true</c> when <paramref name="value"/> is a <c>null</c> reference; otherwise, <c>false</c>.
        /// </returns>
        public bool CanHandle(object value) => value is null;

        /// <inheritdoc />
        public string Format(object value, FormattingContext context, FormatChild formatChild) => "<null>";
    }
}
{ "pile_set_name": "Github" }