repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
CMU-TRP/podd-api
reports/admin.py
<filename>reports/admin.py from django.contrib import admin from leaflet.admin import LeafletGeoAdmin from treebeard.forms import movenodeform_factory from treebeard.admin import TreeAdmin from common.models import Domain from reports.filters import AdministrationAreaFilter, DateTimeRangeFilter from reports.forms import ReportTypeForm, ReportForm, SpreadsheetResponseForm, ReportInvestigationForm, ReportLaboratoryCaseForm, \ GoogleCalendarResponseForm from reports.models import AdministrationArea, ReportType, Report, ReportState, CaseDefinition, SpreadsheetResponse, \ ReportTypeCategory, ReportInvestigation, ReportLaboratoryCase, AnimalLaboratoryCause, ReportLaboratoryItem, \ GoogleCalendarResponse, RecordSpec class AdministrationAreaAdmin(LeafletGeoAdmin): # form = movenodeform_factory(AdministrationArea) list_display = ('name', 'code') search_fields = ('name', 'code', ) exclude = ('curated_in', 'parent', 'mpoly', 'relative_to') class ReportAdmin(LeafletGeoAdmin): form = ReportForm change_form_template = 'admin/reports/extras/report_change_form.html' list_display = ('__unicode__', 'test_flag', 'type', 'administration_area', 'date') list_filter = ('negative', AdministrationAreaFilter, 'test_flag', 'type', ('date', DateTimeRangeFilter)) exclude = ('parent', 'tags') class ReportTypeCategoryAdmin(admin.ModelAdmin): pass class ReportTypeAdmin(admin.ModelAdmin): form = ReportTypeForm class RecordSpecAdmin(admin.ModelAdmin): pass class ReportStateAdmin(admin.ModelAdmin): search_fields = ('report_type__name', 'code', 'name') pass class CaseDefinitionAdmin(admin.ModelAdmin): search_fields = ['report_type__name', 'code'] def formfield_for_foreignkey(self, db_field, request=None, **kwargs): if db_field.name == 'from_state' or db_field.name == 'to_state': kwargs["queryset"] = ReportState.objects.order_by('report_type__name', 'code') elif db_field.name == 'domain': kwargs["queryset"] = Domain.objects.order_by('name') elif db_field.name == 'report_type': kwargs["queryset"] = 
ReportType.objects.order_by('name') return super(CaseDefinitionAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs) class SpreadsheetResponseAdmin(admin.ModelAdmin): form = SpreadsheetResponseForm class ReportInvestigationAdmin(admin.ModelAdmin): form = ReportInvestigationForm class ReportLaboratoryCaseAdmin(admin.ModelAdmin): form = ReportLaboratoryCaseForm class GoogleCalendarResponseAdmin(admin.ModelAdmin): form = GoogleCalendarResponseForm admin.site.register(AdministrationArea, AdministrationAreaAdmin) admin.site.register(ReportTypeCategory, ReportTypeCategoryAdmin) admin.site.register(ReportType, ReportTypeAdmin) admin.site.register(ReportState, ReportStateAdmin) admin.site.register(CaseDefinition, CaseDefinitionAdmin) admin.site.register(SpreadsheetResponse, SpreadsheetResponseAdmin) admin.site.register(RecordSpec, RecordSpecAdmin) admin.site.register(ReportInvestigation, ReportInvestigationAdmin) admin.site.register(ReportLaboratoryCase, ReportLaboratoryCaseAdmin) admin.site.register(AnimalLaboratoryCause) admin.site.register(ReportLaboratoryItem) admin.site.register(GoogleCalendarResponse, GoogleCalendarResponseAdmin)
Wikunia/hakank
jsr_331/Grid17x17.java
<reponame>Wikunia/hakank package org.jcp.jsr331.hakan; /** * * 17x17 problem in JSR-331. * * Problem from <NAME>: * http://lookforlight.tumblr.com/post/996786415/lets-do-real-cp-forbiddenassignment * """ * The n x m grid is c-colorable if there is a way * to c-color the vertices of the n x m grid so that * there is no rectangle with all four corners the * same color. (The rectangles I care about have the * sides parallel to the x and y axis.) * * Is there a 17x17 solution? * see: http://blog.computationalcomplexity.org/2009/11/17x17-challenge-worth-28900-this-is-not.html * """ * * Compare with the following models: * - MiniZinc: http://hakank.org/minizinc/17_b.mzn * http://hakank.org/minizinc/17_b3.mzn * - Comet: http://hakank.org/comet/17_b.co * - Zinc: http://hakank.org/minizinc/17_b3.zinc * * * Model by <NAME> (<EMAIL>) * Also see http://www.hakank.org/jsr_331/ */ import javax.constraints.*; import java.io.*; import java.util.*; import java.text.*; public class Grid17x17 { int num_rows; int num_cols; int num_colors; Var[] space_flatten; Problem p = ProblemFactory.newProblem("Grid 17x17"); // main public static void main(String[] args) { int num_rows_in = 6; int num_cols_in = 6 ; int num_colors_in = 4; if (args.length >= 1) { num_rows_in = Integer.parseInt(args[0]); } if (args.length >= 2) { num_cols_in = Integer.parseInt(args[1]); } if (args.length >= 3) { num_colors_in = Integer.parseInt(args[2]); } System.out.println("\nnum_rows: " + num_rows_in + " num_cols: " + num_cols_in + " num_colors: " + num_colors_in + "\n"); Grid17x17 p = new Grid17x17(); p.define(num_rows_in, num_cols_in, num_colors_in); p.solve(); } // Problem definition public void define(int num_rows_in, int num_cols_in, int num_colors_in) { num_rows = num_rows_in; num_cols = num_cols_in; num_colors = num_colors_in; Var[][] space = new Var[num_rows][num_cols]; space_flatten = new Var[num_rows*num_cols]; for(int i = 0; i < num_rows; i++) { for(int j = 0; j < num_cols; j++) { space[i][j] = 
p.variable("space-"+i+"-"+j, 0, num_colors-1); space_flatten[i*num_cols+j] = space[i][j]; } } int[] values = new int[num_colors]; int[] cardMin = new int[num_colors]; int[] cardMax = new int[num_colors]; for(int i = 0; i < num_colors; i++) { values[i] = i; cardMin[i] = 0; cardMax[i] = num_colors-1; } for(int r = 0; r < num_rows; r++) { for(int r2 = 0; r2 < r; r2++) { for(int c = 0; c < num_cols; c++) { for(int c2 = 0; c2 < c; c2++) { Var[] tmp = {space[r][c], space[r2][c], space[r][c2], space[r2][c2]}; p.postGlobalCardinality(tmp, values, cardMin, cardMax); } } } } // symmetry breaking p.post(space[0][0], "=", 0); } public void solve() { // // search // Solver solver = p.getSolver(); SearchStrategy strategy = solver.getSearchStrategy(); strategy.setVars(space_flatten); // strategy.setVarSelectorType(VarSelectorType.INPUT_ORDER); strategy.setVarSelectorType(VarSelectorType.MIN_VALUE); // strategy.setVarSelectorType(VarSelectorType.MAX_VALUE); // strategy.setVarSelectorType(VarSelectorType.MIN_DOMAIN); // strategy.setVarSelectorType(VarSelectorType.MIN_DOMAIN_MIN_VALUE); // strategy.setVarSelectorType(VarSelectorType.MIN_DOMAIN_RANDOM); // strategy.setVarSelectorType(VarSelectorType.RANDOM); // strategy.setVarSelectorType(VarSelectorType.MIN_DOMAIN_MAX_DEGREE); // strategy.setVarSelectorType(VarSelectorType.MIN_DOMAIN_OVER_DEGREE); // strategy.setVarSelectorType(VarSelectorType.MIN_DOMAIN_OVER_WEIGHTED_DEGREE); // strategy.setVarSelectorType(VarSelectorType.MAX_WEIGHTED_DEGREE); // strategy.setVarSelectorType(VarSelectorType.MAX_IMPACT); // strategy.setVarSelectorType(VarSelectorType.MAX_DEGREE); // strategy.setVarSelectorType(VarSelectorType.MAX_REGRET); // strategy.setValueSelectorType(ValueSelectorType.IN_DOMAIN); // strategy.setValueSelectorType(ValueSelectorType.MIN); // strategy.setValueSelectorType(ValueSelectorType.MAX); strategy.setValueSelectorType(ValueSelectorType.MIN_MAX_ALTERNATE); // strategy.setValueSelectorType(ValueSelectorType.MIDDLE); // 
strategy.setValueSelectorType(ValueSelectorType.MEDIAN); // strategy.setValueSelectorType(ValueSelectorType.RANDOM); // strategy.setValueSelectorType(ValueSelectorType.MIN_IMPACT); // strategy.setValueSelectorType(ValueSelectorType.CUSTOM); // // tracing // // solver.addSearchStrategy(new StrategyLogVariables(solver)); // solver.traceExecution(true); // // solve // int num_sols = 0; SolutionIterator iter = solver.solutionIterator(); while (iter.hasNext()) { num_sols++; Solution s = iter.next(); // s.log(); for(int i = 0; i < num_rows; i++) { for(int j = 0; j < num_cols; j++) { System.out.print(s.getValue("space-"+i+"-"+j) + " "); } System.out.println(); } System.out.println(); if (num_sols > 0) { break; } } solver.logStats(); } }
sbr-technologies/mls_property
frontend/web/public_main/js/site.js
String.prototype.trimToLength = function(m) { return (this.length > m) ? jQuery.trim(this).substring(0, m).split(" ").slice(0, -1).join(" ") + "..." : this; }; jQuery(document).on('click', '.mega-dropdown', function(e) { e.stopPropagation() }) $('.trigger').click(function() { $('.package-content').hide(); $('.' + $(this).data('rel')).show(); }); function isNumberKey(evt){ var charCode = (evt.which) ? evt.which : event.keyCode if (charCode > 31 && (charCode < 48 || charCode > 57)){ return false; } return true; } $(document).ready(function () { $('.btn_search_realestate').on('click', function(){ var that = $(this); var thisForm = that.closest('form'); var rentType = thisForm.find('.realestate_search_rent_type').val(); var loc = thisForm.find('.realestate_search_location').val(); var minPrice = thisForm.find('.adv_min_price').val(); var maxPrice = thisForm.find('.adv_max_price').val(); var bedroom = thisForm.find('.adv_bedroom').val(); var bathroom = thisForm.find('.adv_bathroom').val(); var propType = thisForm.find('.adv_property_type').val(); var constStatus = thisForm.find('.adv_construction_status').val(); var marktStatus = thisForm.find('.adv_market_status').val(); var propertyID = thisForm.find('.adv_propertyid').val(); if(!loc){ alert('Please select a location'); thisForm.find('.adventure-search-box').slideUp(); return false; } var url = thisForm.attr('action'); var loca = loc.split(', '), state, town, area; if(loca.length === 1){ state = loca[0]; url = updateQueryStringParameter(url, 'state', state); }else if(loca.length === 2){ town = loca[0]; state = loca[1]; url = updateQueryStringParameter(url, 'town', town); url = updateQueryStringParameter(url, 'state', state); }else if(loca.length === 3) { area = loca[0]; town = loca[1]; state = loca[2]; url = updateQueryStringParameter(url, 'area', area); url = updateQueryStringParameter(url, 'town', town); url = updateQueryStringParameter(url, 'state', state); } if(rentType){ url = updateQueryStringParameter(url, 
'rent_type', rentType); }if(minPrice){ url = updateQueryStringParameter(url, 'min_price', minPrice); }if(maxPrice){ url = updateQueryStringParameter(url, 'max_price', maxPrice); }if(bedroom){ url = updateQueryStringParameter(url, 'bedroom', bedroom); }if(bathroom){ url = updateQueryStringParameter(url, 'bathroom', bathroom); }if(propType){ url = updateQueryStringParameter(url, 'prop_types', propType); }if(constStatus){ url = updateQueryStringParameter(url, 'const_status', constStatus); }if(marktStatus){ url = updateQueryStringParameter(url, 'market_statuses', marktStatus); }if(propertyID){ url = updateQueryStringParameter(url, 'property_id', propertyID); } window.location.href = url; }); $(document).on('click', '.btn_search_hotel', function(){ var that = $(this); var thisForm = that.closest('form'); var loc = thisForm.find('.realestate_search_location').val(); var rating = thisForm.find('.adv_user_rating').val(); var facilities = []; $.each($("input[name='chk_filter_hotel_facilities[]']:checked"), function() { facilities.push($(this).val()); }); if(!loc){ alert('Please select a location'); thisForm.find('.adventure-search-box').slideUp(); return false; } var url = thisForm.attr('action'); url = updateQueryStringParameter(url, 'location', loc); if(rating){ url = updateQueryStringParameter(url, 'rating', rating); }if(facilities){ url = updateQueryStringParameter(url, 'facilities', facilities.join('-')); } window.location.href = url; }); $('.btn_search_agent').on('click', function(){ var that = $(this), url; var thisForm = that.closest('form'); var loc = thisForm.find('.realestate_search_location').val(); var agent = thisForm.find('.realestate_search_agent').val(); url = updateQueryStringParameter(thisForm.attr('action'), 'locations', loc); url = updateQueryStringParameter(url, 'agent', agent); window.location.href = url; }); $('.btn_get_property_estimate').on('click', function(){ var that = $(this), url; var thisForm = that.closest('form'); var loc = 
thisForm.find('.realestate_search_location').val(); url = updateQueryStringParameter(thisForm.attr('action'), 'location', loc); window.location.href = url; }); $('.galleryimg').gallerybox(); $('.featuresimg').gallerybox(); //Login and forgot password Show/Hide $('.forgot-pass-link').click(function () { $('.full-login-box').slideUp(); $('.forgot-password-box').slideDown(); }); $('.forgot-pass-click-here').click(function () { $('.full-login-box').slideDown(); $('.forgot-password-box').slideUp(); }); //Listing Slider $("#owl-demo").owlCarousel({ items: 4, navigation: true, navigationText: ["<i class='fa fa-long-arrow-left' aria-hidden='true'></i>", "<i class='fa fa-long-arrow-right' aria-hidden='true'></i>"], }); $("#owl-demo-agency").owlCarousel({ items: 5, navigation: true, slideBy:5, navigationText: ["<i class='fa fa-angle-double-left' aria-hidden='true'></i> Prev 5", "Next 5 <i class='fa fa-angle-double-right' aria-hidden='true'></i>"], }); //Listing Slider $("#newest-listings-buy").owlCarousel({ items: 4, navigation: true, navigationText: ["<i class='fa fa-long-arrow-left' aria-hidden='true'></i>", "<i class='fa fa-long-arrow-right' aria-hidden='true'></i>"], }); $("#home-listings-buy").owlCarousel({ items: 4, navigation: true, navigationText: ["<i class='fa fa-long-arrow-left' aria-hidden='true'></i>", "<i class='fa fa-long-arrow-right' aria-hidden='true'></i>"], }); //owlCarousel $("#luxury-listings-buy").owlCarousel({ items : 2, navigation : true, navigationText : ["<i class='fa fa-long-arrow-left' aria-hidden='true'></i>", "<i class='fa fa-long-arrow-right' aria-hidden='true'></i>"], itemsDesktop : [1199, 2], itemsDesktopSmall : [979, 1], itemsTablet : [768, 1], }); $("#affordable-listings-buy").owlCarousel({ items : 2, navigation : true, navigationText : ["<i class='fa fa-long-arrow-left' aria-hidden='true'></i>", "<i class='fa fa-long-arrow-right' aria-hidden='true'></i>"], itemsDesktop : [1199, 2], itemsDesktopSmall : [979, 1], itemsTablet : [768, 1], }); 
$("#news-feed-list").owlCarousel({ items : 4, navigation : true, navigationText : ["<i class='fa fa-long-arrow-left' aria-hidden='true'></i>", "<i class='fa fa-long-arrow-right' aria-hidden='true'></i>"], itemsDesktop : [1199, 2], itemsDesktopSmall : [979, 1], itemsTablet : [768, 1], }); //Listing Slider $("#rental-pools").owlCarousel({ items: 4, navigation: true, navigationText: ["<i class='fa fa-long-arrow-left' aria-hidden='true'></i>", "<i class='fa fa-long-arrow-right' aria-hidden='true'></i>"], }); $("#pet-friendly").owlCarousel({ items: 4, navigation: true, navigationText: ["<i class='fa fa-long-arrow-left' aria-hidden='true'></i>", "<i class='fa fa-long-arrow-right' aria-hidden='true'></i>"], }); $("#homes-rental").owlCarousel({ items: 4, navigation: true, navigationText: ["<i class='fa fa-long-arrow-left' aria-hidden='true'></i>", "<i class='fa fa-long-arrow-right' aria-hidden='true'></i>"], }); $("#studio-rental").owlCarousel({ items: 4, navigation: true, navigationText: ["<i class='fa fa-long-arrow-left' aria-hidden='true'></i>", "<i class='fa fa-long-arrow-right' aria-hidden='true'></i>"], }); //Counter $('.count').each(function () { $(this).prop('Counter', 0).animate({ Counter: $(this).text() }, { duration: 4000, easing: 'swing', step: function (now) { $(this).text(Math.ceil(now).toString().replace(/\B(?=(\d{3})+(?!\d))/g, ",")); } }); }); //Footer Link Show/Hide $('.footer-more').click(function () { $(this).prev('.footer-extra-link').slideDown(); $(this).hide(); $(this).next('.footer-less').show(); }); $('.footer-less').click(function () { $(this).prev('.footer-more').prev('.footer-extra-link').slideUp(); $(this).prev('.footer-more').show(); $(this).hide(); }); //Go to top $(window).scroll(function () { if ($(this).scrollTop() > 100) { $('.scrollToTop').fadeIn(); } else { $('.scrollToTop').fadeOut(); } }); //Click event to scroll to top $('.scrollToTop').click(function () { $('html, body').animate({scrollTop: 0}, 800); return false; }); //Property 
Search window height var windowHeight = $(window).innerHeight(); var headerHeight = $('.mainheader').height(); var menuHeight = $('.property-menu-bar').height(); //$('.property-search-sec, .property-search-map, .property-search-right-sec').css('height', ((windowHeight - (headerHeight + menuHeight)) - 10) + 'px' ) $('.property-search-sec, .property-search-map').css('height', ((windowHeight - (headerHeight + menuHeight)) - 10) + 'px') var propertyHeight = $('.property-menu-bar').height(); $('.property-menu-bar').after('<div class="property-menu-bar-inner"></div>'); //alert(propertyHeight); $('.property-menu-bar-inner').css('height', propertyHeight + 'px') //slimscroll $('.property-search-tabbar-sec .property-search-listing-sec').slimscroll({ //size: '15px' alwaysVisible: true, height: '400px', }); //News Carousel $('.carousel .vertical .item').each(function () { var next = $(this).next(); if (!next.length) { next = $(this).siblings(':first'); } next.children(':first-child').clone().appendTo($(this)); for (var i = 1; i < 2; i++) { next = next.next(); if (!next.length) { next = $(this).siblings(':first'); } next.children(':first-child').clone().appendTo($(this)); } }); $('body').on('submit', '.frm_newsletter_subscribe', function (e) { var postData = $(this).serialize(); var postUrl = $(this).attr("action");// alert(postUrl); $('.help-block help-block-error').remove(); $.ajax({ url: postUrl, type: "POST", //cache : false, data: postData, dataType: 'json', success: function (resp) { if (resp.success == true) { resetFormVal('frm_newsletter_subscribe', 0); $('.emailCls').val(''); $('.sucmsgdiv').html(resp.message); $('#sucMsgDiv').show('slow'); setTimeout(function () { $('#sucMsgDiv').fadeOut('slow'); }, 3000); //setTimeout(function(){ window.location = resp.redirect_url }, 5000); } else { var msg = first(resp.errors); $('.failmsgdiv').html(msg); $('#failMsgDiv').show('slow'); setTimeout(function () { $('#failMsgDiv').fadeOut('slow'); }, 3000); } }, error: function (xhr, 
textStatus, thrownError) { alert('Something went to wrong.Please Try again later...'); } }); return false; }); $('body').on('submit', '.contact-form', function (e) { var postData = $(this).serialize(); var postUrl = $(this).attr("action");// alert(postUrl); $('.help-block help-block-error').remove(); $.ajax({ url: postUrl, type: "POST", //cache : false, data: postData, dataType: 'json', success: function (resp) { if (resp.success == true) { $('.emailCls').val(''); $('.sucmsgdiv').html(resp.message); $('#sucMsgDiv').show('slow'); //setTimeout(function(){ window.location = resp.redirect_url }, 5000); } else { $('.failmsgdiv').html(resp.message); $('#failMsgDiv').show('slow'); } }, error: function (xhr, textStatus, thrownError) { alert('Something went to wrong.Please Try again later...'); } }); return false; }); $('body').on('submit', '.private-request-form', function (e) { $.loading(); var postData = $(this).serialize(); var postUrl = $(this).attr("action");// alert(postUrl); $('.help-block help-block-error').remove(); $.ajax({ url: postUrl, type: "POST", //cache : false, data: postData, dataType: 'json', success: function (resp) { $.loaded(); $(".error-message").remove(); if (resp.success == true) { $('.error-message').remove(); $('.emailCls').val(''); $('.txt_field').val(''); $('.reqsucmsgdiv').html(resp.message); $('#reqSucMsgDiv').show('slow'); setTimeout(function (){$('#reqSucMsgDiv').fadeOut('slow');},3000); } else { $('.reqfailmsgdiv').html(resp.message); if(resp.errors.name){ $('#propertyshowingrequest-name').after("<div class='error-message'>"+resp.errors.name+"</div>"); } if(resp.errors.email){ $('#propertyshowingrequest-email').after("<div class='error-message'>"+resp.errors.email+"</div>") ; } if(resp.errors.phone){ $('#propertyshowingrequest-phone').after("<div class='error-message'>"+resp.errors.phone+"</div>") ; } if(resp.errors.schedule){ $('#propertyshowingrequest-schedule').after("<div class='error-message'>"+resp.errors.schedule+"</div>") ; } 
$('#reqFailMsgDiv').show('slow'); setTimeout(function (){$('#reqFailMsgDiv').fadeOut('slow');},3000); } }, error: function (xhr, textStatus, thrownError) { alert('Something went to wrong.Please Try again later...'); } }); return false; }); $('body').on('submit', '.contact-agent-form', function (e) { // $.loading(); var postData = $(this).serialize(); var postUrl = $(this).attr("action");// alert(postUrl); $('.help-block help-block-error').remove(); $.ajax({ url: postUrl, type: "POST", //cache : false, data: postData, dataType: 'json', success: function (resp) { // $.loaded(); $(".error-message").remove(); if (resp.success == true) { $('.error-message').remove(); $('.emailCls').val(''); $('.txt_field').val(''); $('.reqsucmsgdiv').html(resp.message); $('#reqSucMsgDiv').show('slow'); setTimeout(function (){$('#reqSucMsgDiv').fadeOut('slow');},3000); } else { $('.reqfailmsgdiv').html(resp.message); if(resp.errors.name){ $('#contactagent-name').after("<div class='error-message'>"+resp.errors.name+"</div>"); } if(resp.errors.email){ $('#contactagent-email').after("<div class='error-message'>"+resp.errors.email+"</div>") ; } if(resp.errors.phone){ $('#contactagent-phone').after("<div class='error-message'>"+resp.errors.phone+"</div>") ; } $('#reqFailMsgDiv').show('slow'); setTimeout(function (){$('#reqFailMsgDiv').fadeOut('slow');},3000); } }, error: function (xhr, textStatus, thrownError) { alert('Something went to wrong.Please Try again later...'); } }); return false; }); $('#mls_bs_modal_one').on('shown.bs.modal', function (e) { $('#mls_bs_modal_one').off('hidden.bs.modal'); }); $('#mls_bs_modal_two').on('shown.bs.modal', function (e) { $('#mls_bs_modal_two').off('hidden.bs.modal'); }); $(document).on('hidden.bs.modal', '#mls_bs_modal_one', function (e) { if (typeof $(e.target).data('noremote') != 'undefined') { return true; } $(e.target).removeData('bs.modal'); $(e.target).find('.modal-content').html(''); }); $(document).on('hidden.bs.modal', '#mls_bs_modal_two', 
function (e) { if (typeof $(e.target).data('noremote') != 'undefined') { return true; } $(e.target).removeData('bs.modal'); $(e.target).find('.modal-content').html(''); }); $(document).on('click', '.lnk_signup' ,function (e) { var thisLink = $(this); $('#mls_bs_modal_one').modal('hide'); $('#mls_bs_modal_one').on('hidden.bs.modal', function (e) { $('#mls_bs_modal_two').modal({remote: thisLink.data('href')}); }); }); $(document).on('click', '.lnk_login' ,function (e) { var thisLink = $(this); $('#mls_bs_modal_two').modal('hide'); $('#mls_bs_modal_two').on('hidden.bs.modal', function (e) { $('#mls_bs_modal_one').modal({remote: thisLink.data('href')}); }); }); $(document).on('click', '.btn_add_package_booking_info', function () { var localInfoRowHtml = $('.dv_package_booking_block_template').html().replace(/curTime/g, Math.floor(Date.now() / 1000)); $('.dv_package_booking_info_container').append(localInfoRowHtml); }); $(document).on('click', '.delete_child', function () { var that = $(this), thatItem = that.closest('.item'); if (thatItem.hasClass('new') || confirm("Are you sure you want to delete this row?")) { if(that.hasClass('new')){ thatItem.remove(); }else{ thatItem.find('.hidin_child_id').val('1'); thatItem.hide(); } } }); }); function first(obj) { for (var a in obj) return obj[a]; } $(window).scroll(function () { if ($(this).scrollTop() > 650) { //$('.scroll-top-sec').show(); $('.scroll-top-sec').addClass("headerstuck"); } else { $('.scroll-top-sec').removeClass("headerstuck"); } }); $(document).ready(function () { //slimscroll $('.location-left-tab').slimscroll({ //size: '15px' distance: '5px', alwaysVisible: true, height: '346px', }); $('.features-more').click(function(){ $(this).prev('.features-listing-sub').slideDown(); $(this).hide(); $(this).next(".features-less").show(); }); $('.features-less').click(function(){ $(this).prev('.features-more').prev('.features-listing-sub').slideUp(); $(this).hide(); $(this).prev(".features-more").show(); }); 
$(".show-more-properties").click(function () { $(this).prev(".active-sold-table-listing").addClass("show-active-sold-table-listing"); $(this).hide(); $(this).next(".less-more-properties").show(); }); $(".less-more-properties").click(function () { $(this).prev(".show-more-properties").prev(".active-sold-table-listing").removeClass("show-active-sold-table-listing"); $(this).hide(); $(this).prev(".show-more-properties").show(); }); $(".show-more-Price").click(function () { $(this).prev(".average-price-table-listing").addClass("show-average-price-table-listing"); $(this).hide(); $(this).next(".less-more-Price").show(); }); $(".less-more-Price").click(function () { $(this).prev(".show-more-Price").prev(".average-price-table-listing").removeClass("show-average-price-table-listing"); $(this).hide(); $(this).prev(".show-more-Price").show(); }); }); $(document).ready(function () { $(document).on("scroll", onScroll); //smoothscroll $('.smoothscrollproperty').on('click', function (e) { e.preventDefault(); $(document).off("scroll"); $('.smoothscrollproperty').each(function () { $(this).removeClass('active'); }) $(this).addClass('active'); var target = this.hash, menu = target; $target = $(target); $('html, body').stop().animate({ 'scrollTop': $target.offset().top - ($(".headerstuck").outerHeight(true) + 20) }, 500, 'swing', function () { // window.location.hash = target; $(document).on("scroll", onScroll); }); }); $('.bnt_save_comment').on('click', function(){ $('#frm_blog_comment_data').ajaxForm({ //display the uploaded images beforeSubmit:function(e){ $.loading(); }, success:function(resp){ $.loaded(); if (resp.success == true){ resetFormVal('frm_blog_comment_data',0); $('.sucmsgdiv').html(resp.message); $('#sucMsgDiv').show('slow'); setTimeout(function(){ $('#sucMsgDiv').fadeOut('slow'); }, 3000); }else{ $('.failmsgdiv').html(resp.message); $('#failMsgDiv').show('slow'); setTimeout(function(){ $('#failMsgDiv').fadeOut('slow'); }, 3000); } }, error:function(e){ $.loaded(); } 
}).submit(); }); }); $(document).ready(function() { $('.more-menu-link').click(function(){ $(this).prev('.more-menu').slideDown(); $(this).hide(); $(this).next(".less-menu-link").show(); }); $('.less-menu-link').click(function(){ $(this).prev('.more-menu-link').prev('.more-menu').slideUp(); $(this).hide(); $(this).prev(".more-menu-link").show(); }); $('body').on('submit', '#contact-form', function(e){ var postData = $(this).serialize(); var postUrl = $(this).attr("action");// alert(postUrl); $('.help-block help-block-error').remove(); $.loading(); $.ajax({ url : postUrl, type: "POST", //cache : false, data : postData, //dataType : 'json', success:function(resp) { $.loaded(); // $('#signup_captcha_image').trigger('click'); if (resp.success == true){ resetFormVal('contact-form',0); $('.sucmsgdiv').html(resp.message); $('#sucMsgDiv').show('slow'); setTimeout(function(){ $('#sucMsgDiv').fadeOut('slow'); }, 3000); }else{ $('.failmsgdiv').html(resp.message); $('#failMsgDiv').show('slow'); setTimeout(function(){ $('#failMsgDiv').fadeOut('slow'); }, 3000); } }, error: function(xhr, textStatus, thrownError) { } }); return false; }); $('.btn_sell_estimate').on('click', function(){ var that = $(this); var thisForm = that.closest('form'); var rentType = thisForm.find('.realestate_search_rent_type').val(); var loc = thisForm.find('.realestate_search_location').val(); var addr = thisForm.find('.realestate_search_address').val(); var minPrice = thisForm.find('.adv_min_price').val(); var maxPrice = thisForm.find('.adv_max_price').val(); var bedroom = thisForm.find('.adv_bedroom').val(); var bathroom = thisForm.find('.adv_bathroom').val(); var garage = thisForm.find('.adv_garage').val(); var propType = thisForm.find('.adv_property_type').val(); var constStatus = thisForm.find('.adv_construction_status').val(); var marktStatus = thisForm.find('.adv_market_status').val(); var propertyID = thisForm.find('.adv_propertyid').val(); if(!loc){ alert('Please select a location'); 
thisForm.find('.adventure-search-box').slideUp(); return false; } var url = thisForm.attr('action'); var loca = loc.split(', '), state, town, area; if(loca.length === 1){ state = loca[0]; url = updateQueryStringParameter(url, 'state', state); }else if(loca.length === 2){ town = loca[0]; state = loca[1]; url = updateQueryStringParameter(url, 'town', town); url = updateQueryStringParameter(url, 'state', state); }else if(loca.length === 3) { area = loca[0]; town = loca[1]; state = loca[2]; url = updateQueryStringParameter(url, 'area', area); url = updateQueryStringParameter(url, 'town', town); url = updateQueryStringParameter(url, 'state', state); } var addra = addr.split(', '), streetName, streetNumber, apptUnit; if(addra.length === 1){ streetName = addra[0]; url = updateQueryStringParameter(url, 'street_address', streetName); }else if(addra.length === 2){ streetNumber = addra[0]; streetName = addra[1]; url = updateQueryStringParameter(url, 'street_number', streetNumber); url = updateQueryStringParameter(url, 'street_address', streetName); }else if(addra.length === 3) { streetNumber = addra[0]; streetName = addra[1]; apptUnit = addra[2]; url = updateQueryStringParameter(url, 'street_number', streetNumber); url = updateQueryStringParameter(url, 'street_address', streetName); url = updateQueryStringParameter(url, 'appartment_unit', apptUnit); } if(rentType){ url = updateQueryStringParameter(url, 'rent_type', rentType); }if(minPrice){ url = updateQueryStringParameter(url, 'min_price', minPrice); }if(maxPrice){ url = updateQueryStringParameter(url, 'max_price', maxPrice); }if(bedroom){ url = updateQueryStringParameter(url, 'bedroom', bedroom); }if(bathroom){ url = updateQueryStringParameter(url, 'bathroom', bathroom); }if(garage){ url = updateQueryStringParameter(url, 'garage', garage); }if(propType){ url = updateQueryStringParameter(url, 'prop_types', propType); }if(constStatus){ url = updateQueryStringParameter(url, 'const_status', constStatus); }if(marktStatus){ url = 
updateQueryStringParameter(url, 'market_statuses', marktStatus); }if(propertyID){ url = updateQueryStringParameter(url, 'property_id', propertyID); } window.location.href = url; }); }); function onScroll(event) { var scrollPos = $(document).scrollTop(); $('#menu-center a').each(function () { var currLink = $(this); var refElement = $(currLink.attr("href")); if (refElement.position().top <= scrollPos && refElement.position().top + refElement.height() > scrollPos) { $('#menu-center ul li a').removeClass("active"); currLink.addClass("active"); } else { currLink.removeClass("active"); } }); $('#rental-menu-center a').each(function () { var currLink = $(this); var refElement = $(currLink.attr("href")); if (refElement.position().top <= scrollPos && refElement.position().top + refElement.height() > scrollPos) { $('#menu-center ul li a').removeClass("active"); currLink.addClass("active"); } else { currLink.removeClass("active"); } }); } function kFormatter(num) { return num >= 1000000 ? 'N'+ (num/1000000).toFixed(0) + 'M' : (num > 999? 'N'+ (num/1000).toFixed(0) + 'k': 'N'+ num) } function resetFormVal(frmId,radVal,hidVal){ if(radVal == 1){ $('#'+frmId).find('input:checkbox').removeAttr('checked').removeAttr('selected'); $('.'+frmId).find('input:checkbox').removeAttr('checked').removeAttr('selected'); }else{ $('#'+frmId).find('input:radio, input:checkbox').removeAttr('checked').removeAttr('selected'); $('.'+frmId).find('input:radio, input:checkbox').removeAttr('checked').removeAttr('selected'); } if(hidVal == 1){ $('#'+frmId).find('input:hidden').val(''); } $('#'+frmId).find('input:password,input:text, input:file, select, textarea').val(''); $('.'+frmId).find('input:password,input:text, input:file, select, textarea').val(''); $('.help-block help-block-error').remove(); } function updateQueryStringParameter(uri, key, value) { var re = new RegExp("([?&])" + key + "=.*?(&|$)", "i"); var separator = uri.indexOf('?') !== -1 ? 
"&" : "?"; if (uri.match(re)) { return uri.replace(re, '$1' + key + "=" + value + '$2'); } else { return uri + separator + key + "=" + value; } } /** * * @param {type} min * @param {type} max * @returns {String} */ function getPriceButtonText(min, max){ var btnText = 'Any Price'; var minText = 'No Min', maxText = 'No Max'; if(min){ minText = kFormatter(min); } if(max){ maxText = kFormatter(max); } if(min || max){ btnText = minText+ '-'+ maxText; } return btnText; }
ffarsad/aion
modPrecompiled/test/org/aion/precompiled/contracts/ATB/BridgeControllerRingTest.java
package org.aion.precompiled.contracts.ATB; import static com.google.common.truth.Truth.assertThat; import static org.aion.precompiled.contracts.ATB.BridgeTestUtils.dummyContext; import org.aion.types.Address; import org.aion.crypto.ECKey; import org.aion.crypto.ECKeyFac; import org.aion.crypto.HashUtil; import org.aion.precompiled.contracts.DummyRepo; import org.junit.Before; import org.junit.Test; public class BridgeControllerRingTest { private BridgeStorageConnector connector; private BridgeController controller; private static final Address CONTRACT_ADDR = new Address(HashUtil.h256("contractAddress".getBytes())); private static final Address OWNER_ADDR = new Address(HashUtil.h256("ownerAddress".getBytes())); private static final ECKey members[] = new ECKey[] { ECKeyFac.inst().create(), ECKeyFac.inst().create(), ECKeyFac.inst().create(), ECKeyFac.inst().create(), ECKeyFac.inst().create() }; private static byte[][] getMemberAddress(ECKey[] members) { byte[][] memberList = new byte[members.length][]; for (int i = 0; i < members.length; i++) { memberList[i] = members[i].getAddress(); } return memberList; } @Before public void beforeEach() { DummyRepo repo = new DummyRepo(); this.connector = new BridgeStorageConnector(repo, CONTRACT_ADDR); this.controller = new BridgeController( connector, dummyContext().getSideEffects(), CONTRACT_ADDR, OWNER_ADDR); this.controller.initialize(); byte[][] memberList = new byte[members.length][]; for (int i = 0; i < members.length; i++) { memberList[i] = members[i].getAddress(); } // setup initial ring structure this.controller.ringInitialize(OWNER_ADDR.toBytes(), memberList); } @Test public void testRingInitialization() { for (ECKey k : members) { assertThat(this.connector.getActiveMember(k.getAddress())).isTrue(); } } @Test public void testRingReinitialization() { ErrCode code = this.controller.ringInitialize(OWNER_ADDR.toBytes(), getMemberAddress(members)); assertThat(code).isEqualTo(ErrCode.RING_LOCKED); } private static final 
byte[] memberAddress = HashUtil.h256("memberAddress".getBytes()); @Test public void testRingAddMember() { ErrCode code = this.controller.ringAddMember(OWNER_ADDR.toBytes(), memberAddress); assertThat(code).isEqualTo(ErrCode.NO_ERROR); } @Test public void testRingAddMemberNotOwner() { ErrCode code = this.controller.ringAddMember(CONTRACT_ADDR.toBytes(), memberAddress); assertThat(code).isEqualTo(ErrCode.NOT_OWNER); } @Test public void testRingAddExistingMember() { // add member twice this.controller.ringAddMember(OWNER_ADDR.toBytes(), memberAddress); ErrCode code = this.controller.ringAddMember(OWNER_ADDR.toBytes(), memberAddress); assertThat(code).isEqualTo(ErrCode.RING_MEMBER_EXISTS); } @Test public void testRingRemoveMember() { ErrCode code; code = this.controller.ringAddMember(OWNER_ADDR.toBytes(), memberAddress); assertThat(code).isEqualTo(ErrCode.NO_ERROR); assertThat(this.connector.getActiveMember(memberAddress)).isTrue(); code = this.controller.ringRemoveMember(OWNER_ADDR.toBytes(), memberAddress); assertThat(code).isEqualTo(ErrCode.NO_ERROR); assertThat(this.connector.getActiveMember(memberAddress)).isFalse(); } @Test public void testRingRemoveMemberNotOwner() { ErrCode code = this.controller.ringRemoveMember(CONTRACT_ADDR.toBytes(), memberAddress); assertThat(code).isEqualTo(ErrCode.NOT_OWNER); } @Test public void testRingRemoveNonExistingMember() { ErrCode code = this.controller.ringRemoveMember(OWNER_ADDR.toBytes(), memberAddress); assertThat(code).isEqualTo(ErrCode.RING_MEMBER_NOT_EXISTS); } }
ethansaxenian/RosettaDecode
lang/Java/loops-foreach-2.java
<reponame>ethansaxenian/RosettaDecode Iterable collect; ... collect.forEach(o -> System.out.println(o));
ferag/keystone
keystone/common/sql/migrate_repo/versions/089_add_root_of_all_domains.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import sqlalchemy as sql

_PROJECT_TABLE_NAME = 'project'
_DOMAIN_TABLE_NAME = 'domain'
# Sentinel id/name for the hidden root of all domains; rows carrying it are
# filtered out by the driver, so they are never visible via the API.
NULL_DOMAIN_ID = '<<keystone.domain.root>>'


def upgrade(migrate_engine):
    """Insert the hidden root-of-all-domains rows.

    Adds one row to the ``domain`` table and one to the ``project`` table,
    both keyed by ``NULL_DOMAIN_ID``, so that a FK constraint on
    ``domain_id`` can be introduced.  Both rows are disabled and invisible
    through the API (the driver filters them out).

    :param migrate_engine: SQLAlchemy engine bound to the keystone database.
    """

    def _generate_root_domain_project():
        # Generate a project that will act as a root for all domains, in order
        # for us to be able to use a FK constraint on domain_id. Projects
        # acting as a domain will not reference this as their parent_id, just
        # as domain_id.
        #
        # This special project is filtered out by the driver, so is never
        # visible to the manager or API.
        project_ref = {
            'id': NULL_DOMAIN_ID,
            'name': NULL_DOMAIN_ID,
            'enabled': False,
            'description': '',
            'domain_id': NULL_DOMAIN_ID,
            'is_domain': True,
            'parent_id': None,
            'extra': '{}'
        }
        return project_ref

    def _generate_root_domain():
        # Generate a similar root for the domain table, this is an interim
        # step so as to allow continuation of current project domain_id FK.
        #
        # This special domain is filtered out by the driver, so is never
        # visible to the manager or API.
        domain_ref = {
            'id': NULL_DOMAIN_ID,
            'name': NULL_DOMAIN_ID,
            'enabled': False,
            'extra': '{}'
        }
        return domain_ref

    meta = sql.MetaData()
    meta.bind = migrate_engine

    session = sql.orm.sessionmaker(bind=migrate_engine)()
    try:
        project_table = sql.Table(_PROJECT_TABLE_NAME, meta, autoload=True)
        domain_table = sql.Table(_DOMAIN_TABLE_NAME, meta, autoload=True)

        # The domain row is inserted first, preserving the original ordering
        # so the project row's domain_id already has its counterpart.
        root_domain = _generate_root_domain()
        session.execute(domain_table.insert().values(**root_domain))
        session.commit()

        root_domain_project = _generate_root_domain_project()
        session.execute(project_table.insert().values(**root_domain_project))
        session.commit()
    finally:
        # Release the connection even if reflection or an insert fails;
        # previously close() was skipped on any exception.
        session.close()
hishidama/asakusafw-compiler
dag/compiler/jdbc/src/main/java/com/asakusafw/dag/compiler/jdbc/windgate/WindGateJdbcOutputProcessorGenerator.java
/**
 * Copyright 2011-2019 Asakusa Framework Team.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.asakusafw.dag.compiler.jdbc.windgate;

import static com.asakusafw.dag.compiler.codegen.AsmUtil.*;

import java.util.List;
import java.util.function.Function;
import java.util.function.Supplier;

import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;

import com.asakusafw.dag.compiler.codegen.AsmUtil.LocalVarRef;
import com.asakusafw.dag.compiler.codegen.ClassGeneratorContext;
import com.asakusafw.dag.compiler.codegen.SupplierGenerator;
import com.asakusafw.dag.compiler.jdbc.PreparedStatementAdapterGenerator;
import com.asakusafw.dag.compiler.model.ClassData;
import com.asakusafw.dag.runtime.jdbc.operation.JdbcOutputProcessor;
import com.asakusafw.dag.runtime.jdbc.util.WindGateJdbcDirect;
import com.asakusafw.lang.compiler.model.description.ClassDescription;
import com.asakusafw.lang.utils.common.Arguments;
import com.asakusafw.lang.utils.common.Lang;
import com.asakusafw.lang.utils.common.Optionals;
import com.asakusafw.lang.utils.common.Tuple;

/**
 * Generates {@link JdbcOutputProcessor} using {@link WindGateJdbcDirect} API.
 *
 * <p>The generated class is a {@code JdbcOutputProcessor} subclass whose
 * constructor chains fluent {@code initialize(...)}/{@code output(...)} calls,
 * one pair per {@link Spec}.  The bytecode below is emitted with ASM, so the
 * helper methods here are extremely sensitive to operand-stack ordering —
 * see the inline comments in {@link #truncate} and {@link #output}.</p>
 * @since 0.4.0
 */
public final class WindGateJdbcOutputProcessorGenerator {

    private static final Type TRUNCATE_BUILDER = typeOf(WindGateJdbcDirect.TruncateBuilder.class);

    private static final Type OUTPUT_BUILDER = typeOf(WindGateJdbcDirect.OutputBuilder.class);

    // name-space pieces used when asking the context for a fresh class name
    private static final String CATEGORY = "jdbc.windgate"; //$NON-NLS-1$

    private static final String HINT = "OutputProcessor"; //$NON-NLS-1$

    private WindGateJdbcOutputProcessorGenerator() {
        return;
    }

    /**
     * Generates {@link JdbcOutputProcessor} class.
     * @param context the current context
     * @param specs the target operation specs
     * @return the generated class data
     */
    public static ClassData generate(ClassGeneratorContext context, List<Spec> specs) {
        Arguments.requireNonNull(context);
        Arguments.requireNonNull(specs);
        return generate(context, specs, context.getClassName(CATEGORY, HINT));
    }

    /**
     * Generates {@link JdbcOutputProcessor} class.
     * @param context the current context
     * @param specs the target operation specs
     * @param target the target class
     * @return the generated class data
     */
    public static ClassData generate(ClassGeneratorContext context, List<Spec> specs, ClassDescription target) {
        Arguments.requireNonNull(context);
        Arguments.requireNonNull(specs);
        ClassWriter writer = newWriter(target, JdbcOutputProcessor.class);
        defineEmptyConstructor(writer, JdbcOutputProcessor.class, v -> {
            // push "this" once; every emitted initialize()/output() call is
            // fluent (returns a JdbcOutputProcessor), so the receiver for the
            // next call is always the previous call's return value
            LocalVarRef self = new LocalVarRef(Opcodes.ALOAD, 0);
            self.load(v);
            specs.forEach(spec -> {
                truncate(context, v, spec);
                output(context, v, spec);
            });
            // discard the final chained return value left on the stack
            v.visitInsn(Opcodes.POP);
        });
        writer.visitEnd();
        return new ClassData(target, writer::toByteArray);
    }

    // emits: .initialize(id, profileName, WindGateJdbcDirect.truncate(...)....build())
    private static void truncate(ClassGeneratorContext context, MethodVisitor v, Spec spec) {
        if (spec.initialize == false) {
            return;
        }
        // stack (bottom -> top): id, profileName, profileName, tableName, columnNames.
        // The duplicate profileName is intentional: the top copy is consumed by
        // WindGateJdbcDirect.truncate(profileName, tableName, columnNames) just
        // below, while the bottom copy (with id) feeds the trailing
        // initialize(id, profileName, Function) call.
        getConst(v, spec.id);
        getConst(v, spec.model.getProfileName());
        getConst(v, spec.model.getProfileName());
        getConst(v, spec.model.getTableName());
        getList(v, Lang.project(spec.model.getColumnMappings(), Tuple::left));
        v.visitMethodInsn(Opcodes.INVOKESTATIC,
                typeOf(WindGateJdbcDirect.class).getInternalName(),
                "truncate",
                Type.getMethodDescriptor(TRUNCATE_BUILDER,
                        typeOf(String.class), // profileName
                        typeOf(String.class), // tableName
                        typeOf(List.class)), // columnNames
                false);
        // optional custom truncate statement, if the model defines one
        Lang.forEach(Optionals.of(spec.model.getCustomTruncate()), s -> {
            getConst(v, s);
            v.visitMethodInsn(Opcodes.INVOKEVIRTUAL,
                    TRUNCATE_BUILDER.getInternalName(),
                    "withCustomTruncate", //$NON-NLS-1$
                    Type.getMethodDescriptor(TRUNCATE_BUILDER, typeOf(String.class)),
                    false);
        });
        // forward each WindGate option string onto the builder
        Lang.forEach(spec.model.getOptions(), s -> {
            getConst(v, s);
            v.visitMethodInsn(Opcodes.INVOKEVIRTUAL,
                    TRUNCATE_BUILDER.getInternalName(),
                    "withOption", //$NON-NLS-1$
                    Type.getMethodDescriptor(TRUNCATE_BUILDER, typeOf(String.class)),
                    false);
        });
        v.visitMethodInsn(Opcodes.INVOKEVIRTUAL,
                TRUNCATE_BUILDER.getInternalName(),
                "build", //$NON-NLS-1$
                Type.getMethodDescriptor(typeOf(Function.class)),
                false);
        // consumes id, profileName, Function; leaves the processor for chaining
        v.visitMethodInsn(Opcodes.INVOKEVIRTUAL,
                typeOf(JdbcOutputProcessor.class).getInternalName(),
                "initialize", //$NON-NLS-1$
                Type.getMethodDescriptor(typeOf(JdbcOutputProcessor.class),
                        typeOf(String.class), typeOf(String.class), typeOf(Function.class)),
                false);
    }

    // emits: .output(id, profileName, WindGateJdbcDirect.output(...)....build())
    private static void output(ClassGeneratorContext context, MethodVisitor v, Spec spec) {
        if (spec.output == false) {
            return;
        }
        // same duplicate-profileName stack discipline as truncate() above;
        // the adapter Supplier is instantiated via getNew() as the 4th argument
        getConst(v, spec.id);
        getConst(v, spec.model.getProfileName());
        getConst(v, spec.model.getProfileName());
        getConst(v, spec.model.getTableName());
        getList(v, Lang.project(spec.model.getColumnMappings(), Tuple::left));
        getNew(v, getAdapter(context, spec));
        v.visitMethodInsn(Opcodes.INVOKESTATIC,
                typeOf(WindGateJdbcDirect.class).getInternalName(),
                "output",
                Type.getMethodDescriptor(OUTPUT_BUILDER,
                        typeOf(String.class), // profileName
                        typeOf(String.class), // tableName
                        typeOf(List.class), // columnNames
                        typeOf(Supplier.class)), // adapter
                false);
        Lang.forEach(spec.model.getOptions(), s -> {
            getConst(v, s);
            v.visitMethodInsn(Opcodes.INVOKEVIRTUAL,
                    OUTPUT_BUILDER.getInternalName(),
                    "withOption", //$NON-NLS-1$
                    Type.getMethodDescriptor(OUTPUT_BUILDER, typeOf(String.class)),
                    false);
        });
        v.visitMethodInsn(Opcodes.INVOKEVIRTUAL,
                OUTPUT_BUILDER.getInternalName(),
                "build", //$NON-NLS-1$
                Type.getMethodDescriptor(typeOf(Function.class)),
                false);
        v.visitMethodInsn(Opcodes.INVOKEVIRTUAL,
                typeOf(JdbcOutputProcessor.class).getInternalName(),
                "output", //$NON-NLS-1$
                Type.getMethodDescriptor(typeOf(JdbcOutputProcessor.class),
                        typeOf(String.class), typeOf(String.class), typeOf(Function.class)),
                false);
    }

    // generates (or reuses) a Supplier of the PreparedStatement adapter that maps
    // the spec's data type onto the right-hand side of its column mappings
    private static ClassDescription getAdapter(ClassGeneratorContext context, Spec spec) {
        return SupplierGenerator.get(context, PreparedStatementAdapterGenerator.get(
                context,
                new PreparedStatementAdapterGenerator.Spec(
                        spec.model.getDataType(),
                        Lang.project(spec.model.getColumnMappings(), Tuple::right))));
    }

    /**
     * Represents an operation spec for {@link JdbcOutputProcessor}.
     * @since 0.4.0
     */
    public static class Spec {

        // the output ID, passed to both initialize() and output()
        final String id;

        // the WindGate JDBC output model (profile, table, columns, options)
        final WindGateJdbcOutputModel model;

        // whether a truncate/initialize step is emitted for this spec
        final boolean initialize;

        // whether an output step is emitted for this spec
        final boolean output;

        /**
         * Creates a new instance.
         * @param id the output ID
         * @param model the output model
         */
        public Spec(String id, WindGateJdbcOutputModel model) {
            this(id, model, true, true);
        }

        private Spec(String id, WindGateJdbcOutputModel model, boolean initialize, boolean output) {
            Arguments.requireNonNull(id);
            Arguments.requireNonNull(model);
            this.id = id;
            this.model = model;
            this.initialize = initialize;
            this.output = output;
        }

        /**
         * Returns a copy of this.
         * @param doOutput {@code true} if the operation can output data, otherwise {@code false}
         * @return the copy
         */
        public Spec withOutput(boolean doOutput) {
            return new Spec(id, model, initialize, doOutput);
        }
    }
}
louis-tru/Ngui
deps/libjpeg/jconfigint.h
<reponame>louis-tru/Ngui<gh_stars>10-100 #include <ftr/util/env.h> /* jconfigint.h. Generated from jconfigint.h.in by configure. */ /* libjpeg-turbo build number */ #define BUILD "20160909" /* How to obtain function inlining. */ #define INLINE inline __attribute__((always_inline)) /* Define to the full name of this package. */ #define PACKAGE_NAME "libjpeg-turbo" /* Version number of package */ #define VERSION "1.5.1" /* The size of `size_t', as computed by sizeof. */ #if FX_ARCH_32BIT # define SIZEOF_SIZE_T 4 #else # define SIZEOF_SIZE_T 8 #endif
MRgzhen/sky-admin
sky-system/sky-generator/src/main/java/com/yanyu/sky/generator/config/EncodeResponseBodyAdvice.java
package com.yanyu.sky.generator.config; import cn.hutool.crypto.SecureUtil; import cn.hutool.crypto.symmetric.SymmetricAlgorithm; import cn.hutool.crypto.symmetric.SymmetricCrypto; import com.github.mrgzhen.core.exception.ServiceException; import com.github.mrgzhen.core.util.JSONUtil; import com.github.mrgzhen.core.web.Result; import com.yanyu.sky.generator.config.annotation.YanyuEncryption; import lombok.extern.slf4j.Slf4j; import org.springframework.core.MethodParameter; import org.springframework.http.MediaType; import org.springframework.http.server.ServerHttpRequest; import org.springframework.http.server.ServerHttpResponse; import org.springframework.web.bind.annotation.ControllerAdvice; import org.springframework.web.servlet.mvc.method.annotation.ResponseBodyAdvice; import java.util.HashMap; import java.util.Map; /** * @author yanyu * @date 2021/1/7, */ @ControllerAdvice @Slf4j public class EncodeResponseBodyAdvice implements ResponseBodyAdvice { @Override public boolean supports(MethodParameter returnType, Class converterType) { return returnType.getMethod().isAnnotationPresent(YanyuEncryption.class) || returnType.getMethod().getDeclaringClass().isAnnotationPresent(YanyuEncryption.class) ; } @Override public Object beforeBodyWrite(Object body, MethodParameter returnType, MediaType selectedContentType, Class selectedConverterType, ServerHttpRequest request, ServerHttpResponse response) { if (body == null) { return body ; } try { Result result = JSONUtil.instant().readValue(JSONUtil.instant().writeValueAsString(body), Result.class) ; Object bean = result.getBean(); if(bean == null) { return result; } SymmetricCrypto aes = new SymmetricCrypto(SymmetricAlgorithm.AES); Map<String,String> encryptResult = new HashMap<>(); encryptResult.put("key",aes.encryptBase64(JSONUtil.instant().writeValueAsString(result.getBean()))); result.setBean(encryptResult); result.setCode("10001"); return result; } catch (Exception e) { throw new ServiceException("加密异常",e) ; } } }
AngelMilovski/SuftUniLearning
08.JSEssentials/02.ObjectsAndDOM/02.Exercise/03. Number-Convertor/solution.js
function solve() { let select = document.querySelector('#selectMenuTo'); let binary = document.createElement('option'); binary.setAttribute('value', 'binary'); binary.textContent = 'Binary'; let hexadecimal = document.createElement('option'); hexadecimal.setAttribute('value', 'hexadecimal'); hexadecimal.textContent = 'Hexadecimal'; select.appendChild(binary); select.appendChild(hexadecimal); document.querySelector('button').addEventListener('click', convert); function convert() { let input = document.getElementById('input').value; if (input) { let decimal = +input; let result = ''; if (select.value === 'binary') { result = decimal.toString(2); } else if (select.value === 'hexadecimal') { result = decimal.toString(16).toLocaleUpperCase(); } document.getElementById('result').setAttribute('value', `${result}`); } } }
filler/homebrew-cask
Casks/microsoft-lync.rb
cask 'microsoft-lync' do version '14.4.3_170308' sha256 '6e980b1eaa85a83d7c7cebbdb1135af82158cae26ae18d0dd171576bd9cc076a' url "https://download.microsoft.com/download/5/0/0/500C7E1F-3235-47D4-BC11-95A71A1BA3ED/lync_#{version}.dmg" name 'Microsoft Lync 2011' homepage 'https://www.microsoft.com/en-us/download/details.aspx?id=36517' pkg 'Lync Installer.pkg' uninstall pkgutil: 'com.microsoft.lync.all.*' zap pkgutil: [ 'com.microsoft.mau.all.autoupdate.*', 'com.microsoft.merp.all.errorreporting.*', ], delete: [ '~/Library/Preferences/com.microsoft.Lync.plist', '~/Library/Logs/Microsoft-Lync-0.log', '~/Documents/Microsoft User Data/Microsoft Lync Data', ], rmdir: '~/Documents/Microsoft User Data' end
mystickev/ctf-archives
PlaidCTF/2021/web/Carmen_Sandiego_Season2/instance/server-sensor/goahead/www/main.js
window.onload = async () => { let res = await fetch("/cgi-bin/latest"); let data = await res.json(); for (let sensor of Object.keys(data.sensors)) { let sensorElement = document.createElement("div"); sensorElement.className = "sensor"; let valueElement = document.createElement("div"); valueElement.className = "value"; valueElement.innerText = data.sensors[sensor].value; sensorElement.appendChild(valueElement); let nameElement = document.createElement("div"); nameElement.className = "name"; nameElement.innerText = sensor; sensorElement.appendChild(nameElement); let restartElement = document.createElement("div"); restartElement.className = "restart"; restartElement.innerHTML = "&#x21BB;"; restartElement.title = "Restart Sensor"; restartElement.addEventListener("click", async () => { restartElement.innerText = "..."; await fetch("/cgi-bin/restart?sensor=" + sensor, { method: "POST" }); restartElement.innerHTML = "&#x21BB;"; }); sensorElement.appendChild(restartElement); document.getElementById("sensor-list").appendChild(sensorElement); } };
STS-Emory/STS-Clover
modules/technician/client/controllers/admin/admin-stat.client.controller.js
<gh_stars>1-10 'use strict'; angular.module('technician.admin').controller('StatisticsController', ['$scope', '$http', function ($scope, $http) { var options = { responsive: true, maintainAspectRatio: true, scales: { yAxes: [{ ticks: { beginAtZero: true } }] } }; var createChart = function(id, type, data, options){ return new Chart(document.querySelector('#'+id), { type: type, data: data, options: options }); }; var createBarDataset = function(label, data){ var color = randomColor({ luminosity: 'dark', format: 'rgbArray' }).toString(); return { label: label, data: data, borderWidth: 1, backgroundColor: 'rgba('+color+',0.6)', borderColor: 'rgba('+color+',1.0)', hoverBackgroundColor: 'rgba('+color+',0.8)', hoverBorderColor: 'rgba('+color+',1.0)' }; }; $scope.initLibraryGuidanceStat = function(){ if($scope.LibraryGuidanceBarChart) $scope.LibraryGuidanceBarChart.clear(); if($scope.CategoryTotal) delete $scope.CategoryTotal; if(!$scope.year) $scope.year = new Date().getFullYear(); $http.get('/api/tech/library-guidance/stat/'+$scope.year) .success(function(stats){ var idx, label, data = { labels: stats.labels, datasets: [] }; delete stats.labels; var MonthlyTotal = new Array(data.labels.length).fill(0); for(label in stats) for(idx in stats[label]) MonthlyTotal[idx] += stats[label][idx]; stats.Total = MonthlyTotal; for(label in stats) data.datasets.push(createBarDataset(label, stats[label])); $scope.LibraryGuidanceBarChart = createChart('LibraryGuidanceBarChart', 'bar', data, options); var total, CategoryTotal = [], count = function(val){ total += val; }; for(label in stats){ total = 0; stats[label].forEach(count); CategoryTotal.push({ label: label, count: total }); } $scope.CategoryTotal = CategoryTotal; }); }; } ]);
allewwaly/dementia-forensics
DementiaKM/HookEngine.h
#ifndef __HOOKENGINE_H_VERSION__ #define __HOOKENGINE_H_VERSION__ 100 #if defined(_MSC_VER) && (_MSC_VER >= 1020) #pragma once #endif #ifdef __cplusplus extern "C"{ #endif #include <ntddk.h> #ifdef __cplusplus }; #endif #define DEFINE_HOOK_CALL(Name, Arguments, CallBuffer) \ __declspec(naked) Name(Arguments) \ { \ __asm mov eax, [CallBuffer] \ __asm jmp eax \ } \ #define TAG_EXEC_BUFFER ('bXE') #ifdef __cplusplus extern "C" { #endif // __cplusplus VOID HkInit(VOID); VOID HkUnInit(VOID); BOOLEAN HkIsInitialized(void); NTSTATUS HkHook(IN PVOID pOriginalFunction, IN PVOID pNewFunction, OUT PUCHAR *ppHookedFunctionCallBuffer); NTSTATUS HkMapMDL(IN PVOID pAddressToMap, IN ULONG uBufferLength, IN KPROCESSOR_MODE mode, OUT PMDL *ppMDL, OUT PVOID *ppMappedAddress); NTSTATUS HkUnhook(IN PVOID pOriginalFunction); #ifdef __cplusplus }; // extern "C" #endif // __cplusplus #endif // __HOOKENGINE_H_VERSION__
CPU-Code/linux_network
introduction/udp/udp_receive.c
/* * @Author: cpu_code * @Date: 2020-05-02 10:29:30 * @LastEditTime: 2020-05-02 21:58:11 * @FilePath: \linux_network\introduction\udp\udp_receive.c * @Gitee: https://gitee.com/cpu_code * @CSDN: https://blog.csdn.net/qq_44226094 */ /* *Binding server to port 8080 *receive data ... * *ip:192.168.43.76,port:8080 *data(2):sb * *ip:192.168.43.76,port:8080 *data(7):cpucode *^C */ #include <stdio.h> #include <stdlib.h> #include <string.h> #include <unistd.h> #include <sys/socket.h> #include <netinet/in.h> #include <arpa/inet.h> int main(int argc, char *argv[]) { unsigned short port = 8080; int sockfd; struct sockaddr_in my_addr; int err_log; if(argc > 1) //修改本程序的端口 { port = atoi(argv[1]); } sockfd =socket(AF_INET,SOCK_DGRAM,0); if(sockfd < 0) //创建套接字 { perror("socket"); exit(-1); } //填充本程序的信息 bzero(&my_addr,sizeof(my_addr)); my_addr.sin_family = AF_INET; my_addr.sin_port = htons(port); my_addr.sin_addr.s_addr = htonl(INADDR_ANY); printf("Binding server to port %d\n",port); err_log = bind(sockfd, (struct sockaddr *)&my_addr, sizeof(my_addr)); if(err_log != 0) //绑定本程序要使用的信息 { perror("bind"); close(sockfd); exit(-1); } printf("receive data ...\n"); while(1) //收取数据 { int recv_len; char recv_buf[512] = " "; struct sockaddr_in client_addr; char cli_ip[INET_ADDRSTRLEN] = " "; socklen_t cliaddr_len = sizeof(client_addr); recv_len = recvfrom(sockfd, recv_buf, sizeof(recv_buf), 0, (struct sockaddr *)&client_addr, &cliaddr_len); inet_ntop(AF_INET, &client_addr.sin_addr, cli_ip,INET_ADDRSTRLEN); printf("\nip:%s,port:%d\n",cli_ip ,ntohs(client_addr.sin_port)); printf("data(%d):%s\n",recv_len,recv_buf); } close(sockfd); return 0; }
RobPiwowarek/SAG-Simple-Cloud-Optimizer
src/main/scala/pl/rpw/core/DBInitializer.scala
<reponame>RobPiwowarek/SAG-Simple-Cloud-Optimizer package pl.rpw.core import slick.jdbc.H2Profile.api._ object DBInitializer { val db = Database.forConfig("h2mem1") }
Manny27nyc/azure-sdk-for-java
sdk/spring/azure-spring-cloud-context/src/main/java/com/azure/spring/cloud/context/core/util/Tuple.java
<filename>sdk/spring/azure-spring-cloud-context/src/main/java/com/azure/spring/cloud/context/core/util/Tuple.java // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. package com.azure.spring.cloud.context.core.util; import java.util.Objects; /** * Tuple class. * @param <T> The type of the first element in the tuple. * @param <U> The type of the second element in the tuple. */ public final class Tuple<T, U> { private final T first; private final U second; private Tuple(T first, U second) { this.first = first; this.second = second; } public static <T, U> Tuple<T, U> of(T first, U second) { return new Tuple<>(first, second); } public T getFirst() { return first; } public U getSecond() { return second; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } Tuple<?, ?> tuple = (Tuple<?, ?>) o; return Objects.equals(first, tuple.first) && Objects.equals(second, tuple.second); } @Override public int hashCode() { return Objects.hash(first, second); } }
wangcy6/weekly_read
code_reading/oceanbase-master/src/sql/engine/expr/ob_expr_substrb.h
<gh_stars>0
/**
 * Copyright (c) 2021 OceanBase
 * OceanBase CE is licensed under Mulan PubL v2.
 * You can use this software according to the terms and conditions of the Mulan PubL v2.
 * You may obtain a copy of Mulan PubL v2 at:
 *          http://license.coscl.org.cn/MulanPubL-2.0
 * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
 * EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
 * MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
 * See the Mulan PubL v2 for more details.
 */

#ifndef OCEANBASE_SQL_ENGINE_EXPR_SUBSTRB_
#define OCEANBASE_SQL_ENGINE_EXPR_SUBSTRB_
#include "sql/engine/expr/ob_expr_operator.h"
namespace oceanbase {
namespace sql {

// Expression operator for SUBSTRB(): byte-position substring extraction.
// Accepts 2 (text, start_pos) or 3 (text, start_pos, length) arguments;
// the implementation additionally repairs substrings that would otherwise
// cut a multi-byte character in half (see the invalid-byte helpers below).
class ObExprSubstrb : public ObStringExprOperator {
public:
  explicit ObExprSubstrb(common::ObIAllocator& alloc);
  virtual ~ObExprSubstrb();

  // 2-argument form: SUBSTRB(text, start_pos) — substring to end of text.
  virtual int calc_result2(common::ObObj& result, const common::ObObj& text, const common::ObObj& start_pos,
      common::ObExprCtx& expr_ctx) const;
  // 3-argument form: SUBSTRB(text, start_pos, length).
  virtual int calc_result3(common::ObObj& result, const common::ObObj& text, const common::ObObj& start_pos,
      const common::ObObj& length, common::ObExprCtx& expr_ctx) const;
  // Deduces the result type/collation/length from the parameter types.
  virtual int calc_result_typeN(
      ObExprResType& type, ObExprResType* types_stack, int64_t param_num, common::ObExprTypeCtx& type_ctx) const;
  // Generic N-ary entry point dispatching to the 2/3-argument variants.
  virtual int calc_resultN(
      common::ObObj& result, const common::ObObj* objs_array, int64_t param_num, common::ObExprCtx& expr_ctx) const;

  // Static evaluation helpers usable without an operator instance.
  static int calc(common::ObObj& result, const common::ObString& text, int64_t start_pos, int64_t length,
      common::ObCollationType cs_type, const common::ObObjType& res_type, common::ObExprCtx& expr_ctx);
  static int calc(common::ObString& res_str, const common::ObString& text, int64_t start, int64_t length,
      common::ObCollationType cs_type, common::ObIAllocator& alloc);

  // New (vectorized) engine support: code generation and datum evaluation.
  virtual int cg_expr(ObExprCGCtx& op_cg_ctx, const ObRawExpr& raw_expr, ObExpr& rt_expr) const;
  static int calc_substrb_expr(const ObExpr& expr, ObEvalCtx& ctx, ObDatum& res);

private:
  // Computes the deduced result length in bytes for calc_result_typeN.
  int calc_result_length_in_byte(const ObExprResType& type, ObExprResType* types_array, int64_t param_num,
      common::ObCollationType cs_type, int64_t& res_len) const;

  // Adjusts [start, len) when it would split a multi-byte character;
  // reset_char is used to overwrite bytes that cannot be repaired.
  static int handle_invalid_byte(
      char* ptr, const int64_t text_len, int64_t& start, int64_t& len, char reset_char, common::ObCollationType cs_type);
  // Shrinks [start, len) to skip leading/trailing partial characters.
  static int ignore_invalid_byte(
      char* ptr, const int64_t text_len, int64_t& start, int64_t& len, common::ObCollationType cs_type);
  // Overwrites partial-character bytes inside [start, len) with reset_char.
  static int reset_invalid_byte(
      char* ptr, const int64_t text_len, int64_t start, int64_t len, char reset_char, common::ObCollationType cs_type);

  // return the well formatted boundary of charset,
  // set boundary_len to -1 if no valid boundary found (invalid character of charset).
  static int get_well_formatted_boundary(common::ObCollationType cs_type, char* ptr, const int64_t len, int64_t pos,
      int64_t& boundary_pos, int64_t& boundary_len);
  // disallow copy
  DISALLOW_COPY_AND_ASSIGN(ObExprSubstrb);
};
}  // namespace sql
}  // namespace oceanbase
#endif /* OCEANBASE_SQL_ENGINE_EXPR_SUBSTRB_ */
teresa-n-schofield/gfw
jstest/config.js
<gh_stars>0 require.config({ // appDir: '', baseUrl: '../app/assets/javascripts', modules: [ { "name": "map" }, { "name": "static" }, { "name": "landing" }, { "name": "stories" }, { "name": "countries" } ], paths: { amplify: '../../../vendor/assets/bower_components/amplify/lib/amplify', underscore: '../../../vendor/assets/bower_components/underscore/underscore', backbone: '../../../vendor/assets/bower_components/backbone/backbone', jquery: '../../../vendor/assets/bower_components/jquery/dist/jquery', d3: '../../../vendor/assets/bower_components/d3/d3', topojson: '../../../vendor/assets/bower_components/topojson/topojson', cartodb: '../../../vendor/assets/bower_components/cartodb.js/dist/cartodb.full.uncompressed', moment: '../../../vendor/assets/bower_components/moment/moment', text: '../../../vendor/assets/bower_components/requirejs-text/text', mps: '../../../vendor/assets/bower_components/minpubsub/minpubsub', _string: '../../../vendor/assets/bower_components/underscore.string/lib/underscore.string', chosen: "../../../vendor/assets/bower_components/chosen/chosen.jquery", jqueryui: '../../../vendor/assets/javascripts/jquery-ui-1.10.4.custom.min', jqueryujs: "../../../vendor/assets/javascripts/jquery-ujs/src/rails", markerclusterer: '../../../vendor/assets/bower_components/gmaps-markerclusterer-plus/src/markerclusterer', geojsonArea: '../../../vendor/assets/javascripts/geojson-area', uri: '../../../vendor/assets/bower_components/uri-templates/uri-templates', handlebars: '../../../vendor/assets/bower_components/handlebars/handlebars', slick: "../../../vendor/assets/bower_components/slick.js/slick/slick.min", simplePagination: "../../../vendor/assets/bower_components/jquery.simplePagination/jquery.simplePagination", keymaster: "../../../vendor/assets/bower_components/keymaster/keymaster", enquire: "../../../vendor/assets/bower_components/enquire/dist/enquire", picker: "../../../vendor/assets/bower_components/pickadate/lib/picker", pickadate: 
"../../../vendor/assets/bower_components/pickadate/lib/picker.date", scrollit: "../../../vendor/assets/bower_components/ScrollIt.js/scrollIt", qtip: "../../../vendor/assets/bower_components/qtip2/jquery.qtip.min", jquery_fileupload: "../../../vendor/assets/bower_components/jquery-file-upload/js/jquery.fileupload", jquery_migrate: "../../../vendor/assets/bower_components/jquery-migrate/jquery-migrate", geojson: "../../../vendor/assets/bower_components/geojson-google-maps/GeoJSON", wax: "../../../vendor/assets/javascripts/wax.g.min", store: '../../../vendor/assets/javascripts/store', Class: '../../../vendor/assets/bower_components/Class.js/Class', jasmine: "../../../vendor/assets/bower_components/jasmine", sinon: "../../../vendor/assets/bower_components/sinon/lib/sinon", bluebird: "../../../vendor/assets/bower_components/bluebird/js/browser/bluebird", turf: "../../../vendor/assets/bower_components/turf/turf", helpers: "helpers", utils: "map/utils", abstract: "abstract", templates: "templates", views: "views", }, shim: { "underscore": { "exports": "_" }, "_string": { "exports": "_string", "deps": [ "underscore" ] }, "amplify": { "deps": [ "jquery" ], "exports": "amplify" }, "backbone": { "deps": [ "jquery", "underscore" ], "exports": "Backbone" }, "jqueryui": { "deps": [ "jquery" ], "exports": "$" }, "chosen": { "deps": [ "jquery" ], "exports": "chosen" }, "pagination": { "deps": [ "jquery" ], "exports": "simplePagination" }, "Class": { "exports": "Class" }, "uri": { "exports": "UriTemplate" }, "handlebars": { "exports": "Handlebars" }, "picker": { "deps": [ "jquery" ] }, "pickadate": { "deps": [ "jquery", "picker" ], "exports": "DatePicker" }, "geojson": { "exports": "geojson" }, "jqueryujs": { "deps": [ "jquery" ], "exports": "jqueryujs" }, "scrollit": { "deps": [ "jquery" ], "exports": "scrollit" }, "jquery_migrate": { "deps": [ "jquery" ], "exports": "jquery_migrate" }, "jquery_fileupload": { "deps": [ "jquery" ], "exports": "jquery_fileupload" } } });
SSteve/fprime
docs/UsersGuide/api/c++/html/navtreeindex7.js
<filename>docs/UsersGuide/api/c++/html/navtreeindex7.js var NAVTREEINDEX7 = { "svc_sched_port.html#autotoc_md452":[3,23,0,1,0,1], "svc_sched_port.html#autotoc_md453":[3,23,0,2], "svc_tlm_chan_component.html":[3,24], "svc_tlm_chan_component.html#autotoc_md454":[3,24,0], "svc_tlm_chan_component.html#autotoc_md455":[3,24,0,0], "svc_tlm_chan_component.html#autotoc_md456":[3,24,0,1], "svc_tlm_chan_component.html#autotoc_md457":[3,24,0,2], "svc_tlm_chan_component.html#autotoc_md458":[3,24,0,2,0], "svc_tlm_chan_component.html#autotoc_md459":[3,24,0,2,0,0], "svc_tlm_chan_component.html#autotoc_md460":[3,24,0,2,0,1], "svc_tlm_chan_component.html#autotoc_md461":[3,24,0,2,0,2], "svc_tlm_chan_component.html#autotoc_md462":[3,24,0,2,1], "svc_tlm_chan_component.html#autotoc_md463":[3,24,0,2,1,0], "svc_tlm_chan_component.html#autotoc_md464":[3,24,0,2,1,1], "svc_tlm_chan_component.html#autotoc_md465":[3,24,0,2,2], "svc_tlm_chan_component.html#autotoc_md466":[3,24,0,2,3], "svc_tlm_chan_component.html#autotoc_md467":[3,24,0,3], "svc_tlm_chan_component.html#autotoc_md468":[3,24,0,4], "svc_tlm_chan_component.html#autotoc_md469":[3,24,0,5], "svc_tlm_chan_component.html#autotoc_md470":[3,24,0,6], "svc_watch_dog_port.html":[3,25], "svc_watch_dog_port.html#autotoc_md471":[3,25,0], "svc_watch_dog_port.html#autotoc_md472":[3,25,0,0], "svc_watch_dog_port.html#autotoc_md473":[3,25,0,1], "svc_watch_dog_port.html#autotoc_md474":[3,25,0,1,0], "svc_watch_dog_port.html#autotoc_md475":[3,25,0,1,0,0], "svc_watch_dog_port.html#autotoc_md476":[3,25,0,1,0,1], "svc_watch_dog_port.html#autotoc_md477":[3,25,0,2], "t_i_m_s_p_2_standard_types_8hpp.html":[10,0,5,16,2,0], "t_i_m_s_p_2_standard_types_8hpp_source.html":[10,0,5,16,2,0], "task_8hpp.html":[10,0,6,30], "task_8hpp_source.html":[10,0,6,30], "task_common_8cpp.html":[10,0,6,31], "task_common_8cpp_source.html":[10,0,6,31], "task_id_8cpp.html":[10,0,6,3,6], "task_id_8cpp_source.html":[10,0,6,3,6], "task_id_8hpp.html":[10,0,6,32], 
"task_id_8hpp_source.html":[10,0,6,32], "task_id_repr_8hpp.html":[10,0,6,33], "task_id_repr_8hpp_source.html":[10,0,6,33], "task_lock_8hpp.html":[10,0,6,34], "task_lock_8hpp_source.html":[10,0,6,34], "task_root_8cpp.html":[10,0,6,3,7], "task_root_8cpp.html#a4ee8359e96d6555acca4f046dd66ef70":[10,0,6,3,7,1], "task_root_8cpp.html#a9e21a2a88f554186e2b2380b00eab944":[10,0,6,3,7,0], "task_root_8cpp_source.html":[10,0,6,3,7], "task_runner_8cpp.html":[10,0,6,0,0,1], "task_runner_8cpp_source.html":[10,0,6,0,0,1], "task_runner_8hpp.html":[10,0,6,0,0,2], "task_runner_8hpp.html#a4a2a7ba8303c96ef47acc9783ed234b1":[10,0,6,0,0,2,1], "task_runner_8hpp_source.html":[10,0,6,0,0,2], "task_string_8cpp.html":[10,0,6,35], "task_string_8cpp_source.html":[10,0,6,35], "task_string_8hpp.html":[10,0,6,36], "task_string_8hpp_source.html":[10,0,6,36], "test_utils_8hpp.html":[10,0,8,9], "test_utils_8hpp.html#a026662471a24fb2ad95e99778ba50621":[10,0,8,9,1], "test_utils_8hpp.html#a257ceb48c0a090f307102f2958ad8215":[10,0,8,9,0], "test_utils_8hpp.html#a51c1f12c70ee864ca74eab2b91f23c06":[10,0,8,9,4], "test_utils_8hpp.html#a8bd873cfdf7de3543a0a60216eafb58a":[10,0,8,9,2], "test_utils_8hpp.html#aa5bb1555274b632592d1cd8a011cbf65":[10,0,8,9,7], "test_utils_8hpp.html#ab4c847636a67a5578a604cd301f61b86":[10,0,8,9,8], "test_utils_8hpp.html#acc1b0e818a4ae9f18e6701b6c3704ef3":[10,0,8,9,5], "test_utils_8hpp.html#acdbc7aa305a0a284f1cb3597bb4cee4a":[10,0,8,9,3], "test_utils_8hpp.html#af02da5914ea23e3d0a3e5f12ea261bcf":[10,0,8,9,6], "test_utils_8hpp_source.html":[10,0,8,9], "text_log_string_8cpp.html":[10,0,5,7,9], "text_log_string_8cpp_source.html":[10,0,5,7,9], "text_log_string_8hpp.html":[10,0,5,7,10], "text_log_string_8hpp_source.html":[10,0,5,7,10], "time_8cpp.html":[10,0,5,13,1], "time_8cpp_source.html":[10,0,5,13,1], "time_8hpp.html":[10,0,5,13,2], "time_8hpp_source.html":[10,0,5,13,2], "time_component_ac_8cpp.html":[10,0,0,1,2,27,0], "time_component_ac_8cpp_source.html":[10,0,0,1,2,27,0], 
"time_component_ac_8hpp.html":[10,0,0,1,2,27,1], "time_component_ac_8hpp_source.html":[10,0,0,1,2,27,1], "time_port_ac_8cpp.html":[10,0,0,1,1,5,0], "time_port_ac_8cpp.html#a2ae03face6ffac8ca8f4bd3f6053ba76":[10,0,0,1,1,5,0,0], "time_port_ac_8cpp_source.html":[10,0,0,1,1,5,0], "time_port_ac_8hpp.html":[10,0,0,1,1,5,1], "time_port_ac_8hpp_source.html":[10,0,0,1,1,5,1], "timer_val_8cpp.html":[10,0,7,11,0], "timer_val_8cpp_source.html":[10,0,7,11,0], "timer_val_8hpp.html":[10,0,7,11,1], "timer_val_8hpp_source.html":[10,0,7,11,1], "tlm_buffer_8cpp.html":[10,0,5,14,1], "tlm_buffer_8cpp_source.html":[10,0,5,14,1], "tlm_buffer_8hpp.html":[10,0,5,14,2], "tlm_buffer_8hpp_source.html":[10,0,5,14,2], "tlm_chan_component_ac_8cpp.html":[10,0,0,1,2,28,0], "tlm_chan_component_ac_8cpp.html#a335357dff6c6c7017a5b0bcc4d4fa2f0":[10,0,0,1,2,28,0,0], "tlm_chan_component_ac_8cpp.html#ac5a1ef32ed11a5dbbc0ffcbc490ca95f":[10,0,0,1,2,28,0,2], "tlm_chan_component_ac_8cpp.html#af448c393dda57dda8fe55b956828f63e":[10,0,0,1,2,28,0,1], "tlm_chan_component_ac_8cpp_source.html":[10,0,0,1,2,28,0], "tlm_chan_component_ac_8hpp.html":[10,0,0,1,2,28,1], "tlm_chan_component_ac_8hpp_source.html":[10,0,0,1,2,28,1], "tlm_chan_impl_8cpp.html":[10,0,7,28,1], "tlm_chan_impl_8cpp_source.html":[10,0,7,28,1], "tlm_chan_impl_8hpp.html":[10,0,7,28,2], "tlm_chan_impl_8hpp_source.html":[10,0,7,28,2], "tlm_chan_impl_cfg_8hpp.html":[10,0,2,9], "tlm_chan_impl_cfg_8hpp_source.html":[10,0,2,9], "tlm_chan_impl_get_8cpp.html":[10,0,7,28,3], "tlm_chan_impl_get_8cpp_source.html":[10,0,7,28,3], "tlm_chan_impl_recv_8cpp.html":[10,0,7,28,4], "tlm_chan_impl_recv_8cpp_source.html":[10,0,7,28,4], "tlm_chan_impl_task_8cpp.html":[10,0,7,28,5], "tlm_chan_impl_task_8cpp_source.html":[10,0,7,28,5], "tlm_packet_8cpp.html":[10,0,5,14,3], "tlm_packet_8cpp_source.html":[10,0,5,14,3], "tlm_packet_8hpp.html":[10,0,5,14,4], "tlm_packet_8hpp_source.html":[10,0,5,14,4], "tlm_port_ac_8cpp.html":[10,0,0,1,1,6,0], 
"tlm_port_ac_8cpp.html#ac279bec091ed9d4094da29e96a0a4997":[10,0,0,1,1,6,0,0], "tlm_port_ac_8cpp_source.html":[10,0,0,1,1,6,0], "tlm_port_ac_8hpp.html":[10,0,0,1,1,6,1], "tlm_port_ac_8hpp_source.html":[10,0,0,1,1,6,1], "tlm_string_8cpp.html":[10,0,5,14,5], "tlm_string_8cpp_source.html":[10,0,5,14,5], "tlm_string_8hpp.html":[10,0,5,14,6], "tlm_string_8hpp_source.html":[10,0,5,14,6], "token_bucket_8cpp.html":[10,0,8,10], "token_bucket_8cpp_source.html":[10,0,8,10], "token_bucket_8hpp.html":[10,0,8,11], "token_bucket_8hpp.html#a8989e1931a56f7f3c08ac5c9ffbce499":[10,0,8,11,1], "token_bucket_8hpp_source.html":[10,0,8,11], "trap_handler_8hpp.html":[10,0,5,15,0], "trap_handler_8hpp_source.html":[10,0,5,15,0], "udp_receiver_component_impl_8cpp.html":[10,0,7,29,0], "udp_receiver_component_impl_8cpp.html#a9e21a2a88f554186e2b2380b00eab944":[10,0,7,29,0,0], "udp_receiver_component_impl_8cpp_source.html":[10,0,7,29,0], "udp_receiver_component_impl_8hpp.html":[10,0,7,29,1], "udp_receiver_component_impl_8hpp_source.html":[10,0,7,29,1], "udp_receiver_component_impl_cfg_8hpp.html":[10,0,2,10], "udp_receiver_component_impl_cfg_8hpp.html#a324a0d51a197935af1bbd9903f2bf035":[10,0,2,10,0], "udp_receiver_component_impl_cfg_8hpp_source.html":[10,0,2,10], "udp_sender_component_impl_8cpp.html":[10,0,7,30,0], "udp_sender_component_impl_8cpp.html#a9e21a2a88f554186e2b2380b00eab944":[10,0,7,30,0,0], "udp_sender_component_impl_8cpp_source.html":[10,0,7,30,0], "udp_sender_component_impl_8hpp.html":[10,0,7,30,1], "udp_sender_component_impl_8hpp_source.html":[10,0,7,30,1], "udp_sender_component_impl_cfg_8hpp.html":[10,0,2,11], "udp_sender_component_impl_cfg_8hpp.html#a3fc0b5fd329d5e81c8098d1102b196ad":[10,0,2,11,0], "udp_sender_component_impl_cfg_8hpp_source.html":[10,0,2,11], "utils.html":[5], "utils_hash_class.html":[5,0], "utils_hash_class.html#autotoc_md492":[5,0,0], "utils_hash_class.html#autotoc_md493":[5,0,0,0], "utils_hash_class.html#autotoc_md494":[5,0,0,1], 
"utils_hash_class.html#autotoc_md495":[5,0,0,2], "utils_lock_guard_class.html":[5,1], "utils_lock_guard_class.html#autotoc_md478":[5,1,0], "utils_lock_guard_class.html#autotoc_md479":[5,1,0,0], "utils_lock_guard_class.html#autotoc_md480":[5,1,0,1], "utils_lock_guard_class.html#autotoc_md481":[5,1,0,1,0], "utils_rate_limiter_class.html":[5,2], "utils_rate_limiter_class.html#autotoc_md482":[5,2,0], "utils_rate_limiter_class.html#autotoc_md483":[5,2,0,0], "utils_rate_limiter_class.html#autotoc_md484":[5,2,0,1], "utils_rate_limiter_class.html#autotoc_md485":[5,2,0,1,0], "utils_rate_limiter_class.html#autotoc_md486":[5,2,0,1,1], "utils_token_bucket_class.html":[5,3], "utils_token_bucket_class.html#autotoc_md487":[5,3,0], "utils_token_bucket_class.html#autotoc_md488":[5,3,0,0], "utils_token_bucket_class.html#autotoc_md489":[5,3,0,1], "utils_token_bucket_class.html#autotoc_md490":[5,3,0,1,0], "utils_token_bucket_class.html#autotoc_md491":[5,3,0,1,1], "validate_file_8hpp.html":[10,0,6,39], "validate_file_8hpp.html#a1ba410583104275caa3071c92e24f9b1":[10,0,6,39,1], "validate_file_8hpp.html#a1ba410583104275caa3071c92e24f9b1a05a25ab9c1043e8b2685b64e86da2c54":[10,0,6,39,1,3], "validate_file_8hpp.html#a1ba410583104275caa3071c92e24f9b1a3719714fd40be1af2ad0813ff9267085":[10,0,6,39,1,0], "validate_file_8hpp.html#a1ba410583104275caa3071c92e24f9b1a4435d316a9c7cf7a8f9ce14e035a6192":[10,0,6,39,1,5], "validate_file_8hpp.html#a1ba410583104275caa3071c92e24f9b1a5e6778d8da99f0fa5464161a5ad4bf41":[10,0,6,39,1,2], "validate_file_8hpp.html#a1ba410583104275caa3071c92e24f9b1a60f553cc9a588ee6b1a70fa56d55a88c":[10,0,6,39,1,1], "validate_file_8hpp.html#a1ba410583104275caa3071c92e24f9b1a9eb9a7aeeacad9d855d64daa10ae10af":[10,0,6,39,1,9], "validate_file_8hpp.html#a1ba410583104275caa3071c92e24f9b1ab7d148f9eabd4ef83b60de94d9c31e63":[10,0,6,39,1,4], "validate_file_8hpp.html#a1ba410583104275caa3071c92e24f9b1ac020844df45b631c9ca60d0395194ee7":[10,0,6,39,1,8], 
"validate_file_8hpp.html#a1ba410583104275caa3071c92e24f9b1af86262392ee5e8e496b418b6698dae10":[10,0,6,39,1,7], "validate_file_8hpp.html#a1ba410583104275caa3071c92e24f9b1afa1f7b10e9161f632490636919e5ac19":[10,0,6,39,1,6], "validate_file_8hpp.html#a56a150d3f0b755e4e383cdf8c843735d":[10,0,6,39,3], "validate_file_8hpp.html#a7b8ef004ebdae6ef83d6ab8c63e2b5f6":[10,0,6,39,0], "validate_file_8hpp.html#a8174613e8d564cd35491f134daa0464e":[10,0,6,39,2], "validate_file_8hpp.html#a8334a6a18fcdcba2358c023beb34ef4f":[10,0,6,39,4], "validate_file_8hpp.html#aafefcd3043d3757fb000d372f5cf2705":[10,0,6,39,5], "validate_file_8hpp_source.html":[10,0,6,39], "validate_file_common_8cpp.html":[10,0,6,40], "validate_file_common_8cpp.html#aaa40afab1b180436effc480719027ab5":[10,0,6,40,1], "validate_file_common_8cpp.html#ac34db4dbb26ce4dedc2ec9591502a89b":[10,0,6,40,2], "validate_file_common_8cpp.html#ac8ca2101117d8aff1fe5774f94d1a8d7":[10,0,6,40,4], "validate_file_common_8cpp.html#ae930b408addcfa27b935d9bef5fd7b0f":[10,0,6,40,0], "validate_file_common_8cpp.html#ae930b408addcfa27b935d9bef5fd7b0fa87bfe3cd684ef5b4f25624e4aeda7772":[10,0,6,40,0,1], "validate_file_common_8cpp.html#ae930b408addcfa27b935d9bef5fd7b0fac9eb045161c16a4ccb69660e66d5b5b8":[10,0,6,40,0,0], "validate_file_common_8cpp.html#ae9637ba59aad4e05a029a538f1e50a2c":[10,0,6,40,3], "validate_file_common_8cpp_source.html":[10,0,6,40], "validated_file_8cpp.html":[10,0,6,37], "validated_file_8cpp_source.html":[10,0,6,37], "validated_file_8hpp.html":[10,0,6,38], "validated_file_8hpp_source.html":[10,0,6,38], "vx_works_2_standard_types_8hpp.html":[10,0,5,16,3,0], "vx_works_2_standard_types_8hpp_source.html":[10,0,5,16,3,0], "vx_works_time_impl_8cpp.html":[10,0,7,19,4], "vx_works_time_impl_8cpp_source.html":[10,0,7,19,4], "watch_dog_port_ac_8cpp.html":[10,0,0,1,2,29,0], "watch_dog_port_ac_8cpp.html#a39ce03a6df36fbba34ba9469890a5753":[10,0,0,1,2,29,0,0], "watch_dog_port_ac_8cpp_source.html":[10,0,0,1,2,29,0], 
"watch_dog_port_ac_8hpp.html":[10,0,0,1,2,29,1], "watch_dog_port_ac_8hpp_source.html":[10,0,0,1,2,29,1], "watchdog_timer_8cpp.html":[10,0,6,1,4], "watchdog_timer_8cpp_source.html":[10,0,6,1,4], "watchdog_timer_8hpp.html":[10,0,6,41], "watchdog_timer_8hpp_source.html":[10,0,6,41], "x86_2_interval_timer_8cpp.html":[10,0,6,6,0], "x86_2_interval_timer_8cpp_source.html":[10,0,6,6,0], "annotated.html":[9,0], "class_c_f_d_p_1_1_checksum.html":[9,0,0,0], "class_c_f_d_p_1_1_checksum.html#a01e9037d9d8d9257c26d96f4f91f83c0":[9,0,0,0,2], "class_c_f_d_p_1_1_checksum.html#a198204216b63b699ab29f425fc2f3938":[9,0,0,0,1], "class_c_f_d_p_1_1_checksum.html#a6847627f018d2fbb8105926224be2ff9":[9,0,0,0,6], "class_c_f_d_p_1_1_checksum.html#a6e37e96f85d410603a42a8140eb23f9b":[9,0,0,0,8], "class_c_f_d_p_1_1_checksum.html#a7962701bf38d13f048f610558f0f2832":[9,0,0,0,5], "class_c_f_d_p_1_1_checksum.html#a86a92e548bff706d8d128ad0cf93bbf7":[9,0,0,0,4], "class_c_f_d_p_1_1_checksum.html#a8956445c5af56379212d4db396e667cd":[9,0,0,0,3], "class_c_f_d_p_1_1_checksum.html#a9d307bf51cbfe10b7678369f5d9f4aa9":[9,0,0,0,11], "class_c_f_d_p_1_1_checksum.html#aa74f73b1f170f6ef0b1f552873fb86f7":[9,0,0,0,7], "class_c_f_d_p_1_1_checksum.html#ab923247b59cf7b16cba1a0e0a2862711":[9,0,0,0,9], "class_c_f_d_p_1_1_checksum.html#acaf4bb5067657c99a1294052e5cd1e4e":[9,0,0,0,12], "class_c_f_d_p_1_1_checksum.html#af966b3330719a48a1976dd0ec24681e8":[9,0,0,0,0], "class_c_f_d_p_1_1_checksum.html#af9b17cd8248c102e848356188a513ba3":[9,0,0,0,10], "class_drv_1_1_block_driver_component_base.html":[9,0,1,0], "class_drv_1_1_block_driver_component_base.html#a075e850774bc8b9ca09ee4fd309c0219":[9,0,1,0,35], "class_drv_1_1_block_driver_component_base.html#a07f7bafb931518f1f1f4eef38403c7cd":[9,0,1,0,62], "class_drv_1_1_block_driver_component_base.html#a0d6902411a39ce44d8b0651c27826139":[9,0,1,0,22], "class_drv_1_1_block_driver_component_base.html#a0f29b2d80265a11942e5864d5fd2e8c6a26db961a65928ee61c3c493a0c7ac8bf":[9,0,1,0,2], 
"class_drv_1_1_block_driver_component_base.html#a0f29b2d80265a11942e5864d5fd2e8c6a6c1aafd46a5ca782a42d858c4a1ff513":[9,0,1,0,0], "class_drv_1_1_block_driver_component_base.html#a0f29b2d80265a11942e5864d5fd2e8c6ab8d6ea21c580b87ac1479f008e5a1d93":[9,0,1,0,1], "class_drv_1_1_block_driver_component_base.html#a145850642691c6ece8bf99ca1f835982":[9,0,1,0,43], "class_drv_1_1_block_driver_component_base.html#a170de941f4411f9e4b0951ab80412f42":[9,0,1,0,47], "class_drv_1_1_block_driver_component_base.html#a21bb933049dc0217ec4d63cc34ae8727":[9,0,1,0,34], "class_drv_1_1_block_driver_component_base.html#a2aadc3ba221dc6bf792fb4c75897ea02a12792ac06322c3c82b135776ca060950":[9,0,1,0,6], "class_drv_1_1_block_driver_component_base.html#a2aadc3ba221dc6bf792fb4c75897ea02a15349747f78c1a7ff79ddbd1dd910310":[9,0,1,0,5], "class_drv_1_1_block_driver_component_base.html#a2aadc3ba221dc6bf792fb4c75897ea02aa8e2aef8c33544a3b8fe4ec22ec6c71d":[9,0,1,0,3], "class_drv_1_1_block_driver_component_base.html#a2aadc3ba221dc6bf792fb4c75897ea02ac06b8c39b5b86d4c47143c3a61c2be8b":[9,0,1,0,4], "class_drv_1_1_block_driver_component_base.html#a2aadc3ba221dc6bf792fb4c75897ea02ad4afa310ec4176e8001ad81bcc5b3418":[9,0,1,0,7], "class_drv_1_1_block_driver_component_base.html#a307cb95864af7b493312db088d02d9f6":[9,0,1,0,31], "class_drv_1_1_block_driver_component_base.html#a31422b8637347c0bd59d183fe526b430":[9,0,1,0,27], "class_drv_1_1_block_driver_component_base.html#a3d59fed97da4190903fa0a753ac29472":[9,0,1,0,19], "class_drv_1_1_block_driver_component_base.html#a3e423a2195c580bc2a3dece3651e15b2":[9,0,1,0,38], "class_drv_1_1_block_driver_component_base.html#a41640831004d9ccbb9387d2d97a5e7fc":[9,0,1,0,26] };
tarsoqueiroz/NodeJS
Blockchain/Ethereum/web3App/anderson.js
/**
 * anderson
 *
 * Created by tarso on 29/11/18
 */

// Load the account-key list exported by the local xunda module.
const accountKeys = require('./accountKeys/xunda');

// Dump the whole list, a banner, the first entry, and that entry's
// DtFinalizacao field — same output, in the same order, as before.
console.log(accountKeys);
console.log('**** XUNDA ****');
console.log(accountKeys[0]);
console.log(accountKeys[0].DtFinalizacao);
benjaminadk/benjaminadk
src/components/seo.js
// Gatsby SEO component: renders <Helmet> meta/OpenGraph/Twitter tags plus a
// schema.org JSON-LD payload for the site, blog posts, videos and item lists.
import React from 'react'
import { StaticQuery, graphql } from 'gatsby'
import Helmet from 'react-helmet'

/**
 * Build the schema.org JSON-LD array for a page.
 *
 * Always includes a WebSite entity; conditionally appends a VideoObject
 * (when `videoObject` is given), a BlogPosting (when `isBlogPost`), and a
 * caller-supplied `itemList` entity, in that order.
 */
const getSchemaOrgJSONLD = ({
  title,
  description,
  url,
  author,
  siteUrl,
  keywords,
  image,
  videoObject,
  isBlogPost,
  itemList,
  datePublished
}) => {
  // Always emitted: the WebSite entity for this page's URL.
  const schemaWebsite = [
    {
      '@context': 'http://schema.org',
      '@type': 'WebSite',
      url,
      name: title,
      alternateName: author
    }
  ]

  // `videoObject` is read positionally as
  // [name, description, contentUrl, embedUrl, thumbnailUrl] — TODO confirm
  // this tuple shape against the callers that pass it.
  const schemaVideoObject = videoObject
    ? [
        {
          '@context': 'http://schema.org',
          '@type': 'VideoObject',
          name: videoObject[0],
          description: videoObject[1],
          contentUrl: videoObject[2],
          embedUrl: videoObject[3],
          thumbnailUrl: videoObject[4],
          uploadDate: datePublished
        }
      ]
    : []

  // BlogPosting entity; `dateModified` intentionally mirrors `datePublished`
  // because no separate modification date is tracked here.
  const schemaBlogPosting = isBlogPost
    ? [
        {
          '@context': 'http://schema.org',
          '@type': 'BlogPosting',
          url,
          name: title,
          alternateName: author,
          headline: title,
          image: {
            '@type': 'ImageObject',
            url: image
          },
          description,
          keywords,
          author: {
            '@type': 'Person',
            name: author
          },
          publisher: {
            '@type': 'Organization',
            url: siteUrl,
            logo: {
              '@type': 'ImageObject',
              url: `${siteUrl}/icons/icon-512x512.png`,
              width: '512',
              height: '512'
            },
            name: author
          },
          mainEntityOfPage: {
            '@type': 'WebSite',
            '@id': siteUrl
          },
          datePublished,
          dateModified: datePublished
        }
      ]
    : []

  // `itemList` is passed through verbatim when provided (see the
  // commented-out BreadcrumbList example at the bottom of this file for a
  // candidate shape).
  const schemaItemList = itemList ? [itemList] : []

  return [
    ...schemaWebsite,
    ...schemaBlogPosting,
    ...schemaVideoObject,
    ...schemaItemList
  ]
}

/**
 * SEO head-tag component. Pulls site-wide metadata via StaticQuery, merges
 * it with the per-page props, and renders the resulting tags with Helmet.
 */
function SEO({
  title,
  subtitle,
  description,
  meta,
  image: metaImage,
  pathname,
  isBlogPost,
  videoObject,
  itemList,
  datePublished
}) {
  return (
    <StaticQuery
      query={graphql`
        {
          site {
            siteMetadata {
              author
              description
              siteUrl
              keywords
              social {
                twitterHandle
              }
            }
          }
        }
      `}
      render={data => {
        const { author, keywords, siteUrl } = data.site.siteMetadata
        // Browser-tab / OG title: "<page title> | <section subtitle>".
        const metaTitle = `${title} | ${subtitle}`
        // Per-page description wins; fall back to the site-wide one.
        const metaDescription = description || data.site.siteMetadata.description
        // Absolute image URL for sharing cards. When there is no metaImage
        // but there is a video, metaImage is forwarded as-is — presumably
        // already an absolute URL in that case; TODO confirm with callers.
        const image =
          metaImage && metaImage.src ? `${siteUrl}${metaImage.src}` : videoObject ? metaImage : null
        const metaUrl = `${siteUrl}${pathname}`
        const schemaOrgJSONLD = getSchemaOrgJSONLD({
          title: metaTitle,
          description: metaDescription,
          url: metaUrl,
          author,
          siteUrl,
          keywords,
          image,
          videoObject,
          isBlogPost,
          itemList,
          datePublished
        })
        return (
          <Helmet
            htmlAttributes={{ lang: 'en' }}
            title={title}
            titleTemplate={`%s | ${subtitle}`}
            meta={[
              {
                name: `description`,
                content: metaDescription
              },
              {
                name: `keywords`,
                content: data.site.siteMetadata.keywords.join(',')
              },
              {
                property: `og:type`,
                content: 'website'
              },
              {
                property: `og:url`,
                content: metaUrl
              },
              {
                property: `og:title`,
                content: metaTitle
              },
              {
                property: `og:description`,
                content: metaDescription
              },
              {
                name: `twitter:creator`,
                // NOTE(review): the query above exposes twitterHandle under
                // siteMetadata.social, but this reads siteMetadata.twitterHandle
                // directly — likely undefined; verify and fix the lookup path.
                content: data.site.siteMetadata.twitterHandle
              },
              {
                name: `twitter:title`,
                content: metaTitle
              },
              {
                name: `twitter:description`,
                content: metaDescription
              }
            ]
              // With an image: full OG image tags + large Twitter card;
              // without: plain summary card.
              .concat(
                metaImage
                  ? [
                      {
                        property: `og:image`,
                        content: image
                      },
                      {
                        property: `og:image:alt`,
                        content: metaTitle
                      },
                      {
                        property: `og:image:width`,
                        content: metaImage.width
                      },
                      {
                        property: `og:image:height`,
                        content: metaImage.height
                      },
                      {
                        name: `twitter:card`,
                        content: `summary_large_image`
                      }
                    ]
                  : [
                      {
                        name: `twitter:card`,
                        content: `summary`
                      }
                    ]
              )
              // Caller-supplied extra meta entries come last.
              .concat(meta)}
          >
            <script type='application/ld+json'>
              {JSON.stringify(schemaOrgJSONLD)}
            </script>
          </Helmet>
        )
      }}
    />
  )
}

SEO.defaultProps = {
  title: '<NAME>',
  subtitle: 'Home',
  description:
    'A software development portfolio and blog. Post, projects and visualizations are built with JavaScript, React, Node, D3, GraphQL, Electron, PHP, WordPress, to name just a few. ',
  meta: []
}

export default SEO

// Example `itemList` shape (BreadcrumbList):
// {
//   '@context': 'http://schema.org',
//   '@type': 'BreadcrumbList',
//   itemListElement: [
//     {
//       '@type': 'ListItem',
//       position: 1,
//       item: {
//         '@id': url,
//         name: title,
//         image
//       }
//     }
//   ]
// },
krattai/AEBL
blades/xbmc/xbmc/windowing/egl/wayland/WaylandLibraries.h
#pragma once
/*
 *      Copyright (C) 2011-2013 Team XBMC
 *      http://xbmc.org
 *
 *  This Program is free software; you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation; either version 2, or (at your option)
 *  any later version.
 *
 *  This Program is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 *  GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with XBMC; see the file COPYING.  If not, see
 *  <http://www.gnu.org/licenses/>.
 *
 */
#include <boost/noncopyable.hpp>

#include "windowing/DllWaylandClient.h"
#include "windowing/DllWaylandEgl.h"
#include "windowing/DllXKBCommon.h"

namespace xbmc
{
namespace wayland
{
/* RAII wrapper around a dynamically loaded library: the constructor loads
 * the DLL (via LoadLibrary below) and the destructor unloads it, so the
 * library's lifetime is tied to the wrapper object. Non-copyable because
 * two owners would double-unload. */
template <class DllInterface, class Dll>
class AutoloadDll :
  boost::noncopyable
{
public:

  AutoloadDll();
  ~AutoloadDll();

  /* Access the loaded library through its abstract interface. */
  DllInterface & Get();

private:

  Dll m_dll;
};

/* Aggregates the three Wayland-related libraries the windowing code needs,
 * each auto-loaded on construction of this object. */
class Libraries :
  boost::noncopyable
{
public:

  IDllWaylandClient & ClientLibrary();
  IDllWaylandEGL & EGLLibrary();
  IDllXKBCommon & XKBCommonLibrary();

private:

  AutoloadDll<IDllWaylandClient, DllWaylandClient> m_clientLibrary;
  AutoloadDll<IDllWaylandEGL, DllWaylandEGL> m_eglLibrary;
  AutoloadDll<IDllXKBCommon, DllXKBCommon> m_xkbCommonLibrary;
};

/* Loads the given DLL; defined in the corresponding .cpp.
 * NOTE(review): failure behaviour (throw vs. log) is not visible from this
 * header — confirm in the implementation before relying on it. */
void LoadLibrary(DllDynamic &dll);

/* Load the library as soon as the wrapper is constructed. */
template <class DllInterface, class Dll>
AutoloadDll<DllInterface, Dll>::AutoloadDll()
{
  LoadLibrary(m_dll);
}

template <class DllInterface, class Dll>
DllInterface &
AutoloadDll<DllInterface, Dll>::Get()
{
  return m_dll;
}

/* Unload on destruction — the other half of the RAII contract. */
template <class DllInterface, class Dll>
AutoloadDll<DllInterface, Dll>::~AutoloadDll()
{
  m_dll.Unload();
}
}
}
rajakolluru/chenile
stm/src/main/java/org/chenile/stm/STMSecurityStrategy.java
package org.chenile.stm;

/**
 * An interface that would be used to invoke a security strategy.
 * The strategy would be used to check if a certain transition is allowed for a principal.
 * The notion of principals is not there in STM. Instead, we assume that this strategy has
 * access to the invoking principal (via a thread local for instance) that would allow it
 * to determine if the principal has the requisite privileges.
 * @author <NAME>
 *
 */
public interface STMSecurityStrategy {
	/**
	 * Decide whether the current principal may perform the transition guarded
	 * by the given ACLs.
	 *
	 * @param acls the access-control identifiers attached to the transition
	 * @return true if the invoking principal holds the requisite privileges
	 */
	public boolean isAllowed(String... acls);
}
pavel-alay/drebedengi-api
src/main/java/com/alay/drebedengi/soap/responses/GetCurrentRevisionReturn.java
package com.alay.drebedengi.soap.responses;

import com.alay.drebedengi.soap.responses.base.StringResponse;

/**
 * Marker response type for the SOAP "getCurrentRevision" call.
 * Adds no members of its own: all behaviour is inherited from
 * {@link StringResponse} — presumably the revision value is carried as the
 * string payload; confirm against the StringResponse base class.
 */
public class GetCurrentRevisionReturn extends StringResponse<GetCurrentRevisionReturn> {
}
rasmus-rudling/crazyswarm
ros_ws/src/crazyswarm/externalDependencies/libmotioncapture/deps/vrpn/vrpn_android/comp523android/src/edu/unc/cs/vrpn/VrpnSeekBarChangeListener.java
package edu.unc.cs.vrpn;

import jni.JniLayer;
import android.widget.SeekBar;

/**
 * Track changes to the value of a seek bar and notify VRPN clients. This code is fairly
 * self-explanatory and seekbar-specific, so it is not documented as extensively as
 * {@link Main} and {@link JniBuilder}
 *
 * @author Ted
 *
 */
public class VrpnSeekBarChangeListener implements SeekBar.OnSeekBarChangeListener
{
	/**
	 * The JniBuilder which has an analog for this seekbar
	 */
	private JniBuilder builder;
	/**
	 * The JniLayer which {@link VrpnSeekBarChangeListener#builder} created.
	 * NOTE(review): this stays null when the builder is not locked at
	 * construction time, and {@link #onProgressChanged} dereferences it
	 * unconditionally — confirm callers guarantee a locked builder, or an NPE
	 * is possible on the first progress event.
	 */
	private JniLayer layer = null;
	/**
	 * A reference to this seekbar analog from {@link VrpnSeekBarChangeListener#builder}.
	 * This could be recreated by going to that object, but it is cached here.
	 */
	private JniBuilder.Analog analog;
	/**
	 * Another cached JniBuilder value (the analog's index within the builder)
	 */
	private int cachedAnalog;
	/**
	 * It is necessary for the SeekBarChangeListener to have a reference to the
	 * {@link Main} activity. For obvious reasons this is not particularly loosely-coupled
	 * so it would be best to refactor this before attempting serious modifications.
	 */
	private Main activity = null;
	// Last log message produced by onProgressChanged.
	// NOTE(review): read in onStopTrackingTouch — if the bar is released
	// without any progress change this is still null and logs "null (released)".
	private String msg;

	public VrpnSeekBarChangeListener(JniBuilder builder, JniBuilder.Analog analog)
	{
		this.builder = builder;
		this.analog = analog;
		// Cache the analog index so progress events avoid a lookup per change.
		this.cachedAnalog = this.builder.getAnalogIndex(this.analog);
		// Only a locked builder can produce the JNI layer (see note on `layer`).
		if (this.builder.isLocked())
			this.layer = builder.toJni();
	}

	public VrpnSeekBarChangeListener(JniBuilder builder, JniBuilder.Analog analog, Main activity)
	{
		this(builder , analog);
		this.activity = activity;
	}

	@Override
	public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser)
	{
		// Forward the new value to VRPN and report the change.
		this.layer.updateAnalogVal(this.cachedAnalog , 0 , progress); // Should compute
																		// these
		this.layer.reportAnalogChg(this.cachedAnalog);

		// Mirror the change into the activity's on-screen log, if attached.
		if (this.activity != null)
		{
			this.msg = "Seek bar at " + progress + "%";
			this.activity.logSlider(msg);
		}
	}

	@Override
	public void onStartTrackingTouch(SeekBar seekBar)
	{
		if (this.activity != null)
			this.activity.logSlider("Seekbar Pressed");
	}

	@Override
	public void onStopTrackingTouch(SeekBar seekBar)
	{
		if (this.activity != null)
		{
			this.activity.logSlider(this.msg + " (released)");
		}
	}
}
Liyara/JGL
src/Event.cpp
#include "Event.h"

namespace jgl {

    // Event carrying an (x, y) coordinate pair.
    Event::Event(Type eventType, double xValue, double yValue) :
        type(eventType), x(xValue), y(yValue) {}

    // Event carrying an input code, the action performed, and modifier bits.
    Event::Event(Type eventType, int inputCode, Action inputAction, int modifierBits) :
        type(eventType), code(inputCode), action(inputAction), mods(modifierBits) {}

    // Event carrying a width/height pair.
    Event::Event(Type eventType, int newWidth, int newHeight) :
        type(eventType), width(newWidth), height(newHeight) {}

    // Event identified by its type alone.
    Event::Event(Type eventType) : type(eventType) {}

}
demograph/monotonicmap
crdt/src/main/scala/io/demograph/crdt/delta/dot/DotStore.scala
/*
 * Copyright 2017 <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.demograph.crdt.delta.dot

/**
 * The state of a causal CRDT will use some kind of dot store, which acts as a container for data-type specific
 * information. A dot store can be queried about the set of event identifiers (events) corresponding to the relevant
 * operations in the container, by function events, which takes a dot store and returns a set of events
 *
 * @tparam E the type of event identifier ("dot") held by this store
 */
trait DotStore[E] {
  /** The set of event identifiers currently contained in this store. */
  def dots: Set[E]
}
Pokoi/TortillaEngine
Documentation/html/structglm_1_1detail_1_1make__unsigned_3_01long_01_4.js
// Doxygen-generated navigation data for the GLM `detail::make_unsigned<long>`
// documentation page — regenerate the docs rather than editing by hand.
var structglm_1_1detail_1_1make__unsigned_3_01long_01_4 =
[
    [ "type", "structglm_1_1detail_1_1make__unsigned_3_01long_01_4.html#ade0cc74f63e30969e7d7b42eb6ac8289", null ]
];
shir0tetsuo/avaira
com/observe.js
const settings = require('../settings.json') exports.run = (client, message, params, perms) => { let member = message.mentions.users.first() if (!message.mentions.users.first()) return; try { const tag = client.dbusers.create({ user_id: member.id, permission: 0, level: 1, silver: 10, gold: 0, mrecord: 1, }).catch(e => { //console.log(e) }) } catch (e) { if (e.name === 'SequelizeUniqueConstraintError') { console.log('Tag Exists') } else { //console.log(e) } } finally { const tag = client.dbusers.findOne({ where: { user_id: member.id } }).then(t => { message.author.send(`\`user_id ${t.user_id}\` LEVEL \`${t.level}\` AUTH**\`${t.permission}\`** S\`${t.silver}\` G\`${t.gold}\` MREC\`${t.mrecord}\``) }) } } exports.conf = { enabled: true, guildOnly: true, aliases: ['o'], permLevel: 1 }; exports.help = { name: 'observe', description: 'Display LEVEL, SILVER, GOLD, AUTH, MREC information about a user', usage: 'o [mention]' };
CarGuo/LazyRecyclerAdapter
app/src/main/java/com/shuyu/apprecycler/special/view/CustomRefreshHeader.java
package com.shuyu.apprecycler.special.view;

import android.animation.ValueAnimator;
import android.content.Context;
import android.graphics.drawable.AnimationDrawable;
import android.os.Handler;
import android.util.AttributeSet;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import android.widget.TextView;

import com.jcodecraeer.xrecyclerview.base.BaseRefreshHeader;
import com.shuyu.apprecycler.R;

import butterknife.BindView;
import butterknife.ButterKnife;

/**
 * Pull-to-refresh header view that plays a frame animation ({@link AnimationDrawable})
 * while refreshing and reveals itself by growing the height of an inner container
 * from 0 to its measured height.
 *
 * Created by guoshuyu on 2017/1/8.
 */
public class CustomRefreshHeader extends BaseRefreshHeader {

    // Frame-animated image shown while the header is visible.
    @BindView(R.id.custom_refresh_img)
    ImageView mCustomRefreshImg;
    // Status text ("pull", "release", "refreshing", "done").
    @BindView(R.id.custom_refresh_txt)
    TextView mCustomRefreshTxt;

    // Inner container whose height is animated; the header view itself keeps
    // WRAP_CONTENT and is revealed by growing this child.
    private RelativeLayout mContainer;

    private AnimationDrawable mAnimationDrawable;

    private int mState = STATE_NORMAL;

    // Full (natural) height of the header content, captured once in initView().
    public int mMeasuredHeight;

    public CustomRefreshHeader(Context context) {
        super(context);
        initView();
    }

    /**
     * @param context
     * @param attrs
     */
    public CustomRefreshHeader(Context context, AttributeSet attrs) {
        super(context, attrs);
        initView();
    }

    private void initView() {
        // Initial state: the pull-to-refresh view starts with height 0 so it is hidden.
        mContainer = (RelativeLayout) LayoutInflater.from(getContext()).inflate(
                R.layout.layout_horizontal_custom_refresh_header, null);
        LayoutParams lp = new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT);
        lp.setMargins(0, 0, 0, 0);
        this.setLayoutParams(lp);
        this.setPadding(0, 0, 0, 0);

        // The container is added with height 0; setVisibleHeight() grows it during a pull.
        addView(mContainer, new LayoutParams(LayoutParams.MATCH_PARENT, 0));
        setGravity(Gravity.BOTTOM);

        ButterKnife.bind(this, mContainer);

        // Measure once so mMeasuredHeight reflects the fully-revealed header height.
        measure(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT);
        mMeasuredHeight = getMeasuredHeight();

        mAnimationDrawable = (AnimationDrawable) mCustomRefreshImg.getDrawable();
    }

    // Progress style is fixed by the custom layout; the base-class hook is a no-op here.
    @Override
    public void setProgressStyle(int style) {
    }

    // This header has no arrow image; the base-class hook is a no-op here.
    @Override
    public void setArrowImageView(int resid) {
    }

    /**
     * Transitions the header to {@code state}, starting/stopping the frame
     * animation and updating the status text accordingly.
     */
    @Override
    public void setState(int state) {
        if (state == mState) return;

        if (state == STATE_REFRESHING) {
            // Show progress animation.
            mAnimationDrawable.start();
        } else if (state == STATE_DONE) {
            mAnimationDrawable.stop();
        } else {
            // Any other state: stop the progress animation.
            mAnimationDrawable.stop();
        }

        switch (state) {
            case STATE_NORMAL:
                // NOTE(review): the two empty if-blocks below look like leftovers of
                // removed transition handling; they have no effect.
                if (mState == STATE_RELEASE_TO_REFRESH) {
                }
                if (mState == STATE_REFRESHING) {
                }
                mCustomRefreshTxt.setText("看到了我吧!");
                break;
            case STATE_RELEASE_TO_REFRESH:
                if (mState != STATE_RELEASE_TO_REFRESH) {
                    mCustomRefreshTxt.setText("放开我刷新!");
                }
                break;
            case STATE_REFRESHING:
                mCustomRefreshTxt.setText("刷新中!");
                break;
            case STATE_DONE:
                mCustomRefreshTxt.setText("刷新好了哟!");
                break;
            default:
        }

        mState = state;
    }

    @Override
    public int getState() {
        return mState;
    }

    /** Marks the refresh as done, then collapses the header shortly after. */
    @Override
    public void refreshComplete() {
        setState(STATE_DONE);
        // Small delay so the "done" text is visible before the header collapses.
        new Handler().postDelayed(new Runnable() {
            public void run() {
                reset();
            }
        }, 200);
    }

    /** Sets the revealed height of the inner container (clamped to >= 0). */
    @Override
    public void setVisibleHeight(int height) {
        if (height < 0) height = 0;
        LayoutParams lp = (LayoutParams) mContainer.getLayoutParams();
        lp.height = height;
        mContainer.setLayoutParams(lp);
    }

    @Override
    public int getVisibleHeight() {
        LayoutParams lp = (LayoutParams) mContainer.getLayoutParams();
        return lp.height;
    }

    /**
     * Called while the user drags; grows/shrinks the header by {@code delta} and,
     * when not yet refreshing, flips between NORMAL and RELEASE_TO_REFRESH
     * depending on whether the header is fully revealed.
     */
    @Override
    public void onMove(float delta) {
        if (getVisibleHeight() > 0 || delta > 0) {
            setVisibleHeight((int) delta + getVisibleHeight());
            if (mState <= STATE_RELEASE_TO_REFRESH) { // Not refreshing yet: update the hint state.
                if (getVisibleHeight() > mMeasuredHeight) {
                    setState(STATE_RELEASE_TO_REFRESH);
                } else {
                    setState(STATE_NORMAL);
                }
            }
        }
    }

    /**
     * Called when the user releases the drag.
     *
     * @return true if this release triggered a refresh.
     */
    @Override
    public boolean releaseAction() {
        boolean isOnRefresh = false;
        int height = getVisibleHeight();
        // NOTE(review): this branch is a no-op — isOnRefresh is already false.
        if (height == 0) // not visible.
            isOnRefresh = false;

        if (getVisibleHeight() > mMeasuredHeight && mState < STATE_REFRESHING) {
            setState(STATE_REFRESHING);
            isOnRefresh = true;
        }

        // refreshing and header isn't shown fully. do nothing.
        if (mState == STATE_REFRESHING && height <= mMeasuredHeight) {
            //return;
        }

        int destHeight = 0; // default: scroll back to dismiss header.
        // is refreshing, just scroll back to show all the header.
        if (mState == STATE_REFRESHING) {
            destHeight = mMeasuredHeight;
        }
        smoothScrollTo(destHeight);

        return isOnRefresh;
    }

    /** Collapses the header, then returns to NORMAL after the animation has had time to finish. */
    @Override
    public void reset() {
        smoothScrollTo(0);
        new Handler().postDelayed(new Runnable() {
            public void run() {
                setState(STATE_NORMAL);
            }
        }, 500);
    }

    // Animates the visible height from its current value to destHeight.
    private void smoothScrollTo(int destHeight) {
        ValueAnimator animator = ValueAnimator.ofInt(getVisibleHeight(), destHeight);
        // NOTE(review): the animator is started here AND again below after the
        // listener is attached — the first start() races with addUpdateListener().
        // A single start() after the listener is attached looks intended; confirm.
        animator.setDuration(300).start();
        animator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
            @Override
            public void onAnimationUpdate(ValueAnimator animation) {
                setVisibleHeight((int) animation.getAnimatedValue());
            }
        });
        animator.start();
    }
}
MariaKhantech/Envira
client/src/components/ProfileImage/index.js
import React, { Component } from 'react'
import { Auth } from 'aws-amplify';
import { Storage } from "aws-amplify";
import Axios from 'axios';
import "./profileImage.css"

/**
 * Profile image widget: loads the logged-in user's stored image (name in the
 * app DB, bytes in S3), previews a locally selected file, and uploads/updates it.
 */
export default class index extends Component {

    state = {
        profile: [],                     // user record from /api/auth/user/:username
        selectedFile: '',                // File object chosen via the file input
        imagePreviewUrl: '',             // data-URL (local preview) or S3 URL
        selectedFileName: 'Choose file', // label text / S3 key for the upload
        imageName: [],                   // image record from /api/auth/image/:id
        successMessage: ""
    }

    // this function is to display the success message (auto-clears after 3s)
    setSuccessMessage(message) {
        this.setState({ successMessage: message });
        setTimeout(() => { this.setState({ successMessage: '' }); }, 3000)
    }

    // When the page loads for the first time get the logged in user info
    async componentDidMount() {
        try {
            // get the current logged in user details
            const user = await Auth.currentAuthenticatedUser();
            // get username from user object
            const userDetail = user.username;
            // get the user details for logged in user from the User table
            Axios.get(`/api/auth/user/${userDetail}`)
                .then((response) => {
                    console.log(response)
                    this.setState({
                        profile: response.data,
                    });
                    // call this function to get the logged in user's existing image
                    this.getImage()
                })
                .catch(err => console.log(err))
        } catch (error) {
            // Auth.currentAuthenticatedUser rejects with the string "No current user"
            // when nobody is signed in; that case is expected and not logged.
            if (error !== "No current user") {
                console.log(error);
            }
        }
    }

    // get the user image record from the database by the user's DB id
    getImage = () => {
        const UserId = this.state.profile.id
        Axios.get(`/api/auth/image/${UserId}`)
            .then((response) => {
                // NOTE(review): imageName is initialised as [] (truthy) but is used
                // as an object below and in render's truthiness checks — so the
                // "Update" button renders even before any image exists; confirm
                // the API returns null/undefined vs an object here.
                this.setState({
                    imageName: response.data
                });
                // call this function to get the image from S3
                this.getImageFromS3()
            })
            .catch(err => console.log(err))
    }

    // get a (signed) URL for the stored image from S3 and use it as the preview
    getImageFromS3 = () => {
        let fileName = this.state.imageName.image_name
        Storage.get(fileName)
            .then((data) => {
                this.setState({
                    imagePreviewUrl: data
                });
            })
            .catch(err => console.log(err))
    }

    // upload image into S3 also save image name in image model
    handleImageUpload = async (event) => {
        event.preventDefault();
        // save image in S3 (key = original file name)
        // NOTE(review): no guard for "no file selected" — selectedFile is '' initially.
        await Storage.put(this.state.selectedFileName, this.state.selectedFile);
        const { selectedFileName } = this.state
        const UserId = this.state.profile.id;
        // post image name in image model
        Axios.post("/api/auth/image", {
            selectedFileName,
            UserId
        }).then(() => {
            this.setSuccessMessage('Image uploaded successfully');
        }).catch(err => console.log(err.message))
    }

    // upload image into S3 also update image name in image model
    handleImageUpdate = async (event) => {
        event.preventDefault();
        // save image in S3
        await Storage.put(this.state.selectedFileName, this.state.selectedFile);
        const { selectedFileName } = this.state
        const UserId = this.state.profile.id;
        // post image name in image model
        Axios.put(`/api/auth/image/${UserId}`, {
            selectedFileName,
        }).then(() => {
            this.setSuccessMessage('Image uploaded successfully');
        }).catch(err => console.log(err.message))
    }

    // call this function on the input change and update the state
    // (reads the file as a data URL so it can be previewed immediately)
    handleImageChange = (event) => {
        event.preventDefault();
        let reader = new FileReader();
        let file = event.target.files[0];
        reader.onloadend = () => {
            this.setState({
                selectedFile: file,
                selectedFileName: file.name,
                imagePreviewUrl: reader.result,
                displayUploadButton: true
            });
        }
        reader.readAsDataURL(file)
    }

    render() {
        let { imagePreviewUrl } = this.state;
        let $imagePreview = null;
        // Show the image when a preview/stored URL exists, else a placeholder.
        if (imagePreviewUrl) {
            $imagePreview = (<img className="profile-img-update" src={imagePreviewUrl} />);
        } else {
            $imagePreview = (<div className="previewText">Please select an Image for Preview</div>);
        }
        return (
            <>
                <div className="text-success">{this.state.successMessage}</div>
                <div className="previewComponent ">
                    <div className="imgPreview">
                        {$imagePreview}
                    </div>
                    <div className="row justify-content-center mt-5">
                        <div className="input-group mb-3 px-2 py-2 rounded-pill bg-white shadow-sm">
                            <input id="upload" type="file" onChange={this.handleImageChange} className="form-control border-0" />
                            <label id="upload-label" htmlFor="upload" className="font-weight-light text-muted">
                                {this.state.selectedFileName}
                            </label>
                            <div className="input-group-append">
                                <label htmlFor="upload" className="btn btn-secondary m-0 rounded-pill px-2">
                                    {' '}
                                    <i className="fa fa-cloud-upload mr-1 text-white" />
                                    <small className="text-uppercase font-weight-bold text-white">
                                        Choose file
                                    </small>
                                </label>
                            </div>
                        </div>
                    </div>
                    {!this.state.imageName && (
                        <div className="row justify-content-center mt-2 ">
                            <button onClick={this.handleImageUpload} className="btn btn-primary">Upload</button>
                        </div>
                    )}
                    {this.state.imageName && (
                        <div className="row justify-content-center mt-2 ">
                            <button onClick={this.handleImageUpdate} className="btn btn-primary">Update</button>
                        </div>
                    )}
                </div>
            </>
        )
    }
}
NEPOLIX/Misha-Android
MishaLDB/src/main/java/com/nepolix/misha/db/json/JSONHelper.java
/******************************************************************************
 * Copyright © 2015-7532 NOX, Inc. [NEPOLIX]-(<NAME>) * All rights
 * reserved. * * The source * code, other & all material, and documentation
 * contained herein are, and * remains the property of HEX * Inc.
 * and its suppliers, if any. The intellectual and technical * concepts
 * contained herein are * proprietary to NOX Inc. and its
 * suppliers and may be covered by U.S. and Foreign * Patents, patents *
 * in process, and are protected by trade secret or copyright law.
 * Dissemination of the * foregoing material or * reproduction of this
 * material is strictly forbidden forever. *
 ******************************************************************************/

package com.nepolix.misha.db.json;

import com.nepolix.misha.android.sdk.json.JSONArray;
import com.nepolix.misha.android.sdk.json.JSONException;
import com.nepolix.misha.android.sdk.json.JSONObject;
import com.nepolix.misha.db.common.DBCommons;
import com.nepolix.misha.db.exception.DBException;

import java.util.HashMap;
import java.util.List;

import static com.nepolix.misha.db.common.DBCommons.*;

/**
 * Static helpers for structural migrations of stored JSON documents:
 * flattening, deleting a dotted field chain (e.g. {@code "a.b.c"}), renaming a
 * field chain, and rewriting leaf values (with type coercion) along a chain.
 *
 * Field chains descend through nested {@code JSONObject}s; when a chain element
 * is a {@code JSONArray}, the operation is applied to each object element of
 * the array. Arrays nested inside arrays are explicitly unsupported.
 *
 * @author <NAME>
 * @since 1/5/17
 */
public class JSONHelper {

    /**
     * Flattens all (nested) fields of {@code object} into a
     * {@link FlattenJSONObject}. A {@link JSONException} during flattening is
     * swallowed (only printed), so the result may be partially filled.
     */
    public static FlattenJSONObject flattenObjectFields ( JSONObject object ) {
        FlattenJSONObject flattenJSONObject = new FlattenJSONObject ( );
        try {
            FlattenJSONObject.flattenFields ( flattenJSONObject, object, "" );
        } catch ( JSONException e ) {
            e.printStackTrace ( );
        }
        return flattenJSONObject;
    }

    /**
     * keeps the original json untouched
     *
     * @param jsonObject source document (cloned before modification)
     * @param previousFieldNChainName dotted field chain to delete, e.g. "a.b.c"
     *
     * @return a clone of {@code jsonObject} with the chain's leaf removed
     */
    public static JSONObject deleteFieldChain ( JSONObject jsonObject,
                                                String previousFieldNChainName ) {
        String fields[] = previousFieldNChainName.split ( "\\." );
        JSONObject object = jsonObject.clone ( );
        deleteOneFieldInChain$JSONObject ( object, fields, 0 );
        return object;
    }

    // Recursive step over a JSONObject: descend into nested objects/arrays for
    // the current chain element, remove the field once a scalar leaf is reached.
    private static void deleteOneFieldInChain$JSONObject ( JSONObject jsonObject,
                                                           String fieldChainArray[],
                                                           int idxField ) {
        List< String > keyFields = jsonObject.keys ( );
        for ( String field : keyFields ) {
            if ( field.equals ( fieldChainArray[ idxField ] ) ) {
                Object object = jsonObject.opt ( field );
                if ( object != null ) {
                    if ( object.getClass ( )
                               .equals ( JSONObject.class ) ) {
                        deleteOneFieldInChain$JSONObject ( ( JSONObject ) object,
                                                           fieldChainArray,
                                                           idxField + 1 );
                    } else {
                        if ( object.getClass ( )
                                   .equals ( JSONArray.class ) ) {
                            // Same chain index: the array itself matches this
                            // element; its object members are visited below.
                            deleteOneFieldInChain$JSONArray ( ( JSONArray ) object,
                                                              fieldChainArray,
                                                              idxField );
                        } else {
                            // Scalar leaf: actually remove the field.
                            // NOTE(review): removal happens at whatever depth a
                            // scalar is found, even if idxField has not reached
                            // the end of the chain — confirm this is intended.
                            jsonObject.remove ( field );
                        }
                    }
                }
            }
        }
    }

    // Recursive step over a JSONArray: recurse into object elements; scalar
    // elements are removed in place. Arrays inside arrays are unsupported.
    private static void deleteOneFieldInChain$JSONArray ( JSONArray jsonArray,
                                                          String fieldChainArray[],
                                                          int idxField ) {
        for ( int i = 0 ; i < jsonArray.length ( ) ; ++i ) {
            Object o = jsonArray.opt ( i );
            if ( o != null ) {
                if ( o.getClass ( )
                      .equals ( JSONObject.class ) ) {
                    deleteOneFieldInChain$JSONObject ( ( JSONObject ) o,
                                                       fieldChainArray,
                                                       idxField + 1 );
                } else {
                    if ( o.getClass ( )
                          .equals ( JSONArray.class ) ) {
                        //WE DO NOT SUPPORT
                    } else {
                        // Removing shifts the remaining elements left; step back
                        // so the next element is not skipped.
                        jsonArray.remove ( i );
                        --i;
                    }
                }
            }
        }
    }

    /**
     * Rewrites the leaf value(s) at the given field chain in place (mutates
     * {@code jsonObject}), coercing to {@code newFieldType} via
     * {@code mapValue} (old stringified value -> new value).
     */
    public static JSONObject updateFieldChainValue ( JSONObject jsonObject,
                                                     String previousFieldNChainName,
                                                     String newFieldType,
                                                     HashMap< String, String > mapValue )
            throws DBException {
        String fields[] = previousFieldNChainName.split ( "\\." );
        updateFieldValues ( jsonObject, fields, 0, newFieldType, mapValue );
        return jsonObject;
    }

    /**
     * Renames every element of a field chain in place. An empty/null
     * {@code newFieldChain} means "delete the chain" instead. Both chains must
     * have the same number of elements.
     */
    public static JSONObject updateFieldChainName ( JSONObject jsonObject,
                                                    String previousFieldNChainName,
                                                    String newFieldChain )
            throws DBException {
        if ( newFieldChain == null || newFieldChain.isEmpty ( ) )
            return deleteFieldChain ( jsonObject, previousFieldNChainName );
        String fields[] = previousFieldNChainName.split ( "\\." );
        String newFields[] = newFieldChain.split ( "\\." );
        if ( newFields.length != fields.length ) {
            throw new DBException ( "new field chain does not have "
                                    + "the same elements as old "
                                    + "field chain to rename them" );
        }
        updateFieldChainName ( jsonObject, fields, newFields, 0 );
        return jsonObject;
    }

    // Recursive rename step: re-keys the current element, then descends into
    // nested objects (and object elements of arrays) for the next element.
    private static void updateFieldChainName ( JSONObject object,
                                               String[] fieldChainArray,
                                               String[] newFieldChainArray,
                                               int idxField ) {
        String fieldName = fieldChainArray[ idxField ];
        String newFieldName = newFieldChainArray[ idxField ];
        Object o = object.opt ( fieldName );
        if ( object.has ( fieldName ) && o != null ) {
            // NOTE(review): the value is stored under the new key but the old
            // key does not appear to be removed here — unless putOpt handles
            // that in this JSON implementation, both keys would remain; verify.
            object.putOpt ( newFieldName, o );
            if ( o.getClass ( )
                  .equals ( JSONObject.class ) ) {
                updateFieldChainName ( ( JSONObject ) o,
                                       fieldChainArray,
                                       newFieldChainArray,
                                       idxField + 1 );
            } else {
                if ( o.getClass ( )
                      .equals ( JSONArray.class ) ) {
                    JSONArray array = ( JSONArray ) o;
                    for ( int i = 0 ; i < array.length ( ) ; ++i ) {
                        Object o1 = array.opt ( i );
                        if ( o1 != null && o1.getClass ( )
                                             .equals ( JSONObject.class ) ) {
                            updateFieldChainName ( ( JSONObject ) o1,
                                                   fieldChainArray,
                                                   newFieldChainArray,
                                                   idxField + 1 );
                        }
                    }
                }
            }
        }
    }

    // Recursive value-update step. At the last chain element the leaf (or each
    // scalar element of a leaf array) is rewritten via updateObject(); json or
    // nested-array leaves are rejected with DBException.
    private static void updateFieldValues ( JSONObject object,
                                            String fieldChainArray[],
                                            int idxField,
                                            String newFieldType,
                                            HashMap< String, String > mapValue )
            throws DBException {
        String fieldName = fieldChainArray[ idxField ];
        if ( idxField == fieldChainArray.length - 1 ) {
            Object dbRawValue = object.opt ( fieldName );
            // NOTE(review): dbRawValue is dereferenced without a null check —
            // a document missing this field would NPE here; confirm callers
            // guarantee presence.
            if ( dbRawValue.getClass ( )
                           .equals ( JSONObject.class ) ) {
                //WE DO NOT SUPPORT
                System.err.println ( "we do not support update of field chain for json field, you "
                                     + "need to update the inner fields" );
                throw new DBException ( "we do not support update of field chain for json field, you "
                                        + "need to update the inner fields" );
            } else {
                if ( dbRawValue.getClass ( )
                               .equals ( JSONArray.class ) ) {
                    JSONArray array = ( JSONArray ) dbRawValue;
                    for ( int i = 0 ; i < array.length ( ) ; ++i ) {
                        Object o = array.opt ( i );
                        if ( o == null ) {
                            // Drop null elements; step back to compensate for the shift.
                            array.remove ( i );
                            --i;
                        } else {
                            if ( o.getClass ( )
                                  .equals ( JSONObject.class ) ) {
                                throw new DBException ( "we do not support update of field chain of "
                                                        + "json inside array" );
                            } else {
                                if ( o.getClass ( )
                                      .equals ( JSONArray.class ) ) {
                                    throw new DBException ( "we do not support update of field chain of "
                                                            + "array inside array" );
                                } else {
                                    array.putOpt ( i, updateObject ( o, mapValue, newFieldType ) );
                                }
                            }
                        }
                    }
                } else {
                    object.putOpt ( fieldName, updateObject ( dbRawValue, mapValue, newFieldType ) );
                }
            }
        } else {
            Object o = object.opt ( fieldName );
            if ( o != null ) {
                if ( o.getClass ( )
                      .equals ( JSONObject.class ) ) {
                    updateFieldValues ( ( JSONObject ) o,
                                        fieldChainArray,
                                        idxField + 1,
                                        newFieldType,
                                        mapValue );
                } else {
                    if ( o.getClass ( )
                          .equals ( JSONArray.class ) ) {
                        JSONArray array = ( JSONArray ) o;
                        for ( int i = 0 ; i < array.length ( ) ; ++i ) {
                            Object o1 = array.opt ( i );
                            if ( o1 != null ) {
                                if ( o1.getClass ( )
                                       .equals ( JSONObject.class ) ) {
                                    updateFieldValues ( ( JSONObject ) o1,
                                                        fieldChainArray,
                                                        idxField + 1,
                                                        newFieldType,
                                                        mapValue );
                                } else {
                                    throw new DBException ( "we do not support array inside array" );
                                }
                            }
                        }
                    } else {
                        //DO NOTHING GO UP...
                    }
                }
            }
        }
    }

    // Maps a single scalar value through mapValue and coerces it to
    // newFieldType. MIGRATION_DELETION_VALUE (or a null db value) selects the
    // type's default (0 / 0.0 / "" / false).
    private static Object updateObject ( Object data,
                                         HashMap< String, String > mapValue,
                                         String newFieldType ) {
        String dbValue = convertObjectValueToString ( data );
        Object result = null;
        String newValue = mapValue.get ( dbValue );
        if ( newValue == null ) newValue = dbValue;
        // NOTE(review): if dbValue is null and mapValue has no mapping for it,
        // newValue is null here and newValue.equals(...) would NPE; verify
        // callers never pass unconvertible values.
        if ( newValue.equals ( MIGRATION_DELETION_VALUE ) || dbValue == null ) newValue = null;
        if ( newFieldType.equals ( FIELD_TYPE_INTEGER ) ) {
            if ( newValue == null ) newValue = "0";
            result = Integer.parseInt ( newValue );
        }
        if ( newFieldType.equals ( FIELD_TYPE_LONG ) ) {
            if ( newValue == null ) newValue = "0";
            result = Long.parseLong ( newValue );
        }
        if ( newFieldType.equals ( FIELD_TYPE_DOUBLE ) ) {
            if ( newValue == null ) newValue = "0.0";
            result = Double.parseDouble ( newValue );
        }
        if ( newFieldType.equals ( FIELD_TYPE_STRING ) ) {
            if ( newValue == null ) newValue = "";
            result = newValue;
        }
        if ( newFieldType.equals ( FIELD_TYPE_BOOLEAN ) ) {
            if ( newValue == null ) newValue = "false";
            result = Boolean.parseBoolean ( newValue );
        }
        return result;
    }

    /**
     * Convenience: clone the document, rewrite the chain's values, then rename
     * the chain. The original {@code jsonObject} is left untouched.
     */
    public static JSONObject updateFieldChainValueName ( JSONObject jsonObject,
                                                         String previousFieldNChainName,
                                                         String newFieldChain,
                                                         String newFieldType,
                                                         HashMap< String, String > mapValue )
            throws DBException {
        JSONObject object = jsonObject.clone ( );
        object = updateFieldChainValue ( object, previousFieldNChainName, newFieldType, mapValue );
        object = updateFieldChainName ( object, previousFieldNChainName, newFieldChain );
        return object;
    }

    // Stringifies a scalar DB value according to its DBCommons field type.
    // Returns null for null input or on any conversion failure (exception is
    // deliberately ignored).
    private static String convertObjectValueToString ( Object value ) {
        String valueString = null;
        if ( value != null ) {
            try {
                String valueType = DBCommons.getDBFieldType ( value );
                if ( valueType.equals ( FIELD_TYPE_LONG ) || valueType.equals ( FIELD_TYPE_INTEGER ) ) {
                    Long aLong = ( Long ) value;
                    valueString = aLong.toString ( );
                }
                if ( valueType.equals ( FIELD_TYPE_DOUBLE ) ) {
                    Double aDouble = ( Double ) value;
                    valueString = aDouble.toString ( );
                }
                if ( valueType.equals ( FIELD_TYPE_BOOLEAN ) ) {
                    Boolean aBoolean = ( Boolean ) value;
                    valueString = aBoolean.toString ( );
                }
                if ( valueType.equals ( FIELD_TYPE_STRING ) ) {
                    valueString = value.toString ( );
                }
            } catch ( Exception ignored ) {
            }
        }
        return valueString;
    }
}
agustinhenze/mibs.snmplabs.com
pysnmp/JUNIPER-WX-GLOBAL-REG.py
#
# PySNMP MIB module JUNIPER-WX-GLOBAL-REG (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/JUNIPER-WX-GLOBAL-REG
# Produced by pysmi-0.3.4 at Mon Apr 29 19:50:41 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# NOTE: this is pysmi-generated code. It is executed by the pysnmp MIB loader,
# which injects `mibBuilder` into the module namespace before execution — there
# is intentionally no local definition or import of `mibBuilder` here.
#
# Pull in base SMI/TC symbols from already-loaded MIB modules.
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueRangeConstraint, ConstraintsUnion, ConstraintsIntersection, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsUnion", "ConstraintsIntersection", "ValueSizeConstraint")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
MibScalar, MibTable, MibTableRow, MibTableColumn, iso, ModuleIdentity, TimeTicks, MibIdentifier, ObjectIdentity, Gauge32, IpAddress, Counter32, Bits, Integer32, NotificationType, Counter64, enterprises, Unsigned32 = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "iso", "ModuleIdentity", "TimeTicks", "MibIdentifier", "ObjectIdentity", "Gauge32", "IpAddress", "Counter32", "Bits", "Integer32", "NotificationType", "Counter64", "enterprises", "Unsigned32")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")

# Module identity: Juniper WX global registry, rooted at enterprise OID 8239.
jnxWxGlobalRegModule = ModuleIdentity((1, 3, 6, 1, 4, 1, 8239, 1, 1, 1))
jnxWxGlobalRegModule.setRevisions(('2007-11-17 10:00', '2007-11-17 10:00', '2007-11-14 01:30', '2006-06-08 18:00', '2005-05-09 10:12', '2004-03-15 14:00', '2003-06-26 20:00', '2001-07-29 22:00',))
# Descriptive texts are only attached when the loader was asked to keep them.
if mibBuilder.loadTexts: jnxWxGlobalRegModule.setLastUpdated('200107292200Z')
if mibBuilder.loadTexts: jnxWxGlobalRegModule.setOrganization('Juniper Networks, Inc')

# Top-level registry branches under enterprises.8239.
juniperWxRoot = ObjectIdentity((1, 3, 6, 1, 4, 1, 8239))
if mibBuilder.loadTexts: juniperWxRoot.setStatus('current')
jnxWxReg = ObjectIdentity((1, 3, 6, 1, 4, 1, 8239, 1))
if mibBuilder.loadTexts: jnxWxReg.setStatus('current')
jnxWxModules = ObjectIdentity((1, 3, 6, 1, 4, 1, 8239, 1, 1))
if mibBuilder.loadTexts: jnxWxModules.setStatus('current')
jnxWxMibs = ObjectIdentity((1, 3, 6, 1, 4, 1, 8239, 2))
if mibBuilder.loadTexts: jnxWxMibs.setStatus('current')
jnxWxCaps = ObjectIdentity((1, 3, 6, 1, 4, 1, 8239, 3))
if mibBuilder.loadTexts: jnxWxCaps.setStatus('current')
jnxWxReqs = ObjectIdentity((1, 3, 6, 1, 4, 1, 8239, 4))
if mibBuilder.loadTexts: jnxWxReqs.setStatus('current')
jnxWxExpr = ObjectIdentity((1, 3, 6, 1, 4, 1, 8239, 5))
if mibBuilder.loadTexts: jnxWxExpr.setStatus('current')
jnxWxCommonMib = ObjectIdentity((1, 3, 6, 1, 4, 1, 8239, 2, 1))
if mibBuilder.loadTexts: jnxWxCommonMib.setStatus('current')
jnxWxSpecificMib = ObjectIdentity((1, 3, 6, 1, 4, 1, 8239, 2, 2))
if mibBuilder.loadTexts: jnxWxSpecificMib.setStatus('current')

# Product (hardware model) identities under jnxWxProduct (8239.1.2.x).
jnxWxProduct = ObjectIdentity((1, 3, 6, 1, 4, 1, 8239, 1, 2))
if mibBuilder.loadTexts: jnxWxProduct.setStatus('current')
jnxWxProductWx50 = ObjectIdentity((1, 3, 6, 1, 4, 1, 8239, 1, 2, 1))
if mibBuilder.loadTexts: jnxWxProductWx50.setStatus('current')
jnxWxProductWx55 = ObjectIdentity((1, 3, 6, 1, 4, 1, 8239, 1, 2, 2))
if mibBuilder.loadTexts: jnxWxProductWx55.setStatus('current')
jnxWxProductWx20 = ObjectIdentity((1, 3, 6, 1, 4, 1, 8239, 1, 2, 3))
if mibBuilder.loadTexts: jnxWxProductWx20.setStatus('current')
jnxWxProductWx80 = ObjectIdentity((1, 3, 6, 1, 4, 1, 8239, 1, 2, 4))
if mibBuilder.loadTexts: jnxWxProductWx80.setStatus('current')
jnxWxProductWx100 = ObjectIdentity((1, 3, 6, 1, 4, 1, 8239, 1, 2, 5))
if mibBuilder.loadTexts: jnxWxProductWx100.setStatus('current')
jnxWxProductWxc500 = ObjectIdentity((1, 3, 6, 1, 4, 1, 8239, 1, 2, 6))
if mibBuilder.loadTexts: jnxWxProductWxc500.setStatus('current')
jnxWxProductWx15 = ObjectIdentity((1, 3, 6, 1, 4, 1, 8239, 1, 2, 7))
if mibBuilder.loadTexts: jnxWxProductWx15.setStatus('current')
jnxWxProductWxc250 = ObjectIdentity((1, 3, 6, 1, 4, 1, 8239, 1, 2, 8))
if mibBuilder.loadTexts: jnxWxProductWxc250.setStatus('current')
jnxWxProductWx60 = ObjectIdentity((1, 3, 6, 1, 4, 1, 8239, 1, 2, 9))
if mibBuilder.loadTexts: jnxWxProductWx60.setStatus('current')
jnxWxProductWxc590 = ObjectIdentity((1, 3, 6, 1, 4, 1, 8239, 1, 2, 10))
if mibBuilder.loadTexts: jnxWxProductWxc590.setStatus('current')
jnxWxProductIsm200Wxc = ObjectIdentity((1, 3, 6, 1, 4, 1, 8239, 1, 2, 11))
if mibBuilder.loadTexts: jnxWxProductIsm200Wxc.setStatus('current')
jnxWxProductWxc1800 = ObjectIdentity((1, 3, 6, 1, 4, 1, 8239, 1, 2, 12))
if mibBuilder.loadTexts: jnxWxProductWxc1800.setStatus('current')
jnxWxProductWxc2600 = ObjectIdentity((1, 3, 6, 1, 4, 1, 8239, 1, 2, 13))
if mibBuilder.loadTexts: jnxWxProductWxc2600.setStatus('current')
jnxWxProductWxc3400 = ObjectIdentity((1, 3, 6, 1, 4, 1, 8239, 1, 2, 14))
if mibBuilder.loadTexts: jnxWxProductWxc3400.setStatus('current')

# Register every defined symbol with the MIB builder for later lookup.
mibBuilder.exportSymbols("JUNIPER-WX-GLOBAL-REG", jnxWxMibs=jnxWxMibs, jnxWxProductWxc250=jnxWxProductWxc250, jnxWxProductWxc590=jnxWxProductWxc590, jnxWxProductWx80=jnxWxProductWx80, jnxWxReqs=jnxWxReqs, jnxWxModules=jnxWxModules, jnxWxProductWxc3400=jnxWxProductWxc3400, juniperWxRoot=juniperWxRoot, jnxWxProductIsm200Wxc=jnxWxProductIsm200Wxc, jnxWxExpr=jnxWxExpr, jnxWxProductWx50=jnxWxProductWx50, jnxWxProductWx100=jnxWxProductWx100, jnxWxSpecificMib=jnxWxSpecificMib, jnxWxReg=jnxWxReg, jnxWxProductWxc2600=jnxWxProductWxc2600, jnxWxGlobalRegModule=jnxWxGlobalRegModule, jnxWxProductWx15=jnxWxProductWx15, jnxWxCaps=jnxWxCaps, PYSNMP_MODULE_ID=jnxWxGlobalRegModule, jnxWxProductWxc500=jnxWxProductWxc500, jnxWxProductWx60=jnxWxProductWx60, jnxWxCommonMib=jnxWxCommonMib, jnxWxProductWxc1800=jnxWxProductWxc1800, jnxWxProduct=jnxWxProduct, jnxWxProductWx20=jnxWxProductWx20, jnxWxProductWx55=jnxWxProductWx55)
alirezaghey/leetcode-solutions
go/minimum_number_of_arrows_to_burst_balloons.go
<reponame>alirezaghey/leetcode-solutions<filename>go/minimum_number_of_arrows_to_burst_balloons.go<gh_stars>1-10 package leetcode import ( "sort" ) // Time complexity: O(n * log n) // Space complexity: O(1) or whatever the space complexity of the sorting algorithm is func findMinArrowShots(points [][]int) int { sort.Slice(points, func(i, j int) bool { return points[i][0] < points[j][0] }) var curr []int = nil res := 0 for _, point := range points { if curr == nil { curr = point } else if point[0] <= curr[1] { curr = []int{max(point[0], curr[0]), min(point[1], curr[1])} } else { res += 1 curr = point } } return res + 1 } func min(x, y int) int { if x < y { return x } else { return y } } func max(x, y int) int { if x > y { return x } else { return y } }
MCMattCaldwell/FinancialTracker
ExpenseTracker/app/src/main/java/com/pedrocarrillo/expensetracker/adapters/CategoriesAdapter.java
package com.pedrocarrillo.expensetracker.adapters;

import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.widget.TextView;

import com.pedrocarrillo.expensetracker.ExpenseTrackerApp;
import com.pedrocarrillo.expensetracker.R;
import com.pedrocarrillo.expensetracker.custom.BaseViewHolder;
import com.pedrocarrillo.expensetracker.entities.Category;

import java.util.List;

/**
 * RecyclerView adapter that renders a flat list of {@link Category} names,
 * forwarding item clicks to the supplied {@link BaseViewHolder.RecyclerClickListener}
 * and playing a slide-in animation the first time each position appears.
 *
 * Created by pcarrillo on 17/09/2015.
 */
public class CategoriesAdapter extends BaseRecyclerViewAdapter<CategoriesAdapter.CategoryViewHolder> {

    private List<Category> mCategoryList;
    // Highest position animated so far; each row animates only on first appearance.
    // NOTE(review): never reset (e.g. in updateCategories), so replacement data
    // beyond this position will not animate — confirm that's intended.
    private int lastPosition = -1;
    private BaseViewHolder.RecyclerClickListener onRecyclerClickListener;

    /** Holds the single title TextView of a category row. */
    public static class CategoryViewHolder extends BaseViewHolder {
        public TextView tvTitle;
        public CategoryViewHolder(View v, RecyclerClickListener onRecyclerClickListener) {
            super(v, onRecyclerClickListener);
            tvTitle = (TextView)v.findViewById(R.id.tv_title);
        }
    }

    public CategoriesAdapter(List<Category> categoryList, BaseViewHolder.RecyclerClickListener recyclerClickListener) {
        this.mCategoryList = categoryList;
        this.onRecyclerClickListener = recyclerClickListener;
    }

    @Override
    public CategoryViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        View v = LayoutInflater.from(parent.getContext())
                .inflate(R.layout.layout_category_item, parent, false);
        return new CategoryViewHolder(v, onRecyclerClickListener);
    }

    @Override
    public void onBindViewHolder(CategoriesAdapter.CategoryViewHolder holder, int position) {
        final Category category = mCategoryList.get(position);
        holder.tvTitle.setText(category.getName());
        // The bound Category is stashed on the row view for click handlers.
        holder.itemView.setTag(category);
        // Selection state comes from the BaseRecyclerViewAdapter selection tracking.
        holder.itemView.setSelected(isSelected(position));
        setAnimation(holder, position);
    }

    @Override
    public int getItemCount() {
        return mCategoryList.size();
    }

    /** Replaces the backing list and redraws everything. */
    public void updateCategories(List<Category> categoryList) {
        this.mCategoryList = categoryList;
        notifyDataSetChanged();
    }

    // Plays the push-left-in animation only for positions not yet shown.
    private void setAnimation(CategoryViewHolder holder, int position) {
        if (position > lastPosition) {
            Animation animation = AnimationUtils.loadAnimation(ExpenseTrackerApp.getContext(), R.anim.push_left_in);
            holder.itemView.startAnimation(animation);
            lastPosition = position;
        }
    }
}
google-ar/chromium
ash/common/wm/wm_window_animations.cc
<reponame>google-ar/chromium // Copyright 2016 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "ash/common/wm/wm_window_animations.h" #include "ui/compositor/layer.h" #include "ui/gfx/transform.h" namespace ash { void SetTransformForScaleAnimation(ui::Layer* layer, LayerScaleAnimationDirection type) { // Scales for windows above and below the current workspace. const float kLayerScaleAboveSize = 1.1f; const float kLayerScaleBelowSize = .9f; const float scale = type == LAYER_SCALE_ANIMATION_ABOVE ? kLayerScaleAboveSize : kLayerScaleBelowSize; gfx::Transform transform; transform.Translate(-layer->bounds().width() * (scale - 1.0f) / 2, -layer->bounds().height() * (scale - 1.0f) / 2); transform.Scale(scale, scale); layer->SetTransform(transform); } } // namespace ash
OkMarvin/okmarvin
packages/okmarvin/lib/read/readFiles/promiseFilesPath.test.js
const path = require('path')
const promiseFilesPath = require('./promiseFilesPath')

// Fixture content lives one directory up, under ../fixtures.
const fixturesRoot = path.join(__dirname, '..', 'fixtures')

test('returns all markdown files', async () => {
  // Only markdown files under _posts and _pages should match this glob.
  const files = await promiseFilesPath(
    fixturesRoot,
    '{_posts,_pages}/**/*.{md,markdown}'
  )
  expect(files).toEqual([
    '_pages/about-okmarvin.md',
    '_posts/hello-okmarvin.md',
    '_posts/why-okmarvin/index.md'
  ])
})

test('returns all files', async () => {
  // Without an extension filter, every nested file should be returned.
  const files = await promiseFilesPath(fixturesRoot, '{_posts,_pages}/**/*')
  expect(files).toEqual([
    '_pages/about-okmarvin.md',
    '_posts/hello-okmarvin.md',
    '_posts/why-okmarvin/a.txt',
    '_posts/why-okmarvin/dir/b.txt',
    '_posts/why-okmarvin/dir/c/e.txt',
    '_posts/why-okmarvin/index.md'
  ])
})
roomanidzee/monix-connect
gcs/src/it/scala/monix/connect/gcp/storage/GcsBlobSuite.scala
package monix.connect.gcp.storage

import java.io.File
import java.nio.file.{Files, Path}

import com.google.cloud.storage.{Blob, BlobId, BlobInfo, Storage, Option => _}
import monix.execution.Scheduler.Implicits.global
import org.mockito.{ArgumentMatchersSugar, IdiomaticMockito}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import com.google.cloud.storage.contrib.nio.testing.LocalStorageHelper
import monix.reactive.Observable
import monix.eval.Task
import org.scalacheck.Gen
import org.scalatest.BeforeAndAfterAll
import org.apache.commons.io.FileUtils

/**
  * Integration-style tests for [[GcsBlob]] backed by the in-memory
  * [[LocalStorageHelper]] storage stub. Local files used by upload/download
  * tests live in a scratch directory that is (re)created before the suite and
  * removed after it.
  */
class GcsBlobSuite extends AnyWordSpecLike with IdiomaticMockito with Matchers with ArgumentMatchersSugar with BeforeAndAfterAll {

  val storage: Storage = LocalStorageHelper.getOptions.getService
  // Scratch directory for the file-based tests.
  val dir = new File("gcs/tmp").toPath
  val genLocalPath = Gen.identifier.map(s => dir.toAbsolutePath.toString + "/" + s)
  val testBucketName = Gen.identifier.sample.get

  override def beforeAll(): Unit = {
    // Start from a clean scratch directory.
    FileUtils.deleteDirectory(dir.toFile)
    Files.createDirectory(dir)
    super.beforeAll()
  }

  override def afterAll(): Unit = {
    // Fix: this previously called super.beforeAll(), so teardown never ran
    // and the scratch directory was left behind after the suite.
    FileUtils.deleteDirectory(dir.toFile)
    super.afterAll()
  }

  s"${GcsBlob}" should {

    "return true if exists" in {
      //given
      val blobPath = Gen.identifier.sample.get
      val blobInfo: BlobInfo = BlobInfo.newBuilder(BlobId.of(testBucketName, blobPath)).build
      val content: Array[Byte] = Gen.identifier.sample.get.getBytes()
      val blob: Blob = storage.create(blobInfo, content)
      val gcsBlob = new GcsBlob(blob)

      //when
      val t = gcsBlob.exists()

      //then
      t.runSyncUnsafe() shouldBe true
    }

    "return delete if exists" in {
      //given
      val blobPath = Gen.identifier.sample.get
      val blobInfo: BlobInfo = BlobInfo.newBuilder(BlobId.of(testBucketName, blobPath)).build
      val content: Array[Byte] = Gen.identifier.sample.get.getBytes()
      val blob: Blob = storage.create(blobInfo, content)
      val gcsBlob = new GcsBlob(blob)
      val existedBefore = gcsBlob.exists().runSyncUnsafe()

      //when
      val t = gcsBlob.delete()

      //then
      val deleted = t.runSyncUnsafe()
      val existsAfterDeletion = gcsBlob.exists().runSyncUnsafe()
      existedBefore shouldBe true
      deleted shouldBe true
      existsAfterDeletion shouldBe false
    }

    "download a small blob in form of observable" in {
      //given
      val blobPath = Gen.identifier.sample.get
      val blobInfo: BlobInfo = BlobInfo.newBuilder(BlobId.of(testBucketName, blobPath)).build
      val content: Array[Byte] = Gen.identifier.sample.get.getBytes()
      val blob: Blob = storage.create(blobInfo, content)
      val gcsBlob = new GcsBlob(blob)

      //when
      val ob: Observable[Array[Byte]] = gcsBlob.download()
      val r: Array[Byte] = ob.headL.runSyncUnsafe()

      //then
      val exists = gcsBlob.exists().runSyncUnsafe()
      exists shouldBe true
      r shouldBe content
    }

    "download blob from a GcsBlob that resides within a task" in {
      //given
      val content: Array[Byte] = Gen.identifier.sample.get.getBytes()
      val gcsStorage = GcsStorage(storage)
      // memoize so the same blob is reused by the upload and download below
      val blob: Task[GcsBlob] = gcsStorage.createBlob("myBucket", "myBlob").memoize
      blob.flatMap(b => Observable.now(content).consumeWith(b.upload())).runSyncUnsafe()

      val ob: Observable[Array[Byte]] = Observable.fromTask(blob)
        .flatMap(_.download())
      val r = ob.headL.runSyncUnsafe()

      //then
      r shouldBe content
    }

    "download to file" in {
      //given
      val blobPath = Gen.identifier.sample.get
      val blobInfo: BlobInfo = BlobInfo.newBuilder(BlobId.of(testBucketName, blobPath)).build
      val filePath: Path = new File(genLocalPath.sample.get).toPath
      val content: Array[Byte] = Gen.identifier.sample.get.getBytes()
      val blob: Blob = storage.create(blobInfo, content)
      val gcsBlob = new GcsBlob(blob)

      //when
      val t: Task[Unit] = gcsBlob.downloadToFile(filePath)
      t.runSyncUnsafe()

      //then
      val exists = gcsBlob.exists().runSyncUnsafe()
      val r = Files.readAllBytes(filePath)
      exists shouldBe true
      r shouldBe content
    }

    "upload to the blob" when {

      "it is empty" in {
        //given
        val blobPath = Gen.identifier.sample.get
        val blobInfo: BlobInfo = BlobInfo.newBuilder(BlobId.of(testBucketName, blobPath)).build
        val blob: Blob = storage.create(blobInfo)
        val gcsBlob = new GcsBlob(blob)
        val content: Array[Byte] = Gen.identifier.sample.get.getBytes()

        //when
        val downloader: Observable[Array[Byte]] = gcsBlob.download()
        val contentBefore: Option[Array[Byte]] = downloader.headOptionL.runSyncUnsafe()
        Observable.pure(content).consumeWith(gcsBlob.upload()).runSyncUnsafe()

        //then
        val exists = gcsBlob.exists().runSyncUnsafe()
        val r: Array[Byte] = downloader.headL.runSyncUnsafe()
        exists shouldBe true
        contentBefore.isEmpty shouldBe true
        r shouldBe content
      }

      "it is not empty" in {
        //given
        val blobPath = Gen.identifier.sample.get
        val blobInfo: BlobInfo = BlobInfo.newBuilder(BlobId.of(testBucketName, blobPath)).build
        val oldContent: Array[Byte] = Gen.identifier.sample.get.getBytes()
        val newContent: Array[Byte] = Gen.identifier.sample.get.getBytes()
        val blob: Blob = storage.create(blobInfo, oldContent)
        val gcsBlob = new GcsBlob(blob)

        //when
        val downloader: Observable[Array[Byte]] = gcsBlob.download()
        val contentBefore: Option[Array[Byte]] = downloader.headOptionL.runSyncUnsafe()
        Observable.now(newContent).consumeWith(gcsBlob.upload()).runSyncUnsafe()

        //then
        val exists = gcsBlob.exists().runSyncUnsafe()
        val r: Array[Byte] = downloader.headL.runSyncUnsafe()
        exists shouldBe true
        contentBefore.isEmpty shouldBe false
        r shouldBe newContent
      }

      "the consumed observable is empty" in {
        //given
        val blobPath = Gen.identifier.sample.get
        val blobInfo: BlobInfo = BlobInfo.newBuilder(BlobId.of(testBucketName, blobPath)).build
        val blob: Blob = storage.create(blobInfo)
        val gcsBlob = new GcsBlob(blob)

        //when
        val downloader: Observable[Array[Byte]] = gcsBlob.download()
        val contentBefore: Option[Array[Byte]] = downloader.headOptionL.runSyncUnsafe()
        Observable.pure(Array.emptyByteArray).consumeWith(gcsBlob.upload()).runSyncUnsafe()

        //then
        val r: Option[Array[Byte]] = downloader.headOptionL.runSyncUnsafe()
        contentBefore.isEmpty shouldBe true
        r.isEmpty shouldBe true
      }
    }

    "uploads to the blob from a file" in {
      //given
      val blobPath = Gen.identifier.sample.get
      val blobInfo: BlobInfo = BlobInfo.newBuilder(BlobId.of(testBucketName, blobPath)).build
      val blob: Blob = storage.create(blobInfo)
      val gcsBlob = new GcsBlob(blob)
      val sourcePath = new File(genLocalPath.sample.get).toPath
      val targetPath = new File(genLocalPath.sample.get).toPath
      val content: Array[Byte] = Gen.identifier.sample.get.getBytes()
      Files.write(sourcePath, content)

      //when
      val downloader: Observable[Array[Byte]] = gcsBlob.download()
      val contentBefore: Option[Array[Byte]] = downloader.headOptionL.runSyncUnsafe()
      val t = gcsBlob.uploadFromFile(sourcePath)

      //then
      t.runSyncUnsafe() shouldBe a[Unit]
      val exists = gcsBlob.exists().runSyncUnsafe()
      exists shouldBe true
      contentBefore.isDefined shouldBe false
      gcsBlob.underlying.downloadTo(targetPath)
      val r = Files.readAllBytes(targetPath)
      r shouldBe content
    }

    "return a failed task when uploading from a non existent file" in {
      //given
      val blobPath = Gen.identifier.sample.get
      val blobInfo: BlobInfo = BlobInfo.newBuilder(BlobId.of(testBucketName, blobPath)).build
      val blob: Blob = storage.create(blobInfo)
      val gcsBlob = new GcsBlob(blob)
      val sourcePath = new File(genLocalPath.sample.get).toPath

      //when
      val dowloader: Observable[Array[Byte]] = gcsBlob.download()
      val contentBefore: Option[Array[Byte]] = dowloader.headOptionL.runSyncUnsafe()
      val f = gcsBlob.uploadFromFile(sourcePath).runToFuture(global)

      //then
      f.value.get.isFailure shouldBe true
      val r = gcsBlob.download().headOptionL.runSyncUnsafe()
      contentBefore.isDefined shouldBe false
      r.isDefined shouldBe false
    }

    /** not supported by the [[LocalStorageHelper]]
    "create and lists acls" in {
      //given
      val blobPath = nonEmptyString.sample.get
      val blobInfo: BlobInfo = BlobInfo.newBuilder(BlobId.of(testBucketName, blobPath)).build
      val blob: Blob = storage.create(blobInfo)
      val gcsBlob = new GcsBlob(blob)
      val userAcl = Acl.of(new User("<EMAIL>"), Role.OWNER)
      val groupAcl = Acl.of(new Group("<EMAIL>"), Role.READER)

      //when
      val r1: Acl = gcsBlob.createAcl(userAcl).runSyncUnsafe()
      val r2: Acl = gcsBlob.createAcl(groupAcl).runSyncUnsafe()
      val l: List[Acl] = gcsBlob.listAcls().toListL.runSyncUnsafe()

      //then
      r1 shouldBe userAcl
      r2 shouldBe groupAcl
      l should contain theSameElementsAs List(userAcl, groupAcl)
    }*/
  }
}
npocmaka/Windows-Server-2003
drivers/wdm/input/hidparse/query.c
/*++

Copyright (c) 1996    Microsoft Corporation

Module Name:

    query.c

Abstract:

    This module contains the code for querying HID report packets.

Environment:

    Kernel & user mode

Revision History:

    Aug-96 : created by <NAME>

--*/
#define NT_SUCCESS(Status) ((NTSTATUS)(Status) >= 0)
#include <wtypes.h>
#include "hidsdi.h"
#include "hidparse.h"

#ifdef ALLOC_PRAGMA
#pragma alloc_text(PAGE, HidP_GetCaps)
#pragma alloc_text(PAGE, HidP_GetLinkCollectionNodes)
#pragma alloc_text(PAGE, HidP_GetButtonCaps)
#pragma alloc_text(PAGE, HidP_GetSpecificButtonCaps)
#pragma alloc_text(PAGE, HidP_GetValueCaps)
#pragma alloc_text(PAGE, HidP_GetSpecificValueCaps)
#pragma alloc_text(PAGE, HidP_MaxUsageListLength)
#pragma alloc_text(PAGE, HidP_InitializeReportForID)
#pragma alloc_text(PAGE, HidP_GetExtendedAttributes)
#endif

#define PAGED_CODE()
#ifndef HIDPARSE_USERMODE
#if DBG
/* Checked kernel-mode build: PAGED_CODE() asserts the current IRQL allows
 * touching pageable code, mirroring the standard WDK macro. */
typedef UCHAR KIRQL;
KIRQL KeGetCurrentIrql();
#define APC_LEVEL 0x1
ULONG _cdecl DbgPrint (PCH Format, ...);
NTSYSAPI VOID NTAPI RtlAssert(PVOID, PVOID, ULONG, PCHAR);
#define ASSERT( exp ) \
    if (!(exp)) RtlAssert( #exp, __FILE__, __LINE__, NULL )
#undef PAGED_CODE
#define PAGED_CODE() \
    if (KeGetCurrentIrql() > APC_LEVEL) { \
        HidP_KdPrint(2, ( "EX: Pageable code called at IRQL %d\n", KeGetCurrentIrql() )); \
        ASSERT(FALSE); \
    }
#else // DBG
#define ASSERT(x)
#endif // DBG
#else // HIDPARSE_USERMODE
#define ASSERT(x)
#endif // HIDPARSE_USERMODE

/* Validates the caller-supplied preparsed data by its two signature fields;
 * bails out of the enclosing function with a status code on mismatch. */
#define CHECK_PPD(_x_) \
    if ((HIDP_PREPARSED_DATA_SIGNATURE1 != (_x_)->Signature1) ||\
        (HIDP_PREPARSED_DATA_SIGNATURE2 != (_x_)->Signature2)) \
    { return HIDP_STATUS_INVALID_PREPARSED_DATA; }

ULONG
HidP_ExtractData (
    IN USHORT ByteOffset,
    IN USHORT BitOffset,
    IN USHORT BitLength,
    IN PUCHAR Report
    )
/*++
Routine Description:
   Given a HID report a byte offset, bit offset and bitlength extract the
   bits from the report in little endian BIT order.
--*/
{
    ULONG inspect = 0;
    USHORT tmpByte = 0;
    USHORT tmpBit = 0;

    // Start with the high bits and work our way down.
    //
    // Little endian (by bit)
    // Byte 2  |Byte 1  |Byte 0
    // 765432107654321076543210 (bits)
    //
    // Get low byte first. (need the higher bits)
    // Offset is from bit zero.
    //
    // tmpByte/tmpBit locate the bit just past the END of the field.
    tmpByte = (ByteOffset << 3) + BitOffset + BitLength;
    tmpBit = tmpByte & 7;
    tmpByte >>= 3;

    if (BitLength < tmpBit) {
        // Field lies entirely within one byte: mask off the high bits above
        // the field, then shift away the bits below it.
        inspect = (UCHAR) Report [tmpByte] & ((1 << tmpBit) - 1);
        inspect >>= BitOffset;
        return inspect;
    }
    if (tmpBit) { // Not Byte alligned!
        // Take the partial (highest) byte of the field first.
        inspect = (UCHAR) Report [tmpByte] & ((1 << tmpBit) - 1);
        BitLength -= tmpBit;
    }
    tmpByte--;
    // Accumulate whole bytes, walking downward toward the field start.
    while (BitLength >= 8) {
        inspect <<= 8;
        inspect |= (UCHAR) Report[tmpByte];
        BitLength -= 8;
        tmpByte--;
    }
    if (BitLength) {
        // Remaining low-order bits sit at the top of the first byte.
        inspect <<= BitLength;
        inspect |= (UCHAR) ( (Report [tmpByte] >> (8 - BitLength))
                           & ((1 << BitLength) - 1));
    }
    return inspect;
}

void
HidP_InsertData (
    IN USHORT ByteOffset,
    IN USHORT BitOffset,
    IN USHORT BitLength, // Length of the value set in bits.
    IN OUT PUCHAR Report,
    IN ULONG Value
    )
/*++
Routine Description:
   Given a HID report a byte offset, bit offset and bitlength set those bits
   in little endian BIT order to the value provided.
--*/
{
    ULONG mask;
    ULONG tmpBit;

    //
    // Little endian (by bit)
    // Byte 2  |Byte 1  |Byte 0
    // 765432107654321076543210 (bits)
    //
    // Get low byte first. (need the higher bits)
    // Offset is from bit zero.
    //
    tmpBit = BitLength + BitOffset;
    if (tmpBit < 8) {
        // Field fits in a single byte: clear just those bits and OR in Value.
        mask = (1 << tmpBit) - (1 << BitOffset);
        Report [ByteOffset] &= ~mask;
        Report [ByteOffset] |= (UCHAR) ((Value << BitOffset) & mask);
        return;
    }
    if (BitOffset) { // Not byte aligned, deal with the last partial byte.
        Report [ByteOffset] &= ((1 << BitOffset) - 1); // Zap upper bits
        Report [ByteOffset] |= (UCHAR) (Value << BitOffset);
        BitLength -= (8 - BitOffset);
        Value >>= (8 - BitOffset);
        ByteOffset++;
    }
    // Store whole bytes, lowest first.
    while (BitLength >= 8) {
        Report [ByteOffset] = (UCHAR) Value;
        Value >>= 8;
        BitLength -= 8;
        ByteOffset++;
    }
    if (BitLength) {
        Report [ByteOffset] &= ((UCHAR) 0 - (UCHAR) (1 << BitLength)); // Zap lower bits.
        Report [ByteOffset] |= (Value & ((1 << BitLength) - 1));
    }
}

// NOTE(review): declared with no return type (implicit int, pre-C99 style)
// although it returns HIDP_STATUS codes — presumably this should be NTSTATUS;
// confirm against the header's prototype.
HidP_DeleteArrayEntry (
    IN ULONG BitPos,
    IN USHORT BitLength, // Length of the value set in bits.
    IN USHORT ReportCount,
    IN ULONG Value, // Value to delete.
    IN OUT PUCHAR Report
    )
/*++
Routine Description:
   Given a HID report a byte offset, bit offset and bitlength remove that
   data item from the report, by shifting all data items left until the last
   item finally setting that one to zero.  In otherwards clear the given
   entry from the hid array.
   NOTE: If there are two such values set we only eliminate the first one.
--*/
{
    ULONG tmpValue;
    ULONG localBitPos; // for debugging only.  Compiler should kill this line.
    ULONG localRemaining;
    ULONG nextBitPos;

    localBitPos = BitPos;
    tmpValue = 0;
    localRemaining = ReportCount;

    ASSERT (0 < ReportCount);
    ASSERT (0 != Value);

    //
    // Find the data.
    //
    while (0 < localRemaining) {
        tmpValue = HidP_ExtractData ((USHORT) (localBitPos >> 3),
                                     (USHORT) (localBitPos & 7),
                                     BitLength,
                                     Report);
        if (Value == tmpValue) {
            break;
        }
        localBitPos += BitLength;
        localRemaining--;
    }
    if (Value != tmpValue) {
        return HIDP_STATUS_BUTTON_NOT_PRESSED;
    }
    // Shift every subsequent entry one slot left over the deleted one...
    while (1 < localRemaining) {
        nextBitPos = localBitPos + BitLength;
        tmpValue = HidP_ExtractData ((USHORT) (nextBitPos >> 3),
                                     (USHORT) (nextBitPos & 7),
                                     BitLength,
                                     Report);
        HidP_InsertData ((USHORT) (localBitPos >> 3),
                         (USHORT) (localBitPos & 7),
                         BitLength,
                         Report,
                         tmpValue);
        localBitPos = nextBitPos;
        localRemaining--;
    }
    // ...and zero the (now duplicated) last slot.
    HidP_InsertData ((USHORT) (localBitPos >> 3),
                     (USHORT) (localBitPos & 7),
                     BitLength,
                     Report,
                     0);
    return HIDP_STATUS_SUCCESS;
}

NTSTATUS __stdcall
HidP_GetCaps (
    IN  PHIDP_PREPARSED_DATA PreparsedData,
    OUT PHIDP_CAPS           Capabilities
    )
/*++
Routine Description:
   Please see Hidpi.h for routine description
Notes:
--*/
{
    ULONG i;
    HIDP_CHANNEL_DESC * data;

    PAGED_CODE();
    CHECK_PPD (PreparsedData);

    RtlZeroMemory (Capabilities, sizeof(HIDP_CAPS));
    Capabilities->UsagePage = PreparsedData->UsagePage;
    Capabilities->Usage = PreparsedData->Usage;
Capabilities->InputReportByteLength = PreparsedData->Input.ByteLen; Capabilities->OutputReportByteLength = PreparsedData->Output.ByteLen; Capabilities->FeatureReportByteLength = PreparsedData->Feature.ByteLen; // Reserved fields go here Capabilities->NumberLinkCollectionNodes = PreparsedData->LinkCollectionArrayLength; Capabilities->NumberInputButtonCaps = 0; Capabilities->NumberInputValueCaps = 0; Capabilities->NumberOutputButtonCaps = 0; Capabilities->NumberOutputValueCaps = 0; Capabilities->NumberFeatureButtonCaps = 0; Capabilities->NumberFeatureValueCaps = 0; i=PreparsedData->Input.Offset; data = &PreparsedData->Data[i]; Capabilities->NumberInputDataIndices = 0; for (; i < PreparsedData->Input.Index; i++, data++) { if (data->IsButton) { Capabilities->NumberInputButtonCaps++; } else { Capabilities->NumberInputValueCaps++; } Capabilities->NumberInputDataIndices += data->Range.DataIndexMax - data->Range.DataIndexMin + 1; } i=PreparsedData->Output.Offset; data = &PreparsedData->Data[i]; Capabilities->NumberOutputDataIndices = 0; for (; i < PreparsedData->Output.Index; i++, data++) { if (data->IsButton) { Capabilities->NumberOutputButtonCaps++; } else { Capabilities->NumberOutputValueCaps++; } Capabilities->NumberOutputDataIndices += data->Range.DataIndexMax - data->Range.DataIndexMin + 1; } i=PreparsedData->Feature.Offset; data = &PreparsedData->Data[i]; Capabilities->NumberFeatureDataIndices = 0; for (; i < PreparsedData->Feature.Index; i++, data++) { if (data->IsButton) { Capabilities->NumberFeatureButtonCaps++; } else { Capabilities->NumberFeatureValueCaps++; } Capabilities->NumberFeatureDataIndices += data->Range.DataIndexMax - data->Range.DataIndexMin + 1; } return HIDP_STATUS_SUCCESS; } NTSTATUS __stdcall HidP_GetLinkCollectionNodes ( OUT PHIDP_LINK_COLLECTION_NODE LinkCollectionNodes, IN OUT PULONG LinkCollectionNodesLength, IN PHIDP_PREPARSED_DATA PreparsedData ) /*++ Routine Description: Please see Hidpi.h for routine description --*/ { 
PHIDP_PRIVATE_LINK_COLLECTION_NODE nodeArray; ULONG length; ULONG i; NTSTATUS status = HIDP_STATUS_SUCCESS; PAGED_CODE(); CHECK_PPD (PreparsedData); if (*LinkCollectionNodesLength < PreparsedData->LinkCollectionArrayLength) { length = *LinkCollectionNodesLength; status = HIDP_STATUS_BUFFER_TOO_SMALL; } else { length = PreparsedData->LinkCollectionArrayLength; } *LinkCollectionNodesLength = PreparsedData->LinkCollectionArrayLength; nodeArray = (PHIDP_PRIVATE_LINK_COLLECTION_NODE) (PreparsedData->RawBytes + PreparsedData->LinkCollectionArrayOffset); for (i = 0; i < length; i++, LinkCollectionNodes++, nodeArray++ ) { // *LinkCollectionNodes = *nodeArray; LinkCollectionNodes->LinkUsage = nodeArray->LinkUsage; LinkCollectionNodes->LinkUsagePage = nodeArray->LinkUsagePage; LinkCollectionNodes->Parent = nodeArray->Parent; LinkCollectionNodes->NumberOfChildren = nodeArray->NumberOfChildren; LinkCollectionNodes->NextSibling = nodeArray->NextSibling; LinkCollectionNodes->FirstChild = nodeArray->FirstChild; LinkCollectionNodes->CollectionType = nodeArray->CollectionType; LinkCollectionNodes->IsAlias = nodeArray->IsAlias; } return status; } #undef HidP_GetButtonCaps NTSTATUS __stdcall HidP_GetButtonCaps ( IN HIDP_REPORT_TYPE ReportType, OUT PHIDP_BUTTON_CAPS ButtonCaps, IN OUT PUSHORT ButtonCapsLength, IN PHIDP_PREPARSED_DATA PreparsedData ) /*++ Routine Description: Please see Hidpi.h for routine description Notes: --*/ { return HidP_GetSpecificButtonCaps (ReportType, 0, 0, 0, ButtonCaps, ButtonCapsLength, PreparsedData); } NTSTATUS __stdcall HidP_GetSpecificButtonCaps ( IN HIDP_REPORT_TYPE ReportType, IN USAGE UsagePage, // Optional (0 => ignore) IN USHORT LinkCollection, // Optional (0 => ignore) IN USAGE Usage, // Optional (0 => ignore) OUT PHIDP_BUTTON_CAPS ButtonCaps, IN OUT PUSHORT ButtonCapsLength, IN PHIDP_PREPARSED_DATA PreparsedData ) /*++ Routine Description: Please see Hidpi.h for routine description Notes: --*/ { struct _CHANNEL_REPORT_HEADER * iof; 
PHIDP_CHANNEL_DESC channel; NTSTATUS status = HIDP_STATUS_USAGE_NOT_FOUND; USHORT i, j; PAGED_CODE(); CHECK_PPD (PreparsedData); switch (ReportType) { case HidP_Input: iof = &PreparsedData->Input; break; case HidP_Output: iof = &PreparsedData->Output; break; case HidP_Feature: iof = &PreparsedData->Feature; break; default: return HIDP_STATUS_INVALID_REPORT_TYPE; } for (i = iof->Offset, j = 0; i < iof->Index ; i++) { channel = &PreparsedData->Data[i]; if ((channel->IsButton) && ((!UsagePage || (UsagePage == channel->UsagePage)) && (!LinkCollection || (LinkCollection == channel->LinkCollection) || ((HIDP_LINK_COLLECTION_ROOT == LinkCollection) && (0 == channel->LinkCollection))) && (!Usage || ((channel->Range.UsageMin <= Usage) && (Usage <= channel->Range.UsageMax))))) { status = HIDP_STATUS_SUCCESS; if (j < *ButtonCapsLength) { ButtonCaps[j].UsagePage = channel->UsagePage; ButtonCaps[j].LinkCollection = channel->LinkCollection; ButtonCaps[j].LinkUsagePage = channel->LinkUsagePage; ButtonCaps[j].LinkUsage = channel->LinkUsage; ButtonCaps[j].IsRange = (BOOLEAN) channel->IsRange; ButtonCaps[j].IsStringRange = (BOOLEAN) channel->IsStringRange; ButtonCaps[j].IsDesignatorRange=(BOOLEAN)channel->IsDesignatorRange; ButtonCaps[j].ReportID = channel->ReportID; ButtonCaps[j].BitField = (USHORT) channel->BitField; ButtonCaps[j].IsAbsolute = (BOOLEAN) channel->IsAbsolute; ButtonCaps[j].IsAlias = (BOOLEAN) channel->IsAlias; // if (channel->IsRange) // { ButtonCaps[j].Range.UsageMin = channel->Range.UsageMin; ButtonCaps[j].Range.UsageMax = channel->Range.UsageMax; ButtonCaps[j].Range.DataIndexMin = channel->Range.DataIndexMin; ButtonCaps[j].Range.DataIndexMax = channel->Range.DataIndexMax; // } else // { // ButtonCaps[j].NotRange.Usage = channel->NotRange.Usage; // } // if (channel->IsStringRange) // { ButtonCaps[j].Range.StringMin = channel->Range.StringMin; ButtonCaps[j].Range.StringMax = channel->Range.StringMax; // } else // { // ButtonCaps[j].NotRange.StringIndex // = 
channel->NotRange.StringIndex; // } // if (channel->IsDesignatorRange) // { ButtonCaps[j].Range.DesignatorMin = channel->Range.DesignatorMin; ButtonCaps[j].Range.DesignatorMax = channel->Range.DesignatorMax; // } else // { // ButtonCaps[j].NotRange.DesignatorIndex // = channel->NotRange.DesignatorIndex; // } } else { status = HIDP_STATUS_BUFFER_TOO_SMALL; } j++; } } *ButtonCapsLength = j; return status; } #undef HidP_GetValueCaps NTSTATUS __stdcall HidP_GetValueCaps ( IN HIDP_REPORT_TYPE ReportType, OUT PHIDP_VALUE_CAPS ValueCaps, IN OUT PUSHORT ValueCapsLength, IN PHIDP_PREPARSED_DATA PreparsedData ) /*++ Routine Description: Please see Hidpi.h for routine description Notes: --*/ { return HidP_GetSpecificValueCaps (ReportType, 0, 0, 0, ValueCaps, ValueCapsLength, PreparsedData); } NTSTATUS __stdcall HidP_GetSpecificValueCaps ( IN HIDP_REPORT_TYPE ReportType, IN USAGE UsagePage, // Optional (0 => ignore) IN USHORT LinkCollection, // Optional (0 => ignore) IN USAGE Usage, // Optional (0 => ignore) OUT PHIDP_VALUE_CAPS ValueCaps, IN OUT PUSHORT ValueCapsLength, IN PHIDP_PREPARSED_DATA PreparsedData ) /*++ Routine Description: Please see Hidpi.h for routine description Notes: --*/ { struct _CHANNEL_REPORT_HEADER * iof; PHIDP_CHANNEL_DESC channel; NTSTATUS status = HIDP_STATUS_USAGE_NOT_FOUND; USHORT i, j; CHECK_PPD (PreparsedData); PAGED_CODE (); switch (ReportType) { case HidP_Input: iof = &PreparsedData->Input; break; case HidP_Output: iof = &PreparsedData->Output; break; case HidP_Feature: iof = &PreparsedData->Feature; break; default: return HIDP_STATUS_INVALID_REPORT_TYPE; } for (i = iof->Offset, j = 0; i < iof->Index ; i++) { channel = &PreparsedData->Data[i]; if ((!channel->IsButton) && ((!UsagePage || (UsagePage == channel->UsagePage)) && (!LinkCollection || (LinkCollection == channel->LinkCollection) || ((HIDP_LINK_COLLECTION_ROOT == LinkCollection) && (0 == channel->LinkCollection))) && (!Usage || ((channel->Range.UsageMin <= Usage) && (Usage <= 
channel->Range.UsageMax))))) { status = HIDP_STATUS_SUCCESS; if (j < *ValueCapsLength) { ValueCaps[j].UsagePage = channel->UsagePage; ValueCaps[j].LinkCollection = channel->LinkCollection; ValueCaps[j].LinkUsagePage = channel->LinkUsagePage; ValueCaps[j].LinkUsage = channel->LinkUsage; ValueCaps[j].IsRange = (BOOLEAN) channel->IsRange; ValueCaps[j].IsStringRange = (BOOLEAN) channel->IsStringRange; ValueCaps[j].IsDesignatorRange =(BOOLEAN)channel->IsDesignatorRange; ValueCaps[j].ReportID = channel->ReportID; ValueCaps[j].BitField = (USHORT) channel->BitField; ValueCaps[j].BitSize = channel->ReportSize; ValueCaps[j].IsAbsolute = (BOOLEAN) channel->IsAbsolute; ValueCaps[j].HasNull = channel->Data.HasNull; ValueCaps[j].Units = channel->Units; ValueCaps[j].UnitsExp = channel->UnitExp; ValueCaps[j].LogicalMin = channel->Data.LogicalMin; ValueCaps[j].LogicalMax = channel->Data.LogicalMax; ValueCaps[j].PhysicalMin = channel->Data.PhysicalMin; ValueCaps[j].PhysicalMax = channel->Data.PhysicalMax; ValueCaps[j].IsAlias = (BOOLEAN) channel->IsAlias; // if (channel->IsRange) // { ValueCaps[j].Range.UsageMin = channel->Range.UsageMin; ValueCaps[j].Range.UsageMax = channel->Range.UsageMax; ValueCaps[j].Range.DataIndexMin = channel->Range.DataIndexMin; ValueCaps[j].Range.DataIndexMax = channel->Range.DataIndexMax; // } else // { // ValueCaps[j].NotRange.Usage = channel->NotRange.Usage; // } // if (channel->IsStringRange) // { ValueCaps[j].Range.StringMin = channel->Range.StringMin; ValueCaps[j].Range.StringMax = channel->Range.StringMax; // } else // { // ValueCaps[j].NotRange.StringIndex // = channel->NotRange.StringIndex; // } // if (channel->IsDesignatorRange) // { ValueCaps[j].Range.DesignatorMin = channel->Range.DesignatorMin; ValueCaps[j].Range.DesignatorMax = channel->Range.DesignatorMax; // } else // { // ValueCaps[j].NotRange.DesignatorIndex // = channel->NotRange.DesignatorIndex; // } ValueCaps[j].ReportCount = (channel->IsRange) ? 
1 : channel->ReportCount; } else { status = HIDP_STATUS_BUFFER_TOO_SMALL; } j++; } } *ValueCapsLength = j; return status; } NTSTATUS __stdcall HidP_GetExtendedAttributes ( IN HIDP_REPORT_TYPE ReportType, IN USHORT DataIndex, IN PHIDP_PREPARSED_DATA PreparsedData, OUT PHIDP_EXTENDED_ATTRIBUTES Attributes, IN OUT PULONG LengthAttributes ) /*++ Routine Description: Please See hidpi.h for description. --*/ { struct _CHANNEL_REPORT_HEADER * iof; PHIDP_CHANNEL_DESC channel; HIDP_EXTENDED_ATTRIBUTES buffer; ULONG channelIndex = 0; NTSTATUS status = HIDP_STATUS_DATA_INDEX_NOT_FOUND; ULONG i; ULONG actualLen, copyLen = 0; CHECK_PPD (PreparsedData); PAGED_CODE (); switch (ReportType) { case HidP_Input: iof = &PreparsedData->Input; break; case HidP_Output: iof = &PreparsedData->Output; break; case HidP_Feature: iof = &PreparsedData->Feature; break; default: return HIDP_STATUS_INVALID_REPORT_TYPE; } for (channelIndex = iof->Offset, channel = PreparsedData->Data; channelIndex < iof->Index; channelIndex++, channel++) { if ((channel->Range.DataIndexMin <= DataIndex) && (DataIndex <= channel->Range.DataIndexMax)) { RtlZeroMemory (Attributes, *LengthAttributes); RtlZeroMemory (&buffer, sizeof (buffer)); // // Set the fixed parameters // buffer.NumGlobalUnknowns = (UCHAR) channel->NumGlobalUnknowns; // buffer.GlobalUnknowns = channel->GlobalUnknowns; // // Set the length // actualLen = FIELD_OFFSET (HIDP_EXTENDED_ATTRIBUTES, Data) + (buffer.NumGlobalUnknowns * sizeof(HIDP_UNKNOWN_TOKEN)); // // Copy over the fixed paramters // copyLen = MIN (*LengthAttributes, sizeof (buffer)); RtlCopyMemory (Attributes, &buffer, copyLen); // // Copy over the data. 
// copyLen = MIN (*LengthAttributes, actualLen) - FIELD_OFFSET (HIDP_EXTENDED_ATTRIBUTES, Data); if (copyLen && copyLen <= (MIN(*LengthAttributes, actualLen))) { RtlCopyMemory ((PVOID) Attributes->Data, (PVOID) channel->GlobalUnknowns, copyLen); } if (*LengthAttributes < actualLen) { status = HIDP_STATUS_BUFFER_TOO_SMALL; } else { status = HIDP_STATUS_SUCCESS; } break; } } return status; } NTSTATUS __stdcall HidP_InitializeReportForID ( IN HIDP_REPORT_TYPE ReportType, IN UCHAR ReportID, IN PHIDP_PREPARSED_DATA PreparsedData, IN OUT PCHAR Report, IN ULONG ReportLength ) /*++ Routine Description: Please See hidpi.h for description. --*/ { struct _CHANNEL_REPORT_HEADER * iof; PHIDP_CHANNEL_DESC channel; NTSTATUS status = HIDP_STATUS_REPORT_DOES_NOT_EXIST; ULONG channelIndex = 0; ULONG reportBitIndex = 0; ULONG nullMask = 0; LONG nullValue = 0; ULONG i; CHECK_PPD (PreparsedData); PAGED_CODE (); switch (ReportType) { case HidP_Input: iof = &PreparsedData->Input; break; case HidP_Output: iof = &PreparsedData->Output; break; case HidP_Feature: iof = &PreparsedData->Feature; break; default: return HIDP_STATUS_INVALID_REPORT_TYPE; } if ((USHORT) ReportLength != iof->ByteLen) { return HIDP_STATUS_INVALID_REPORT_LENGTH; } if (0 == iof->ByteLen) { return HIDP_STATUS_REPORT_DOES_NOT_EXIST; } RtlZeroMemory (Report, ReportLength); // Set the report ID for this report Report[0] = ReportID; for (channelIndex = iof->Offset, channel = PreparsedData->Data; channelIndex < iof->Index; channelIndex++, channel++) { // // Walk the list of channels looking for fields that need initialization // if (channel->ReportID != ReportID) { continue; } status = HIDP_STATUS_SUCCESS; if ((channel->IsButton) || (channel->IsConst) || (channel->IsAlias)) { // // Buttons are initialized to zero // Constants cannot be set // Aliases are referenced by their first entries // continue; } if (channel->Data.HasNull) { if (32 == channel->ReportSize) { nullMask = -1; } else { nullMask = (1 << channel->ReportSize) 
- 1; } // // Note logical values are always unsigned. // (Not to be confused with physical values which are signed.) // if (channel->Data.LogicalMax < channel->Data.LogicalMin) { // // This is really an error. I'm not sure what I should do here. // nullValue = 0; } else { nullValue = (channel->Data.LogicalMin - 1) & nullMask; } if ((channel->Data.LogicalMin <= nullValue) && (nullValue <= channel->Data.LogicalMax)) { // // // // Now what? // nullValue = 0; } } else { // // I don't know what I should do in this case: the device has no // reported nul state. // // For now let's just leave it zero // nullValue = 0; } if (0 == nullValue) { // // Nothing to do on this pass // continue; } if (channel->IsRange) { for (i = 0, reportBitIndex = (channel->ByteOffset << 3) + (channel->BitOffset); i < channel->ReportCount; i++, reportBitIndex += channel->ReportSize) { // // Set all the fields in the range // HidP_InsertData ((USHORT) (reportBitIndex >> 3), (USHORT) (reportBitIndex & 7), channel->ReportSize, Report, nullValue); } } else { HidP_InsertData (channel->ByteOffset, channel->BitOffset, channel->ReportSize, Report, nullValue); } } return status; } USAGE HidP_Index2Usage ( PHIDP_CHANNEL_DESC Channels, ULONG Index ) /*++ Routine Description: Given an array of channels convert an index (the likes of which you might find in an array field of a HID report) into a usage value. --*/ { USHORT len; PHIDP_CHANNEL_DESC startChannel = Channels; USAGE usageMin; USAGE usageMax; if (!Index) { return 0; } while (Channels->MoreChannels) { // The channels are listed in reverse order. Channels++; } while (Index) { if (Channels->IsRange) { usageMin = Channels->Range.UsageMin; usageMin = (usageMin ? usageMin : 1); // Index is 1 based (an index of zero is no usage at all) // But a UsageMin of zero means that UsageMin is exclusive. 
// That means that if the index is 1 and UsageMin is non-zero, // than this function should return UsageMin usageMax = Channels->Range.UsageMax; len = (usageMax + 1) - usageMin; // ^^^ Usage Max is inclusive. if (Index <= len) { return ((USAGE) Index) + usageMin - 1; } else { Index -= len; } } else if (1 == Index) { return Channels->NotRange.Usage; } else { Index--; } if (startChannel != Channels) { Channels--; continue; } return 0; } return 0; } ULONG HidP_Usage2Index ( PHIDP_CHANNEL_DESC Channels, USAGE Usage ) /*++ Routine Description: Given an usage convert it into an index suitable for placement into an array main item. --*/ { PHIDP_CHANNEL_DESC startChannel; ULONG index = 0; USAGE UsageMin; USAGE UsageMax; startChannel = Channels; while (Channels->MoreChannels) { Channels++; } for (; startChannel <= Channels; Channels--) { if (Channels->IsRange) { UsageMin = Channels->Range.UsageMin; UsageMin = (UsageMin ? UsageMin : 1); // Index is 1 based (an index of zero is no usage at all) // But a UsageMin of zero means that UsageMin is exclusive. // That means that if the index is 1 and UsageMin is non-zero, // than this function should return UsageMin UsageMax = Channels->Range.UsageMax; if ((UsageMin <= Usage) && (Usage <= UsageMax)) { return (index + 1 + Usage - UsageMin); } index += 1 + (UsageMax - UsageMin); } else { index++; if (Usage == Channels->NotRange.Usage) { return index; } } } return 0; } NTSTATUS __stdcall HidP_SetUnsetOneUsage ( struct _CHANNEL_REPORT_HEADER *, USAGE, USHORT, USAGE, PHIDP_PREPARSED_DATA, PCHAR, BOOLEAN); NTSTATUS __stdcall HidP_SetUsages ( IN HIDP_REPORT_TYPE ReportType, IN USAGE UsagePage, IN USHORT LinkCollection, IN PUSAGE UsageList, IN OUT PULONG UsageLength, IN PHIDP_PREPARSED_DATA PreparsedData, IN OUT PCHAR Report, IN ULONG ReportLength ) /*++ Routine Description: Please See hidpi.h for description. 
Notes: --*/ { struct _CHANNEL_REPORT_HEADER * iof; NTSTATUS status = HIDP_STATUS_SUCCESS; ULONG usageIndex = 0; CHECK_PPD (PreparsedData); switch (ReportType) { case HidP_Input: iof = &PreparsedData->Input; break; case HidP_Output: iof = &PreparsedData->Output; break; case HidP_Feature: iof = &PreparsedData->Feature; break; default: return HIDP_STATUS_INVALID_REPORT_TYPE; } if ((USHORT) ReportLength != iof->ByteLen) { return HIDP_STATUS_INVALID_REPORT_LENGTH; } if (0 == iof->ByteLen) { return HIDP_STATUS_REPORT_DOES_NOT_EXIST; } for (usageIndex = 0; usageIndex < *UsageLength; usageIndex++) { if (0 == UsageList [usageIndex]) { continue; } status = HidP_SetUnsetOneUsage (iof, UsagePage, LinkCollection, UsageList [usageIndex], PreparsedData, Report, TRUE); if (!NT_SUCCESS(status)) { break; } } *UsageLength = usageIndex; return status; } NTSTATUS __stdcall HidP_UnsetUsages ( IN HIDP_REPORT_TYPE ReportType, IN USAGE UsagePage, IN USHORT LinkCollection, IN PUSAGE UsageList, IN OUT PULONG UsageLength, IN PHIDP_PREPARSED_DATA PreparsedData, IN OUT PCHAR Report, IN ULONG ReportLength ) /*++ Routine Description: Please See hidpi.h for description. 
Notes: --*/ { struct _CHANNEL_REPORT_HEADER * iof; NTSTATUS status = HIDP_STATUS_SUCCESS; ULONG usageIndex = 0; CHECK_PPD (PreparsedData); switch (ReportType) { case HidP_Input: iof = &PreparsedData->Input; break; case HidP_Output: iof = &PreparsedData->Output; break; case HidP_Feature: iof = &PreparsedData->Feature; break; default: return HIDP_STATUS_INVALID_REPORT_TYPE; } if ((USHORT) ReportLength != iof->ByteLen) { return HIDP_STATUS_INVALID_REPORT_LENGTH; } if (0 == iof->ByteLen) { return HIDP_STATUS_REPORT_DOES_NOT_EXIST; } for (usageIndex = 0; usageIndex < *UsageLength; usageIndex++) { if (0 == UsageList [usageIndex]) { continue; } status = HidP_SetUnsetOneUsage (iof, UsagePage, LinkCollection, UsageList [usageIndex], PreparsedData, Report, FALSE); if (!NT_SUCCESS(status)) { break; } } *UsageLength = usageIndex; return status; } NTSTATUS __stdcall HidP_SetUnsetOneUsage ( struct _CHANNEL_REPORT_HEADER * IOF, USAGE UsagePage, USHORT LinkCollection, USAGE Usage, PHIDP_PREPARSED_DATA PreparsedData, PCHAR Report, BOOLEAN Set ) /*++ Routine Description: Perform the work of SetUsage one usage at a time. Yes this is slow but it works. Notes: This function assumes the report length has already been verified. 
--*/ { PHIDP_CHANNEL_DESC channel = 0; PHIDP_CHANNEL_DESC priChannel = 0; PHIDP_CHANNEL_DESC firstChannel = 0; // the channel where the array starts ULONG channelIndex = 0; ULONG reportByteIndex = 0; ULONG inspect = 0; USHORT reportBitIndex = 0; BOOLEAN wrongReportID = FALSE; BOOLEAN noArraySpace = FALSE; BOOLEAN notPressed = FALSE; NTSTATUS status = HIDP_STATUS_SUCCESS; for (channelIndex = IOF->Offset; channelIndex < IOF->Index; channelIndex++) { channel = (PreparsedData->Data + channelIndex); if (priChannel) { if (!priChannel->MoreChannels) { firstChannel = channel; } } else { firstChannel = channel; } priChannel = channel; if ((!channel->IsButton) || (channel->UsagePage != UsagePage)) { continue; } // // If LinkCollection is zero we will not filter by link collections // If channel->LinkCollection is zero this is the root collection. // Therefore if LinkCollection == channel->LinkCollection then this is OK // if ((!LinkCollection) || (LinkCollection == channel->LinkCollection) || ((HIDP_LINK_COLLECTION_ROOT == LinkCollection) && (0 == channel->LinkCollection))) { ; } else { continue; } if ( ((channel->IsRange) && (channel->Range.UsageMin <= Usage) && (Usage <= channel->Range.UsageMax)) || ((!channel->IsRange) && (channel->NotRange.Usage == Usage))) { // // Test the report ID to see if it is compatible. // if ((0 != Report[0]) && (channel->ReportID != (UCHAR) Report[0])) { // // Distinguish between the errors HIDP_USAGE_NOT_FOUND and // HIDP_INCOMPATIBLE_REPORT_ID. 
wrongReportID = TRUE; continue; } Report[0] = (CHAR) channel->ReportID; // // Set the report ID for this report // if (1 == channel->ReportSize) { reportBitIndex = (channel->ByteOffset << 3) + channel->BitOffset + (USHORT) (Usage - channel->Range.UsageMin); if (Set) { Report [reportBitIndex >> 3] |= (1 << (reportBitIndex & 7)); } else if (Report [reportBitIndex >> 3] & (1 << (reportBitIndex & 7))) { Report [reportBitIndex >> 3] &= ~(1 << (reportBitIndex & 7)); } else { return HIDP_STATUS_BUTTON_NOT_PRESSED; } return HIDP_STATUS_SUCCESS; } else if (Set) { // usage array for (reportBitIndex = channel->BitOffset; reportBitIndex < (channel->BitOffset + channel->BitLength); reportBitIndex += channel->ReportSize) { inspect = HidP_ExtractData ( (USHORT) ((reportBitIndex >> 3) + channel->ByteOffset), (USHORT) (reportBitIndex & 7), channel->ReportSize, Report); if (inspect) { // // Distinguish between errors HIDP_USAGE_NOT_FOUND and // HIDP_BUFFER_TOO_SMALL // noArraySpace = TRUE; continue; } inspect = HidP_Usage2Index (firstChannel, Usage); if (!inspect) { // // Gads! We should NEVER get here! // We already know that the given usage falls into the // current channel, so it should translate into an index. // return HIDP_STATUS_INTERNAL_ERROR; } HidP_InsertData ( (USHORT) ((reportBitIndex >> 3) + channel->ByteOffset), (USHORT) (reportBitIndex & 7), channel->ReportSize, Report, inspect); return HIDP_STATUS_SUCCESS; } // If we got to this point then there was no room to add this // usage into the given array. However there might be another // array later into which the given usage might fit. Let's continue // looking. while (channel->MoreChannels) { // Skip by all the additional channels that describe this // same data field. 
channelIndex++; channel = (PreparsedData->Data + channelIndex); } priChannel = channel; } else { // Set a Usage Array inspect = HidP_Usage2Index (firstChannel, Usage); reportBitIndex += channel->ByteOffset << 3; status = HidP_DeleteArrayEntry (reportBitIndex, channel->ReportSize, channel->ReportCount, inspect, Report); if (HIDP_STATUS_BUTTON_NOT_PRESSED == status) { notPressed = TRUE; continue; } if (NT_SUCCESS (status)) { return status; } else { ASSERT (0 == status); } } // end byte aray } } if (wrongReportID) { return HIDP_STATUS_INCOMPATIBLE_REPORT_ID; } if (notPressed) { return HIDP_STATUS_BUTTON_NOT_PRESSED; } if (noArraySpace) { return HIDP_STATUS_BUFFER_TOO_SMALL; } return HIDP_STATUS_USAGE_NOT_FOUND; } NTSTATUS __stdcall HidP_GetUsagesEx ( IN HIDP_REPORT_TYPE ReportType, IN USHORT LinkCollection, // Optional OUT PUSAGE_AND_PAGE ButtonList, IN OUT ULONG * UsageLength, IN PHIDP_PREPARSED_DATA PreparsedData, IN PCHAR Report, IN ULONG ReportLength ) /*++ Routine Description: Please see hidpi.h for description. --*/ { return HidP_GetUsages (ReportType, 0, LinkCollection, (PUSAGE) ButtonList, UsageLength, PreparsedData, Report, ReportLength); } NTSTATUS __stdcall HidP_GetUsages ( IN HIDP_REPORT_TYPE ReportType, IN USAGE UsagePage, IN USHORT LinkCollection, OUT USAGE * UsageList, IN OUT ULONG * UsageLength, IN PHIDP_PREPARSED_DATA PreparsedData, IN PCHAR Report, IN ULONG ReportLength ) /*++ Routine Description: Please see hidpi.h for description. 
Notes: --*/ { struct _CHANNEL_REPORT_HEADER * iof; PHIDP_CHANNEL_DESC channel; USHORT channelIndex = 0; USHORT usageListIndex = 0; USHORT reportBitIndex = 0; USHORT tmpBitIndex; NTSTATUS status = HIDP_STATUS_SUCCESS; ULONG data = 0; USHORT inspect = 0; BOOLEAN wrongReportID = FALSE; BOOLEAN found = FALSE; PUSAGE_AND_PAGE usageAndPage = (PUSAGE_AND_PAGE) UsageList; CHECK_PPD (PreparsedData); memset (UsageList, '\0', *UsageLength * sizeof (USAGE)); switch (ReportType) { case HidP_Input: iof = &PreparsedData->Input; break; case HidP_Output: iof = &PreparsedData->Output; break; case HidP_Feature: iof = &PreparsedData->Feature; break; default: return HIDP_STATUS_INVALID_REPORT_TYPE; } if ((USHORT) ReportLength != iof->ByteLen) { return HIDP_STATUS_INVALID_REPORT_LENGTH; } if (0 == iof->ByteLen) { return HIDP_STATUS_REPORT_DOES_NOT_EXIST; } for (channelIndex = iof->Offset; channelIndex < iof->Index; channelIndex++){ channel = (PreparsedData->Data + channelIndex); if ((!channel->IsButton) || ((UsagePage) && (channel->UsagePage != UsagePage))) { continue; } // // If LinkCollection is zero we will not filter by link collections // If channel->LinkCollection is zero this is the root collection. // Therefore if LinkCollection == channel->LinkCollection then this is OK // if ((!LinkCollection) || (LinkCollection == channel->LinkCollection) || ((HIDP_LINK_COLLECTION_ROOT == LinkCollection) && (0 == channel->LinkCollection))) { ; } else { continue; } // Test the report ID to see if it is compatible. if ((0 != Report[0]) && (channel->ReportID != (UCHAR) Report[0])) { // Distinguish between the errors HIDP_USAGE_NOT_FOUND and // HIDP_INCOMPATIBLE_REPORT_ID. wrongReportID = TRUE; continue; } found = TRUE; if (1 == channel->ReportSize) { // A bitfield // // Little endian (by bit) // Byte 2 |Byte 1 |Byte 0 // 765432107654321076543210 (bits) // // Get low byte first. (need the higher bits) // Offset is from bit zero. 
// for (reportBitIndex = channel->BitOffset; reportBitIndex < (channel->BitLength + channel->BitOffset); reportBitIndex++) { // Check it one bit at a time. tmpBitIndex = reportBitIndex + (channel->ByteOffset << 3); inspect = Report [tmpBitIndex >> 3] & (1 << (tmpBitIndex & 7)); tmpBitIndex = reportBitIndex - channel->BitOffset; if (inspect) { if (channel->IsRange) { inspect = channel->Range.UsageMin + tmpBitIndex; } else { inspect = channel->NotRange.Usage; } if (usageListIndex < *UsageLength) { if (0 == UsagePage) { usageAndPage[usageListIndex].UsagePage = channel->UsagePage; usageAndPage[usageListIndex].Usage = inspect; } else { UsageList[usageListIndex] = inspect; } } usageListIndex++; } } continue; } for (reportBitIndex = channel->BitOffset; reportBitIndex < (channel->BitOffset + channel->BitLength); reportBitIndex += channel->ReportSize) { // an array of usages. data = HidP_ExtractData ( (USHORT) ((reportBitIndex >> 3) + channel->ByteOffset), (USHORT) (reportBitIndex & 7), channel->ReportSize, Report); if (data) { inspect = HidP_Index2Usage (channel, data); if (!inspect) { // We found an invalid index. I'm not quite sure what // we should do with it. But lets just ignore it since // we cannot convert it into a real usage. continue; } if (usageListIndex < *UsageLength) { if (0 == UsagePage) { usageAndPage[usageListIndex].UsagePage = channel->UsagePage; usageAndPage[usageListIndex].Usage = inspect; } else { UsageList[usageListIndex] = inspect; } } usageListIndex++; } } while (channel->MoreChannels) { // Skip by all the additional channels that describe this // same data field. 
// (continuation of HidP_GetUsages: inside the per-channel for loop, skipping
// the extra channel descriptors that alias the same array field)
            channelIndex++;
            channel = (PreparsedData->Data + channelIndex);
        }
    } // end for channel

    // Tell the caller how many usages were actually present; if the supplied
    // buffer could not hold them all, report that as well.
    if (*UsageLength < usageListIndex) {
        status = HIDP_STATUS_BUFFER_TOO_SMALL;
    }
    *UsageLength = usageListIndex;

    // No channel matched the requested page/collection at all: distinguish
    // "report ID belongs to another report" from "usage does not exist".
    if (!found) {
        if (wrongReportID) {
            status = HIDP_STATUS_INCOMPATIBLE_REPORT_ID;
        } else {
            status = HIDP_STATUS_USAGE_NOT_FOUND;
        }
    }
    return status;
}

ULONG __stdcall
HidP_MaxUsageListLength (
    IN HIDP_REPORT_TYPE     ReportType,
    IN USAGE                UsagePage,
    IN PHIDP_PREPARSED_DATA PreparsedData
    )
/*++
Routine Description:
    Please see hidpi.h for description.

    Sums the ReportCount of every button channel (optionally filtered by
    UsagePage) for the given report type; returns 0 on any validation failure.
Notes:
--*/
{
    struct _CHANNEL_REPORT_HEADER * iof;
    PHIDP_CHANNEL_DESC channel;
    USHORT channelIndex = 0;
    ULONG len = 0;

    PAGED_CODE ();

    // NOTE(review): with &&, preparsed data is accepted when only ONE of the
    // two signatures matches; verify whether || (reject on either mismatch)
    // was intended before changing -- left as-is here.
    if ((HIDP_PREPARSED_DATA_SIGNATURE1 != PreparsedData->Signature1) &&
        (HIDP_PREPARSED_DATA_SIGNATURE2 != PreparsedData->Signature2)) {
        return 0;
    }

    switch (ReportType) {
    case HidP_Input:
        iof = &PreparsedData->Input;
        break;
    case HidP_Output:
        iof = &PreparsedData->Output;
        break;
    case HidP_Feature:
        iof = &PreparsedData->Feature;
        break;
    default:
        return 0;
    }

    for (channelIndex = iof->Offset; channelIndex < iof->Index; channelIndex++){
        channel = (PreparsedData->Data + channelIndex);

        if (channel->IsButton &&
            ((!UsagePage) || (channel->UsagePage == UsagePage))) {
            // How many buttons can show up in this device?
            // If this is a bitmap then the max number of buttons is the length
            // aka the count, if this is an array then the max number of buttons
            // is the number of array positions aka the count.
            len += channel->ReportCount;
        }
    }
    return len;
}

ULONG __stdcall
HidP_MaxDataListLength (
    IN HIDP_REPORT_TYPE     ReportType,
    IN PHIDP_PREPARSED_DATA PreparsedData
    )
/*++
Routine Description:
    Please see hidpi.h for description.
Notes: --*/ { struct _CHANNEL_REPORT_HEADER * iof; PHIDP_CHANNEL_DESC channel; USHORT channelIndex = 0; ULONG len = 0; PAGED_CODE (); if ((HIDP_PREPARSED_DATA_SIGNATURE1 != PreparsedData->Signature1) && (HIDP_PREPARSED_DATA_SIGNATURE2 != PreparsedData->Signature2)) { return 0; } switch (ReportType) { case HidP_Input: iof = &PreparsedData->Input; break; case HidP_Output: iof = &PreparsedData->Output; break; case HidP_Feature: iof = &PreparsedData->Feature; break; default: return 0; } for (channelIndex = iof->Offset; channelIndex < iof->Index; channelIndex++){ channel = (PreparsedData->Data + channelIndex); if (channel->IsButton) { // How many buttons can show up in this device? // If this is a bitmap then the max number of buttons is the length // aka the count, if this is an array then the max number of buttons // is the number of array positions aka the count. len += channel->ReportCount; } else if (channel->IsRange) { len += channel->ReportCount; } else { len += 1; } } return len; } NTSTATUS __stdcall HidP_SetUsageValue ( IN HIDP_REPORT_TYPE ReportType, IN USAGE UsagePage, IN USHORT LinkCollection, // Optional IN USAGE Usage, IN ULONG UsageValue, IN PHIDP_PREPARSED_DATA PreparsedData, IN OUT PCHAR Report, IN ULONG ReportLength ) /*++ Routine Description: Please see hidpi.h for description Notes: --*/ { struct _CHANNEL_REPORT_HEADER * iof; PHIDP_CHANNEL_DESC channel; ULONG channelIndex = 0; ULONG reportBitIndex = 0; NTSTATUS status = HIDP_STATUS_SUCCESS; BOOLEAN wrongReportID = FALSE; CHECK_PPD (PreparsedData); switch (ReportType) { case HidP_Input: iof = &PreparsedData->Input; break; case HidP_Output: iof = &PreparsedData->Output; break; case HidP_Feature: iof = &PreparsedData->Feature; break; default: return HIDP_STATUS_INVALID_REPORT_TYPE; } if ((USHORT) ReportLength != iof->ByteLen) { return HIDP_STATUS_INVALID_REPORT_LENGTH; } if (0 == iof->ByteLen) { return HIDP_STATUS_REPORT_DOES_NOT_EXIST; } for (channelIndex = iof->Offset; channelIndex < iof->Index; 
channelIndex++) { channel = (PreparsedData->Data + channelIndex); if ((channel->IsButton) || (channel->UsagePage != UsagePage)) { continue; } // // If LinkCollection is zero we will not filter by link collections // If channel->LinkCollection is zero this is the root collection. // Therefore if LinkCollection == channel->LinkCollection then this is OK // if ((!LinkCollection) || (LinkCollection == channel->LinkCollection) || ((HIDP_LINK_COLLECTION_ROOT == LinkCollection) && (0 == channel->LinkCollection))) { ; } else { continue; } if (channel->IsRange) { if ((channel->Range.UsageMin <= Usage) && (Usage <= channel->Range.UsageMax)) { reportBitIndex = (channel->ByteOffset << 3) + channel->BitOffset + ( (Usage - channel->Range.UsageMin) * channel->ReportSize); } else { continue; } } else { if (channel->NotRange.Usage == Usage) { reportBitIndex = (channel->ByteOffset << 3) + channel->BitOffset; } else { continue; } } // Test the report ID to see if it is compatible. if ((0 != Report[0]) && (channel->ReportID != (UCHAR) Report[0])) { // Distinguish between the errors HIDP_USAGE_NOT_FOUND and // HIDP_INCOMPATIBLE_REPORT_ID. 
wrongReportID = TRUE; continue; } Report[0] = (CHAR) channel->ReportID; // Set the report ID for this report HidP_InsertData ((USHORT) (reportBitIndex >> 3), (USHORT) (reportBitIndex & 7), channel->ReportSize, Report, UsageValue); return HIDP_STATUS_SUCCESS; } if (wrongReportID) { return HIDP_STATUS_INCOMPATIBLE_REPORT_ID; } return HIDP_STATUS_USAGE_NOT_FOUND; } NTSTATUS __stdcall HidP_SetUsageValueArray ( IN HIDP_REPORT_TYPE ReportType, IN USAGE UsagePage, IN USHORT LinkCollection, // Optional IN USAGE Usage, OUT PCHAR UsageValue, IN USHORT UsageValueByteLength, IN PHIDP_PREPARSED_DATA PreparsedData, IN PCHAR Report, IN ULONG ReportLength ) /*++ Routine Description: Please see hidpi.h for description Notes: --*/ { struct _CHANNEL_REPORT_HEADER * iof; PHIDP_CHANNEL_DESC channel; ULONG channelIndex = 0; ULONG reportBitIndex; ULONG i,j; NTSTATUS status = HIDP_STATUS_SUCCESS; BOOLEAN wrongReportID = FALSE; CHECK_PPD (PreparsedData); switch (ReportType) { case HidP_Input: iof = &PreparsedData->Input; break; case HidP_Output: iof = &PreparsedData->Output; break; case HidP_Feature: iof = &PreparsedData->Feature; break; default: return HIDP_STATUS_INVALID_REPORT_TYPE; } if ((USHORT) ReportLength != iof->ByteLen) { return HIDP_STATUS_INVALID_REPORT_LENGTH; } if (0 == iof->ByteLen) { return HIDP_STATUS_REPORT_DOES_NOT_EXIST; } for (channelIndex = iof->Offset; channelIndex < iof->Index; channelIndex++){ channel = (PreparsedData->Data + channelIndex); if ((channel->IsButton) || (channel->UsagePage != UsagePage)) { continue; } // // If LinkCollection is zero we will not filter by link collections // If channel->LinkCollection is zero this is the root collection. 
// Therefore if LinkCollection == channel->LinkCollection then this is OK // if ((!LinkCollection) || (LinkCollection == channel->LinkCollection) || ((HIDP_LINK_COLLECTION_ROOT == LinkCollection) && (0 == channel->LinkCollection))) { ; } else { continue; } if (channel->IsRange) { if ((channel->Range.UsageMin <= Usage) && (Usage <= channel->Range.UsageMax)) { return HIDP_STATUS_NOT_VALUE_ARRAY; } else { continue; } } else { if (channel->NotRange.Usage == Usage) { if (1 == channel->ReportCount) { return HIDP_STATUS_NOT_VALUE_ARRAY; } reportBitIndex =(channel->ByteOffset << 3) + channel->BitOffset; } else { continue; } } // Test the report ID to see if it is compatible. if ((0 != Report[0]) && (channel->ReportID != (UCHAR) Report[0])) { // Distinguish between the errors HIDP_USAGE_NOT_FOUND and // HIDP_INCOMPATIBLE_REPORT_ID. wrongReportID = TRUE; continue; } Report[0] = (CHAR) channel->ReportID; // Set the report ID for this report if ((UsageValueByteLength * 8) < (channel->ReportCount * channel->ReportSize)) { return HIDP_STATUS_BUFFER_TOO_SMALL; } if (0 == (channel->ReportSize % 8)) { // // set the data the easy way: one byte at a time. // for (i = 0; i < channel->ReportCount; i++) { for (j = 0; j < (UCHAR) (channel->ReportSize / 8); j++) { HidP_InsertData ((USHORT) (reportBitIndex >> 3), (USHORT) (reportBitIndex & 7), 8, Report, *UsageValue); reportBitIndex += 8; UsageValue++; } } } else { // // Do it the hard way: one bit at a time. 
// return HIDP_STATUS_NOT_IMPLEMENTED; } return HIDP_STATUS_SUCCESS; } if (wrongReportID) { return HIDP_STATUS_INCOMPATIBLE_REPORT_ID; } return HIDP_STATUS_USAGE_NOT_FOUND; } NTSTATUS __stdcall HidP_SetScaledUsageValue ( IN HIDP_REPORT_TYPE ReportType, IN USAGE UsagePage, IN USHORT LinkCollection, // Optional IN USAGE Usage, IN LONG UsageValue, IN PHIDP_PREPARSED_DATA PreparsedData, IN OUT PCHAR Report, IN ULONG ReportLength ) /*++ Routine Description: Please see hidpi.h for description Notes: --*/ { struct _CHANNEL_REPORT_HEADER * iof; PHIDP_CHANNEL_DESC channel; ULONG channelIndex = 0; ULONG reportBitIndex = 0; NTSTATUS status = HIDP_STATUS_USAGE_NOT_FOUND; LONG logicalMin, logicalMax; LONG physicalMin, physicalMax; LONG value; BOOLEAN wrongReportID = FALSE; CHECK_PPD (PreparsedData); switch (ReportType) { case HidP_Input: iof = &PreparsedData->Input; break; case HidP_Output: iof = &PreparsedData->Output; break; case HidP_Feature: iof = &PreparsedData->Feature; break; default: return HIDP_STATUS_INVALID_REPORT_TYPE; } if ((USHORT) ReportLength != iof->ByteLen) { return HIDP_STATUS_INVALID_REPORT_LENGTH; } if (0 == iof->ByteLen) { return HIDP_STATUS_REPORT_DOES_NOT_EXIST; } for (channelIndex = iof->Offset; channelIndex < iof->Index; channelIndex++) { channel = (PreparsedData->Data + channelIndex); if ((channel->IsButton) || (channel->UsagePage != UsagePage)) { continue; } // // If LinkCollection is zero we will not filter by link collections // If channel->LinkCollection is zero this is the root collection. 
// Therefore if LinkCollection == channel->LinkCollection then this is OK // if ((!LinkCollection) || (LinkCollection == channel->LinkCollection) || ((HIDP_LINK_COLLECTION_ROOT == LinkCollection) && (0 == channel->LinkCollection))) { ; } else { continue; } if (channel->IsRange) { if ((channel->Range.UsageMin <= Usage) && (Usage <= channel->Range.UsageMax)) { reportBitIndex = (channel->ByteOffset << 3) + channel->BitOffset + ( (Usage - channel->Range.UsageMin) * channel->ReportSize); } else { continue; } } else { if (channel->NotRange.Usage == Usage) { reportBitIndex = (channel->ByteOffset << 3) + channel->BitOffset; } else { continue; } } // Test the report ID to see if it is compatible. if ((0 != Report[0]) && (channel->ReportID != (UCHAR) Report[0])) { // Distinguish between the errors HIDP_USAGE_NOT_FOUND and // HIDP_INCOMPATIBLE_REPORT_ID. wrongReportID = TRUE; continue; } Report[0] = (CHAR) channel->ReportID; // Set the report ID for this report logicalMin = channel->Data.LogicalMin; logicalMax = channel->Data.LogicalMax; physicalMin = channel->Data.PhysicalMin; physicalMax = channel->Data.PhysicalMax; // // The code path here is ALWAYS the same, we should test it once // and then use some sort of switch statement to do the calculation. // if ((0 == physicalMin) && (0 == physicalMax) && (logicalMin != logicalMax)) { // // The device did not set the physical min and max values // if ((logicalMin <= UsageValue) && (UsageValue <= logicalMax)) { value = UsageValue; // // fix the sign bit // I should store away the sign bit somewhere so I don't // have to calculate it all the time. // if (value & 0x80000000) { value |= (1 << (channel->ReportSize - 1)); } else { value &= ((1 << (channel->ReportSize - 1)) - 1); } } else { if (channel->Data.HasNull) { value = (1 << (channel->ReportSize - 1));// Most negitive value status = HIDP_STATUS_NULL; } else { return HIDP_STATUS_VALUE_OUT_OF_RANGE; } } } else { // // The device has physical descriptors. 
// if ((logicalMax <= logicalMin) || (physicalMax <= physicalMin)) { return HIDP_STATUS_BAD_LOG_PHY_VALUES; } if ((physicalMin <= UsageValue) && (UsageValue <= physicalMax)) { value = logicalMin + ((UsageValue - physicalMin) * (logicalMax - logicalMin + 1) / (physicalMax - physicalMin + 1)); } else { if (channel->Data.HasNull) { value = (1 << (channel->ReportSize - 1));// Most negitive value status = HIDP_STATUS_NULL; } else { return HIDP_STATUS_VALUE_OUT_OF_RANGE; } } } HidP_InsertData ((USHORT) (reportBitIndex >> 3), (USHORT) (reportBitIndex & 7), channel->ReportSize, Report, (ULONG) value); return HIDP_STATUS_SUCCESS; } if (wrongReportID) { return HIDP_STATUS_INCOMPATIBLE_REPORT_ID; } return status; } NTSTATUS __stdcall HidP_GetUsageValue ( IN HIDP_REPORT_TYPE ReportType, IN USAGE UsagePage, IN USHORT LinkCollection, // Optional IN USAGE Usage, OUT PULONG UsageValue, IN PHIDP_PREPARSED_DATA PreparsedData, IN PCHAR Report, IN ULONG ReportLength ) /*++ Routine Description: Please see hidpi.h for description Notes: --*/ { struct _CHANNEL_REPORT_HEADER * iof; PHIDP_CHANNEL_DESC channel; ULONG channelIndex = 0; ULONG reportBitIndex = 0; ULONG reportByteIndex = 0; NTSTATUS status = HIDP_STATUS_SUCCESS; ULONG inspect = 0; BOOLEAN wrongReportID = FALSE; CHECK_PPD (PreparsedData); switch (ReportType) { case HidP_Input: iof = &PreparsedData->Input; break; case HidP_Output: iof = &PreparsedData->Output; break; case HidP_Feature: iof = &PreparsedData->Feature; break; default: return HIDP_STATUS_INVALID_REPORT_TYPE; } if ((USHORT) ReportLength != iof->ByteLen) { return HIDP_STATUS_INVALID_REPORT_LENGTH; } if (0 == iof->ByteLen) { return HIDP_STATUS_REPORT_DOES_NOT_EXIST; } for (channelIndex = iof->Offset; channelIndex < iof->Index; channelIndex++) { channel = (PreparsedData->Data + channelIndex); if ((channel->IsButton) || (channel->UsagePage != UsagePage)) { continue; } // // If LinkCollection is zero we will not filter by link collections // If channel->LinkCollection is 
zero this is the root collection. // Therefore if LinkCollection == channel->LinkCollection then this is OK // if ((!LinkCollection) || (LinkCollection == channel->LinkCollection) || ((HIDP_LINK_COLLECTION_ROOT == LinkCollection) && (0 == channel->LinkCollection))) { ; } else { continue; } if (channel->IsRange) { if ((channel->Range.UsageMin <= Usage) && (Usage <= channel->Range.UsageMax)) { reportBitIndex = (channel->ByteOffset << 3) + channel->BitOffset + ( (Usage - channel->Range.UsageMin) * channel->ReportSize); } else { continue; } } else { if (channel->NotRange.Usage == Usage) { reportBitIndex = (channel->ByteOffset << 3) + channel->BitOffset; } else { continue; } } // Test the report ID to see if it is compatible. if ((0 != Report[0]) && (channel->ReportID != (UCHAR) Report[0])) { // Distinguish between the errors HIDP_USAGE_NOT_FOUND and // HIDP_INCOMPATIBLE_REPORT_ID. wrongReportID = TRUE; continue; } inspect = HidP_ExtractData ((USHORT) (reportBitIndex >> 3), (USHORT) (reportBitIndex & 7), channel->ReportSize, Report); *UsageValue = inspect; return HIDP_STATUS_SUCCESS; } if (wrongReportID) { return HIDP_STATUS_INCOMPATIBLE_REPORT_ID; } return HIDP_STATUS_USAGE_NOT_FOUND; } NTSTATUS __stdcall HidP_GetUsageValueArray ( IN HIDP_REPORT_TYPE ReportType, IN USAGE UsagePage, IN USHORT LinkCollection, // Optional IN USAGE Usage, OUT PCHAR UsageValue, IN USHORT UsageValueByteLength, IN PHIDP_PREPARSED_DATA PreparsedData, IN PCHAR Report, IN ULONG ReportLength ) /*++ Routine Description: Please see hidpi.h for description Notes: --*/ { struct _CHANNEL_REPORT_HEADER * iof; PHIDP_CHANNEL_DESC channel; ULONG channelIndex = 0; ULONG reportBitIndex; ULONG i,j; NTSTATUS status = HIDP_STATUS_SUCCESS; ULONG inspect = 0; BOOLEAN wrongReportID = FALSE; CHECK_PPD (PreparsedData); switch (ReportType) { case HidP_Input: iof = &PreparsedData->Input; break; case HidP_Output: iof = &PreparsedData->Output; break; case HidP_Feature: iof = &PreparsedData->Feature; break; default: 
return HIDP_STATUS_INVALID_REPORT_TYPE; } if ((USHORT) ReportLength != iof->ByteLen) { return HIDP_STATUS_INVALID_REPORT_LENGTH; } if (0 == iof->ByteLen) { return HIDP_STATUS_REPORT_DOES_NOT_EXIST; } for (channelIndex = iof->Offset; channelIndex < iof->Index; channelIndex++){ channel = (PreparsedData->Data + channelIndex); if ((channel->IsButton) || (channel->UsagePage != UsagePage)) { continue; } // // If LinkCollection is zero we will not filter by link collections // If channel->LinkCollection is zero this is the root collection. // Therefore if LinkCollection == channel->LinkCollection then this is OK // if ((!LinkCollection) || (LinkCollection == channel->LinkCollection) || ((HIDP_LINK_COLLECTION_ROOT == LinkCollection) && (0 == channel->LinkCollection))) { ; } else { continue; } if (channel->IsRange) { if ((channel->Range.UsageMin <= Usage) && (Usage <= channel->Range.UsageMax)) { return HIDP_STATUS_NOT_VALUE_ARRAY; } else { continue; } } else { if (channel->NotRange.Usage == Usage) { if (1 == channel->ReportCount) { return HIDP_STATUS_NOT_VALUE_ARRAY; } reportBitIndex =(channel->ByteOffset << 3) + channel->BitOffset; } else { continue; } } // Test the report ID to see if it is compatible. if ((0 != Report[0]) && (channel->ReportID != (UCHAR) Report[0])) { // Distinguish between the errors HIDP_USAGE_NOT_FOUND and // HIDP_INCOMPATIBLE_REPORT_ID. 
wrongReportID = TRUE; continue; } if ((UsageValueByteLength * 8) < (channel->ReportCount * channel->ReportSize)) { return HIDP_STATUS_BUFFER_TOO_SMALL; } if (0 == (channel->ReportSize % 8)) { // // Retrieve the data the easy way // for (i = 0; i < channel->ReportCount; i++) { for (j = 0; j < (USHORT) (channel->ReportSize / 8); j++) { *UsageValue = (CHAR) HidP_ExtractData ( (USHORT) (reportBitIndex >> 3), (USHORT) (reportBitIndex & 7), 8, Report); reportBitIndex += 8; UsageValue++; } } } else { // // Do it the hard way // return HIDP_STATUS_NOT_IMPLEMENTED; } return HIDP_STATUS_SUCCESS; } if (wrongReportID) { return HIDP_STATUS_INCOMPATIBLE_REPORT_ID; } return HIDP_STATUS_USAGE_NOT_FOUND; } NTSTATUS __stdcall HidP_GetScaledUsageValue ( IN HIDP_REPORT_TYPE ReportType, IN USAGE UsagePage, IN USHORT LinkCollection, // Optional IN USAGE Usage, OUT PLONG UsageValue, IN PHIDP_PREPARSED_DATA PreparsedData, IN PCHAR Report, IN ULONG ReportLength ) /*++ Routine Description: Please see hidpi.h for description Notes: --*/ { struct _CHANNEL_REPORT_HEADER * iof; PHIDP_CHANNEL_DESC channel; ULONG channelIndex = 0; ULONG reportBitIndex = 0; ULONG reportByteIndex = 0; NTSTATUS status = HIDP_STATUS_SUCCESS; ULONG inspect = 0; LONG logicalMin, logicalMax; LONG physicalMin, physicalMax; LONG value; BOOLEAN wrongReportID = FALSE; CHECK_PPD (PreparsedData); switch (ReportType) { case HidP_Input: iof = &PreparsedData->Input; break; case HidP_Output: iof = &PreparsedData->Output; break; case HidP_Feature: iof = &PreparsedData->Feature; break; default: return HIDP_STATUS_INVALID_REPORT_TYPE; } if ((USHORT) ReportLength != iof->ByteLen) { return HIDP_STATUS_INVALID_REPORT_LENGTH; } if (0 == iof->ByteLen) { return HIDP_STATUS_REPORT_DOES_NOT_EXIST; } for (channelIndex = iof->Offset; channelIndex < iof->Index; channelIndex++) { channel = (PreparsedData->Data + channelIndex); if ((channel->IsButton) || (channel->UsagePage != UsagePage)) { continue; } // // If LinkCollection is zero we will not 
filter by link collections // If channel->LinkCollection is zero this is the root collection. // Therefore if LinkCollection == channel->LinkCollection then this is OK // if ((!LinkCollection) || (LinkCollection == channel->LinkCollection) || ((HIDP_LINK_COLLECTION_ROOT == LinkCollection) && (0 == channel->LinkCollection))) { ; } else { continue; } if (channel->IsRange) { if ((channel->Range.UsageMin <= Usage) && (Usage <= channel->Range.UsageMax)) { reportBitIndex = (channel->ByteOffset << 3) + channel->BitOffset + ( (Usage - channel->Range.UsageMin) * channel->ReportSize); } else { continue; } } else { if (channel->NotRange.Usage == Usage) { reportBitIndex = (channel->ByteOffset << 3) + channel->BitOffset; } else { continue; } } // Test the report ID to see if it is compatible. if ((0 != Report[0]) && (channel->ReportID != (UCHAR) Report[0])) { // Distinguish between the errors HIDP_USAGE_NOT_FOUND and // HIDP_INCOMPATIBLE_REPORT_ID. wrongReportID = TRUE; continue; } logicalMin = channel->Data.LogicalMin; logicalMax = channel->Data.LogicalMax; physicalMin = channel->Data.PhysicalMin; physicalMax = channel->Data.PhysicalMax; inspect = HidP_ExtractData ((USHORT) (reportBitIndex >> 3), (USHORT) (reportBitIndex & 7), channel->ReportSize, Report); // // Sign extend the value; // Find the top most bit of the field. // (logical and with 1 shifted by bit length minus one) // based on that, set the upper most bits. // value = (LONG) (inspect | ((inspect & (1 << (channel->ReportSize - 1))) ? ((~(1 << (channel->ReportSize - 1))) + 1) : 0)); // // the code path here is ALWAYS the same, we should test it once // and then use some sort of switch statement to do the calculation. 
// if ((0 == physicalMin) && (0 == physicalMax) && (logicalMin != logicalMax)) { // // The Device did not set the physical Min and Max Values // *UsageValue = value; } else if ((logicalMax <= logicalMin) || (physicalMax <= physicalMin)) { *UsageValue = 0; return HIDP_STATUS_BAD_LOG_PHY_VALUES; } else { // the Min and Max are both inclusive. // The value is in range // *UsageValue = physicalMin + (((value - logicalMin) * // (physicalMax - physicalMin)) / // (logicalMax - logicalMin)); // not enough accuracy. // *UsageValue = physicalMin + (LONG)(((LONGLONG)(value - logicalMin) * (LONGLONG)(physicalMax - physicalMin)) / (LONGLONG)(logicalMax - logicalMin)); } if ((logicalMin <= value) && (value <= logicalMax)) { return HIDP_STATUS_SUCCESS; } else { // The value is not in range *UsageValue = 0; if (channel->Data.HasNull) { return HIDP_STATUS_NULL; } else { return HIDP_STATUS_VALUE_OUT_OF_RANGE; } } } if (wrongReportID) { return HIDP_STATUS_INCOMPATIBLE_REPORT_ID; } return HIDP_STATUS_USAGE_NOT_FOUND; } NTSTATUS __stdcall HidP_SetOneData ( struct _CHANNEL_REPORT_HEADER * Iof, IN PHIDP_DATA Data, IN PHIDP_PREPARSED_DATA PreparsedData, IN OUT PCHAR Report ) /*++ Routine Description: Please see hidpi.h for description Notes: --*/ { PHIDP_CHANNEL_DESC channel; ULONG inspect; NTSTATUS status = HIDP_STATUS_SUCCESS; USHORT channelIndex = 0; USHORT dataListIndex = 0; USHORT reportBitIndex; BOOLEAN wrongReportID = FALSE; BOOLEAN noArraySpace = FALSE; BOOLEAN notPressed = FALSE; for (channelIndex = Iof->Offset; channelIndex < Iof->Index; channelIndex++){ channel = (PreparsedData->Data + channelIndex); if ((channel->Range.DataIndexMin <= Data->DataIndex) && (Data->DataIndex <= channel->Range.DataIndexMax)) { if ((!channel->IsRange) && (1 != channel->ReportCount)) { // // This value array. We cannot access this here. // return HIDP_STATUS_IS_VALUE_ARRAY; } // Test the report ID to see if it is compatible. 
if (0 != Report[0]) { if (channel->ReportID != (UCHAR) Report[0]) { wrongReportID = TRUE; continue; } } else { Report[0] = (CHAR) channel->ReportID; } if (channel->IsButton) { if (1 == channel->ReportSize) { // A bitfield // // Little endian (by bit) // Byte 2 |Byte 1 |Byte 0 // 765432107654321076543210 (bits) // // Get low byte first. (need the higher bits) // Offset is from bit zero. // reportBitIndex = (channel->ByteOffset << 3) + channel->BitOffset + (USHORT) (Data->DataIndex - channel->Range.DataIndexMin); if (Data->On) { Report [reportBitIndex >> 3] |= (1 << (reportBitIndex & 7)); } else if (Report [reportBitIndex >> 3] & (1 << (reportBitIndex & 7))) { Report [reportBitIndex >> 3] &= ~(1 << (reportBitIndex & 7)); } else { return HIDP_STATUS_BUTTON_NOT_PRESSED; } return HIDP_STATUS_SUCCESS; } // // Not a bit field // an array of usages then. // // // Are we clearing a usage from this array? // if (FALSE == Data->On) { // // NB Wizard Time (tm) // // We know that data indices are assigned consecutively // for every control, and that the array channels // are reversed in the channel array. // // inspect is the index (1 based not zero based) into the // channel array. // // Skip to the last channel that describes this same data // fild; // while (channel->MoreChannels) { channelIndex++; channel++; } inspect = Data->DataIndex - channel->Range.DataIndexMin + 1; if (0 == channel->Range.UsageMin) { inspect--; } // Clear the value of inspect which is the usage translated // to the index in the array. reportBitIndex = channel->BitOffset + (channel->ByteOffset << 3); status = HidP_DeleteArrayEntry (reportBitIndex, channel->ReportSize, channel->ReportCount, inspect, Report); if (HIDP_STATUS_BUTTON_NOT_PRESSED == status) { notPressed = TRUE; continue; } if (NT_SUCCESS (status)) { return status; } else { ASSERT (0 == status); } } // // We are clearly setting a usage into an array. 
// for (reportBitIndex = channel->BitOffset; reportBitIndex < (channel->BitOffset + channel->BitLength); reportBitIndex += channel->ReportSize) { // Search for an empty entry in this array inspect = (USHORT) HidP_ExtractData ( (USHORT) ((reportBitIndex >> 3) + channel->ByteOffset), (USHORT) (reportBitIndex & 7), channel->ReportSize, Report); if (inspect) { // // Distinguish between errors HIDP_INDEX_NOT_FOUND and // HIDP_BUFFER_TOO_SMALL // noArraySpace = TRUE; continue; } // // NB Wizard Time (tm) // // We know that data indices are assigned consecutively // for every control, and that the array channels // are reversed in the channel array. // // inspect is the index (1 based not zero based) into the // channel array. // // Skip to the last channel that describes this same data // fild; // while (channel->MoreChannels) { channelIndex++; channel++; } inspect = Data->DataIndex - channel->Range.DataIndexMin + 1; if (0 == channel->Range.UsageMin) { inspect--; } HidP_InsertData ( (USHORT) ((reportBitIndex >> 3) + channel->ByteOffset), (USHORT) (reportBitIndex & 7), channel->ReportSize, Report, inspect); return HIDP_STATUS_SUCCESS; } // end of search for entry continue; } // // Not a button therefore a value. 
// reportBitIndex = (channel->ByteOffset << 3) + channel->BitOffset + ( (Data->DataIndex - channel->Range.DataIndexMin) * channel->ReportSize); HidP_InsertData ((USHORT) (reportBitIndex >> 3), (USHORT) (reportBitIndex & 7), channel->ReportSize, Report, Data->RawValue); return HIDP_STATUS_SUCCESS; } // end matched data index } // end for loop if (wrongReportID) { return HIDP_STATUS_INCOMPATIBLE_REPORT_ID; } if (notPressed) { return HIDP_STATUS_BUTTON_NOT_PRESSED; } if (noArraySpace) { return HIDP_STATUS_BUFFER_TOO_SMALL; } return HIDP_STATUS_DATA_INDEX_NOT_FOUND; } NTSTATUS HidP_SetData ( IN HIDP_REPORT_TYPE ReportType, IN PHIDP_DATA DataList, IN OUT PULONG DataLength, IN PHIDP_PREPARSED_DATA PreparsedData, IN OUT PCHAR Report, IN ULONG ReportLength ) { ULONG dataIndex; NTSTATUS status; struct _CHANNEL_REPORT_HEADER * iof; CHECK_PPD (PreparsedData); switch (ReportType) { case HidP_Input: iof = &PreparsedData->Input; break; case HidP_Output: iof = &PreparsedData->Output; break; case HidP_Feature: iof = &PreparsedData->Feature; break; default: return HIDP_STATUS_INVALID_REPORT_TYPE; } if ((USHORT) ReportLength != iof->ByteLen) { return HIDP_STATUS_INVALID_REPORT_LENGTH; } if (0 == iof->ByteLen) { return HIDP_STATUS_REPORT_DOES_NOT_EXIST; } for (dataIndex = 0; dataIndex < *DataLength; dataIndex++, DataList++) { status = HidP_SetOneData (iof, DataList, PreparsedData, Report); if (!NT_SUCCESS (status)) { break; } } *DataLength = dataIndex; return status; } NTSTATUS __stdcall HidP_GetData ( IN HIDP_REPORT_TYPE ReportType, OUT PHIDP_DATA DataList, IN OUT PULONG DataLength, IN PHIDP_PREPARSED_DATA PreparsedData, IN PCHAR Report, IN ULONG ReportLength ) { struct _CHANNEL_REPORT_HEADER * iof; PHIDP_CHANNEL_DESC channel; ULONG inspect; USHORT channelIndex = 0; USHORT dataListIndex = 0; USHORT reportBitIndex; USHORT tmpBitIndex; USHORT tmpDataIndex; NTSTATUS status = HIDP_STATUS_SUCCESS; CHECK_PPD (PreparsedData); switch (ReportType) { case HidP_Input: iof = 
&PreparsedData->Input; break; case HidP_Output: iof = &PreparsedData->Output; break; case HidP_Feature: iof = &PreparsedData->Feature; break; default: return HIDP_STATUS_INVALID_REPORT_TYPE; } if ((USHORT) ReportLength != iof->ByteLen) { return HIDP_STATUS_INVALID_REPORT_LENGTH; } if (0 == iof->ByteLen) { return HIDP_STATUS_REPORT_DOES_NOT_EXIST; } for (channelIndex = iof->Offset; channelIndex < iof->Index; channelIndex++) { channel = (PreparsedData->Data + channelIndex); if ((!channel->IsRange) && (1 != channel->ReportCount)) { // // This value array. We cannot access this here. // continue; } // Test the report ID to see if it is compatible. if ((0 != Report[0]) && (channel->ReportID != (UCHAR) Report[0])) { continue; } if (channel->IsButton) { if (1 == channel->ReportSize) { // A bitfield // // Little endian (by bit) // Byte 2 |Byte 1 |Byte 0 // 765432107654321076543210 (bits) // // Get low byte first. (need the higher bits) // Offset is from bit zero. // for (reportBitIndex = channel->BitOffset; reportBitIndex < (channel->BitLength + channel->BitOffset); reportBitIndex++) { // Check it one bit at a time. tmpBitIndex = reportBitIndex + (channel->ByteOffset << 3); inspect = Report [tmpBitIndex >> 3] & (1 << (tmpBitIndex & 7)); tmpBitIndex = reportBitIndex - channel->BitOffset; if (inspect) { if (channel->IsRange) { inspect = channel->Range.DataIndexMin + tmpBitIndex; } else { inspect = channel->NotRange.DataIndex; } if (dataListIndex < *DataLength) { DataList[dataListIndex].On = TRUE; DataList[dataListIndex].DataIndex = (USHORT)inspect; } dataListIndex++; } } continue; } // // Not a bit field // an array of usages. 
// for (reportBitIndex = channel->BitOffset; reportBitIndex < (channel->BitOffset + channel->BitLength); reportBitIndex += channel->ReportSize) { inspect = (USHORT) HidP_ExtractData ( (USHORT) ((reportBitIndex >> 3) + channel->ByteOffset), (USHORT) (reportBitIndex & 7), channel->ReportSize, Report); if (inspect) { // // NB Wizard Time (tm) // // We know that data indices are assigned consecutively // for every control, and that the array channels // are reversed in the channel array. // // inspect is the index (1 based not zero based) into the // channel array. // if (0 == inspect) { continue; } // // Skip to the last channel that describes this same data // fild; // while (channel->MoreChannels) { channelIndex++; channel++; } inspect += channel->Range.DataIndexMin - 1; if (0 == channel->Range.UsageMin) { inspect++; } if (dataListIndex < *DataLength) { DataList [dataListIndex].On = TRUE; DataList [dataListIndex].DataIndex = (USHORT) inspect; } dataListIndex++; } } continue; } // // Not a button so therefore a value. // for (reportBitIndex = channel->BitOffset, tmpDataIndex = 0; reportBitIndex < (channel->BitOffset + channel->BitLength); reportBitIndex += channel->ReportSize, tmpDataIndex++) { inspect = HidP_ExtractData ( (USHORT) ((reportBitIndex >> 3) + channel->ByteOffset), (USHORT) (reportBitIndex & 7), channel->ReportSize, Report); if (dataListIndex < *DataLength) { ASSERT(tmpDataIndex + channel->Range.DataIndexMin <= channel->Range.DataIndexMax); DataList [dataListIndex].RawValue = inspect; DataList [dataListIndex].DataIndex = channel->Range.DataIndexMin + tmpDataIndex; } dataListIndex++; } } if (*DataLength < dataListIndex) { status = HIDP_STATUS_BUFFER_TOO_SMALL; } *DataLength = dataListIndex; return status; }
mindcloud69/Sakura_mod
src/main/java/cn/mcmod/sakura/potion/PotionExp.java
package cn.mcmod.sakura.potion;

import cn.mcmod.sakura.SakuraMain;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.Gui;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.potion.Potion;
import net.minecraft.potion.PotionEffect;
import net.minecraftforge.event.entity.living.LivingExperienceDropEvent;
import net.minecraftforge.fml.common.eventhandler.SubscribeEvent;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;

/**
 * "Exp Up" potion effect: players with this effect active receive bonus
 * experience from mobs they kill (applied via the Forge
 * {@link LivingExperienceDropEvent} hook below).
 */
public class PotionExp extends Potion {

    protected PotionExp() {
        // false = not a harmful ("bad") effect; 0xff00ff is the effect colour.
        super(false, 0xff00ff);
        setPotionName("sakura.effect.exp_up");
        setRegistryName(SakuraMain.MODID, "exp_up");
    }

    /**
     * Forge event handler: when a player with this effect kills a mob,
     * increase the dropped experience by (original / 2) * amplifier.
     */
    @SubscribeEvent
    public void onDropExp(LivingExperienceDropEvent event) {
        if (event.getAttackingPlayer() != null) {
            EntityPlayer player = event.getAttackingPlayer();
            if (player.isPotionActive(this)) {
                // NOTE(review): the amplifier of a level-I effect is 0, so
                // level I grants no bonus at all -- confirm this is intended.
                int exp = (event.getOriginalExperience() / 2) * player.getActivePotionEffect(this).getAmplifier();
                event.setDroppedExperience(event.getOriginalExperience() + exp);
            }
        }
    }

    /** Draws the effect icon in the inventory screen (client only). */
    @SideOnly(Side.CLIENT)
    public void renderInventoryEffect(int x, int y, PotionEffect effect, Minecraft mc) {
        mc.getTextureManager().bindTexture(PotionLoader.res);
        mc.currentScreen.drawTexturedModalRect(x + 6, y + 7, 0, 0, 18, 18);
    }

    /** Draws the effect icon in the in-game HUD overlay (client only). */
    @SideOnly(Side.CLIENT)
    public void renderHUDEffect(int x, int y, PotionEffect effect, Minecraft mc, float alpha) {
        mc.getTextureManager().bindTexture(PotionLoader.res);
        Gui.drawModalRectWithCustomSizedTexture(x + 3, y + 3, 0, 0, 18, 18, 256.0F, 256.0F);
    }
}
ricunidad/curso-javascript-001
backend/calculadora.js
'use strict'

// Simple command-line calculator:
//   node calculadora.js <num1> <num2>
// Prints the sum, difference, product and quotient of the two arguments.
// Non-numeric input yields NaN; dividing by zero yields Infinity.

var params = process.argv.slice(2);

var numero1 = parseFloat(params[0]);
var numero2 = parseFloat(params[1]);

// FIX: corrected the misspelled "multiplicasión" -> "multiplicación"
// in the user-facing output.
var plantilla = `
La suma es: ${numero1 + numero2}
La resta es: ${numero1 - numero2}
La multiplicación es: ${numero1 * numero2}
La división es: ${numero1 / numero2}
`;

console.log(plantilla);
shanti-uva/kmaps_engine
app/controllers/admin/phonetic_systems_controller.rb
# Admin CRUD controller for phonetic systems.
# Authorization comes from AclController; the standard actions are generated
# by resource_controller, with shared behaviour mixed in from
# KmapsEngine::SimplePropsControllerHelper.
class Admin::PhoneticSystemsController < AclController
  resource_controller
  include KmapsEngine::SimplePropsControllerHelper

  protected

  # Only allow a trusted parameter "white list" through.
  def phonetic_system_params
    params.require(:phonetic_system).permit(:name, :code, :description)
  end
end
thezeeck/ZUI
src/ZUI/components/Grid/index.js
// Grid compound component: re-exports the container as the default export
// and attaches the item component as `Grid.Item`, enabling the
// `<Grid><Grid.Item/></Grid>` usage pattern.
import { Grid } from "./Container";
import { Item } from "./Item";

Grid.Item = Item;

export default Grid;
vk9/intellij-scala
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/annotator/ScImportExprAnnotator.scala
package org.jetbrains.plugins.scala.lang.psi.annotator

import com.intellij.codeInspection.ProblemHighlightType
import com.intellij.lang.annotation.{Annotation, AnnotationHolder}
import org.jetbrains.plugins.scala.ScalaBundle
import org.jetbrains.plugins.scala.lang.psi.api.Annotatable
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.imports.ScImportExpr

/**
 * Annotator mixin for import expressions: reports an editor error when an
 * import expression has no qualifier, using the bundled
 * "import.expr.should.be.qualified" message.
 */
trait ScImportExprAnnotator extends Annotatable { self: ScImportExpr =>

  abstract override def annotate(holder: AnnotationHolder, typeAware: Boolean): Unit = {
    // Let the annotators further down the mixin stack run first.
    super.annotate(holder, typeAware)

    if (qualifier == null) {
      val annotation: Annotation = holder.createErrorAnnotation(getTextRange,
        ScalaBundle.message("import.expr.should.be.qualified"))
      annotation.setHighlightType(ProblemHighlightType.GENERIC_ERROR)
    }
  }
}
yusank/goim
apps/gateway/internal/router/v1/send_msg_router.go
// Package v1 wires the gateway's HTTP endpoints to the message services.
package v1

import (
	"github.com/gin-gonic/gin"

	messagev1 "github.com/yusank/goim/api/message/v1"
	"github.com/yusank/goim/apps/gateway/internal/service"
	"github.com/yusank/goim/pkg/mid"
	"github.com/yusank/goim/pkg/util"
)

// handleSendSingleUserMsg binds the JSON body into a SendMessageReq and
// delegates to the send-message service to deliver it to a single user.
// Bind or service errors are written back via util.ErrorResp; otherwise the
// service response is wrapped in util.Success.
func handleSendSingleUserMsg(c *gin.Context) {
	req := new(messagev1.SendMessageReq)
	if err := c.ShouldBindJSON(req); err != nil {
		util.ErrorResp(c, err)
		return
	}

	rsp, err := service.GetSendMessageService().SendMessage(mid.GetContext(c), req)
	if err != nil {
		util.ErrorResp(c, err)
		return
	}

	util.Success(c, rsp)
}

// handleSendBroadcastMsg is the broadcast twin of handleSendSingleUserMsg:
// same bind/respond flow, but the request is fanned out via Broadcast.
func handleSendBroadcastMsg(c *gin.Context) {
	req := new(messagev1.SendMessageReq)
	if err := c.ShouldBindJSON(req); err != nil {
		util.ErrorResp(c, err)
		return
	}

	rsp, err := service.GetSendMessageService().Broadcast(mid.GetContext(c), req)
	if err != nil {
		util.ErrorResp(c, err)
		return
	}

	util.Success(c, rsp)
}
open-hand/test-manager-service
src/main/java/io/choerodon/test/manager/api/vo/TestFileLoadHistoryWebsocketVO.java
<reponame>open-hand/test-manager-service package io.choerodon.test.manager.api.vo; import io.swagger.annotations.ApiModelProperty; import org.hzero.starter.keyencrypt.core.Encrypt; /** * @author <EMAIL> 2020/9/10 下午8:57 */ public class TestFileLoadHistoryWebsocketVO { @ApiModelProperty(value = "主键ID") @Encrypt private Long id; @ApiModelProperty(value = "状态") private Long status; @ApiModelProperty(value = "进度") private Double rate; @ApiModelProperty(value = "错误消息编码") private String code; private String message; public String getMessage() { return message; } public void setMessage(String message) { this.message = message; } public Long getId() { return id; } public void setId(Long id) { this.id = id; } public Long getStatus() { return status; } public void setStatus(Long status) { this.status = status; } public Double getRate() { return rate; } public void setRate(Double rate) { this.rate = rate; } public String getCode() { return code; } public void setCode(String code) { this.code = code; } }
keirlawson/backpack
packages/bpk-svgs/dist/js/icons/lg/account--permit.js
// Auto-generated Backpack icon component (account--permit, "lg" size);
// lives under dist/, so edit the source SVG rather than this file.
import React from "react";

// `styles` is destructured (with a default) purely so it is excluded from
// the `...props` spread onto the <svg> element.
export default (({ styles = {}, ...props }) => <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" style={{
  width: "1.5rem",
  height: "1.5rem"
}} {...props}><path d="M18 2H6a2 2 0 0 0-2 2v18l8-4 8 4V4a2 2 0 0 0-2-2zm-7.515 12.929l-3.707-3.861L8.22 9.683l2.293 2.389 5.278-5.278 1.414 1.414-6.72 6.721z" /></svg>);
sdqali/FreeBuilder
src/test/java/org/inferred/freebuilder/processor/MethodFinderTest.java
/*
 * Copyright 2014 Google Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.inferred.freebuilder.processor;

import static com.google.common.truth.Truth.assertThat;

import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;

import org.inferred.freebuilder.processor.Analyser.CannotGenerateCodeException;
import org.inferred.freebuilder.processor.util.testing.ModelRule;
import org.junit.ClassRule;
import org.junit.Test;

import java.io.IOException;

import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Name;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.util.SimpleTypeVisitor6;

/**
 * Unit tests for MethodFinder: each nested type below is a fixture class
 * hierarchy, compiled into a javax.lang.model {@code TypeElement} via the
 * shared {@link ModelRule}, and the methods MethodFinder reports for it are
 * compared against their expected string renderings.
 */
public class MethodFinderTest {

  // Shared compilation model; a @ClassRule so the compiler is started once
  // for the whole test class.
  @ClassRule public static ModelRule model = new ModelRule();

  // Fixture: three overloads of "method" on a single class.
  abstract static class ClassOne {
    abstract void method();
    abstract void method(int x);
    abstract int method(double x);
  }

  @Test
  public void testNoInheritanceClass() {
    assertThat(methodsOn(ClassOne.class)).containsExactly(
        "void ClassOne::method()",
        "void ClassOne::method(int)",
        "int ClassOne::method(double)");
  }

  private interface InterfaceOne {
    void method();
  }

  @Test
  public void testNoInheritanceInterface() {
    assertThat(methodsOn(InterfaceOne.class)).containsExactly("void InterfaceOne::method()");
  }

  private abstract static class SingleInterface implements InterfaceOne { }

  @Test
  public void testSingleInterface() {
    // The inherited interface method is reported against its declaring type.
    assertThat(methodsOn(SingleInterface.class)).containsExactly("void InterfaceOne::method()");
  }

  private abstract static class SingleSuperclass extends ClassOne { }

  @Test
  public void testSingleSuperclassMethodInterface() {
    assertThat(methodsOn(SingleSuperclass.class)).containsExactly(
        "void ClassOne::method()", "void ClassOne::method(int)", "int ClassOne::method(double)");
  }

  interface InterfaceTwo extends InterfaceOne { }

  @Test
  public void testSimpleInterfaceHierarchy() {
    assertThat(methodsOn(InterfaceTwo.class)).containsExactly("void InterfaceOne::method()");
  }

  // Fixture: the same interface reachable via two paths (class + interface).
  private abstract static class DiamondInheritance extends SingleInterface
      implements InterfaceTwo { }

  @Test
  public void testDiamondInheritance() {
    assertThat(methodsOn(DiamondInheritance.class)).containsExactly("void InterfaceOne::method()");
  }

  private interface InterfaceThree {
    void method();
  }

  private interface InterfaceFour {
    void method();
  }

  private abstract static class MultipleMethodsSameSignature
      implements InterfaceOne, InterfaceTwo, InterfaceThree, InterfaceFour { }

  @Test
  public void testMultipleMethodsSameSignature() {
    ImmutableList<String> methods = methodsOn(MultipleMethodsSameSignature.class);
    // When choosing between multiple unrelated interfaces defining the same method, pick any
    assertThat(methods).containsAnyOf(
        "void InterfaceOne::method()",
        "void InterfaceThree::method()",
        "void InterfaceFour::method()");
    assertThat(methods).hasSize(1);
  }

  private abstract static class MultipleMethodsSameSignatureWithSuperclass
      extends ClassOne implements InterfaceOne { }

  @Test
  public void testMultipleMethodsSameSignatureWithSuperclass() {
    // When choosing between InterfaceOne::method and ClassOne::method, pick the concrete type.
    assertThat(methodsOn(MultipleMethodsSameSignatureWithSuperclass.class)).containsExactly(
        "void ClassOne::method()", "void ClassOne::method(int)", "int ClassOne::method(double)");
  }

  private interface MultipleMethodsSameSignatureRedeclared
      extends InterfaceOne, InterfaceTwo, InterfaceThree, InterfaceFour {
    @Override
    void method();
  }

  @Test
  public void testMultipleMethodsSameSignatureRedeclared() {
    ImmutableList<String> methods = methodsOn(MultipleMethodsSameSignatureRedeclared.class);
    // When choosing between multiple interfaces defining the same method, pick the most derived
    // one.
    assertThat(methods).containsExactly("void MultipleMethodsSameSignatureRedeclared::method()");
  }

  // Fixture: covariant return type narrowing (Object -> Integer) plus a
  // dropped checked exception in the override.
  private abstract static class WideMethodsSuperclass {
    abstract Object doSomething(Integer x) throws IOException;
  }

  private static class NarrowMethodSubclass extends WideMethodsSuperclass {
    @Override
    Integer doSomething(Integer x) {
      throw new UnsupportedOperationException();
    }
  }

  @Test
  public void testSignatureNarrowing() {
    // Only the most derived (narrowed) signature should be reported.
    assertThat(methodsOn(NarrowMethodSubclass.class))
        .containsExactly("Integer NarrowMethodSubclass::doSomething(Integer)");
  }

  private interface Receiver<T> {
    void accept(T object);
  }

  private static class MySink implements Receiver<String> {
    @Override
    public void accept(String object) {
      throw new UnsupportedOperationException();
    }
  }

  @Test
  public void testGenericSignatureOverriding() {
    // The type-parameter instantiation (T = String) is what gets reported.
    assertThat(methodsOn(MySink.class)).containsExactly("void MySink::accept(String)");
  }

  // Utility methods ///////////////////////////////////////////////////////////////////////////

  /** Runs MethodFinder on the compiled model of {@code cls} and renders the results. */
  private static ImmutableList<String> methodsOn(Class<?> cls) {
    try {
      return toStrings(MethodFinder.methodsOn(model.typeElement(cls), model.elementUtils()));
    } catch (CannotGenerateCodeException e) {
      throw new AssertionError(e);
    }
  }

  /** Renders each method as {@code "ReturnType DeclaringType::name(ParamType, ...)"}. */
  private static ImmutableList<String> toStrings(
      Iterable<? extends ExecutableElement> methods) {
    ImmutableList.Builder<String> resultBuilder = ImmutableList.builder();
    for (ExecutableElement method : methods) {
      resultBuilder.add(
          STRINGIFY.visit(method.getReturnType())
              + " "
              + method.getEnclosingElement().getSimpleName()
              + "::"
              + method.getSimpleName()
              + "("
              + Joiner.on(", ").join(variablesToStrings(method.getParameters()))
              + ")");
    }
    return resultBuilder.build();
  }

  /** Renders each parameter's type via STRINGIFY. */
  private static ImmutableList<CharSequence> variablesToStrings(
      Iterable<? extends VariableElement> variables) {
    ImmutableList.Builder<CharSequence> resultBuilder = ImmutableList.builder();
    for (VariableElement variable : variables) {
      resultBuilder.add(STRINGIFY.visit(variable.asType()));
    }
    return resultBuilder.build();
  }

  // Renders declared types by simple name; everything else by toString().
  private static final SimpleTypeVisitor6<CharSequence, ?> STRINGIFY =
      new SimpleTypeVisitor6<CharSequence, Void>() {
        @Override
        public Name visitDeclared(DeclaredType t, Void p) {
          return t.asElement().getSimpleName();
        }

        @Override
        protected String defaultAction(TypeMirror e, Void p) {
          return e.toString();
        }
      };
}
ghsecuritylab/tomato_egg
release/src/linux/linux/include/asm-ia64/sn/pci/pciio_private.h
/* $Id$
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1992 - 1997, 2000-2003 Silicon Graphics, Inc. All rights reserved.
 */
#ifndef _ASM_SN_PCI_PCIIO_PRIVATE_H
#define _ASM_SN_PCI_PCIIO_PRIVATE_H

#include <asm/sn/pci/pciio.h>
#include <asm/sn/pci/pci_defs.h>

/*
 * pciio_private.h -- private definitions for pciio
 * PCI drivers should NOT include this file.
 *
 * These structures are the provider-side state behind the opaque handles
 * (piomap, dmamap, intr, info) that the public pciio interface hands out.
 */

#ident "sys/PCI/pciio_private: $Revision: 1.13 $"

/*
 * All PCI providers set up PIO using this information.
 */
struct pciio_piomap_s {
	unsigned		pp_flags;	/* PCIIO_PIOMAP flags */
	vertex_hdl_t		pp_dev;		/* associated pci card */
	pciio_slot_t		pp_slot;	/* which slot the card is in */
	pciio_space_t		pp_space;	/* which address space */
	iopaddr_t		pp_pciaddr;	/* starting offset of mapping */
	size_t			pp_mapsz;	/* size of this mapping */
	caddr_t			pp_kvaddr;	/* kernel virtual address to use */
};

/*
 * All PCI providers set up DMA using this information.
 */
struct pciio_dmamap_s {
	unsigned		pd_flags;	/* PCIIO_DMAMAP flags */
	vertex_hdl_t		pd_dev;		/* associated pci card */
	pciio_slot_t		pd_slot;	/* which slot the card is in */
};

/*
 * All PCI providers set up interrupts using this information.
 */
struct pciio_intr_s {
	unsigned		pi_flags;	/* PCIIO_INTR flags */
	vertex_hdl_t		pi_dev;		/* associated pci card */
	device_desc_t		pi_dev_desc;	/* override device descriptor */
	pciio_intr_line_t	pi_lines;	/* which interrupt line(s) */
	intr_func_t		pi_func;	/* handler function (when connected) */
	intr_arg_t		pi_arg;		/* handler parameter (when connected) */
	cpuid_t			pi_mustruncpu;	/* Where we must run. */
	int			pi_irq;		/* IRQ assigned */
	int			pi_cpu;		/* cpu assigned */
};

/* PCIIO_INTR (pi_flags) flags */
#define PCIIO_INTR_CONNECTED	1	/* interrupt handler/thread has been connected */
#define PCIIO_INTR_NOTHREAD	2	/* interrupt handler wants to be called at interrupt level */

/*
 * Some PCI provider implementations keep track of PCI window Base Address
 * Register (BAR) address range assignment via the rmalloc()/rmfree() arena
 * management routines. These implementations use the following data
 * structure for each allocation address space (e.g. memory, I/O, small
 * window, etc.).
 *
 * The ``page size'' encodes the minimum allocation unit and must be a power
 * of 2. The main use of this allocation ``page size'' is to control the
 * number of free address ranges that the mapping allocation software will
 * need to track. Smaller values will allow more efficient use of the address
 * ranges but will result in much larger allocation map structures ... For
 * instance, if we want to manage allocations for a 256MB address range,
 * choosing a 1MB allocation page size will result in up to 1MB being wasted
 * for allocation requests smaller than 1MB. The worst case allocation
 * pattern for the allocation software to track would be a pattern of 1MB
 * allocated, 1MB free. This results in the need to track up to 128 free
 * ranges.
 */
struct pciio_win_map_s {
	struct map		*wm_map;	/* window address map */
	int			wm_page_size;	/* allocation ``page size'' */
};

/*
 * Opaque structure used to keep track of window allocation information.
 */
struct pciio_win_alloc_s {
	struct resource		*wa_resource;	/* window map allocation resource */
	unsigned long		wa_base;	/* allocation starting page number */
	size_t			wa_pages;	/* number of pages in allocation */
};

/*
 * Each PCI Card has one of these.
 */
struct pciio_info_s {
	char			*c_fingerprint;
	vertex_hdl_t		c_vertex;	/* back pointer to vertex */
	pciio_bus_t		c_bus;		/* which bus the card is in */
	pciio_slot_t		c_slot;		/* which slot the card is in */
	pciio_function_t	c_func;		/* which func (on multi-func cards) */
	pciio_vendor_id_t	c_vendor;	/* PCI card "vendor" code */
	pciio_device_id_t	c_device;	/* PCI card "device" code */
	vertex_hdl_t		c_master;	/* PCI bus provider */
	arbitrary_info_t	c_mfast;	/* cached fastinfo from c_master */
	pciio_provider_t	*c_pops;	/* cached provider from c_master */
	error_handler_f		*c_efunc;	/* error handling function */
	error_handler_arg_t	c_einfo;	/* first parameter for efunc */

	/* One entry per BAR, plus a final entry for the expansion ROM
	 * (see the c_rwindow aliases below). */
	struct pciio_win_info_s {		/* state of BASE regs */
		pciio_space_t		w_space;
		iopaddr_t		w_base;
		size_t			w_size;
		int			w_devio_index;	/* DevIO[] register used to access this window */
		struct pciio_win_alloc_s w_win_alloc;	/* window allocation cookie */
	}			c_window[PCI_CFG_BASE_ADDRS + 1];
#define	c_rwindow	c_window[PCI_CFG_BASE_ADDRS]	/* EXPANSION ROM window */
#define	c_rbase		c_rwindow.w_base		/* EXPANSION ROM base addr */
#define	c_rsize		c_rwindow.w_size		/* EXPANSION ROM size (bytes) */
	pciio_piospace_t	c_piospace;	/* additional I/O spaces allocated */
};

extern char pciio_info_fingerprint[];
#endif				/* _ASM_SN_PCI_PCIIO_PRIVATE_H */
nus-ncl/common-interface
adapter-openstack/src/main/java/sg/ncl/adapter/openstack/exception/ServerNotFoundException.java
package sg.ncl.adapter.openstack.exception;

import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.ResponseStatus;
import sg.ncl.common.exception.base.NotFoundException;

/**
 * Thrown when the OpenStack adapter cannot find the requested server list;
 * Spring maps it to HTTP 404 via the {@link ResponseStatus} annotation.
 */
@ResponseStatus(code = HttpStatus.NOT_FOUND, reason = "Server Lists not found")
public class ServerNotFoundException extends NotFoundException {

    public ServerNotFoundException(final String message) {
        super(message);
    }
}
appirits/comable
backend/app/controllers/comable/admin/stocks_controller.rb
require_dependency 'comable/admin/application_controller'

module Comable
  module Admin
    # Admin CRUD controller for product stock records, including CSV-style
    # export/import. Authorization is handled by CanCan's
    # load_and_authorize_resource; on new/create the stock is loaded through
    # its parent product.
    class StocksController < Comable::Admin::ApplicationController
      load_and_authorize_resource :stock, class: Comable::Stock.name, except: [:new, :create]
      load_and_authorize_resource :product, class: Comable::Product.name, only: [:new, :create]
      load_and_authorize_resource :stock, class: Comable::Stock.name, through: :product, only: [:new, :create]

      def index
        @q = @stocks.ransack(params[:q])

        @stocks = @q.result.includes(variant: [:product, :option_values]).page(params[:page]).accessible_by(current_ability).by_newest
      end

      def show
        render :edit
      end

      def new
      end

      def create
        # TODO: Remove
        @stock.build_variant(product: @product) unless @stock.variant

        if @stock.save
          redirect_to comable.admin_stock_path(@stock), notice: Comable.t('successful')
        else
          flash.now[:alert] = Comable.t('failure')
          render :new
        end
      end

      def edit
      end

      def update
        if @stock.update_attributes(stock_params)
          redirect_to comable.admin_stock_path(@stock), notice: Comable.t('successful')
        else
          flash.now[:alert] = Comable.t('failure')
          render :edit
        end
      end

      def destroy
        if @stock.destroy
          redirect_to comable.admin_stocks_path, notice: Comable.t('successful')
        else
          flash.now[:alert] = Comable.t('failure')
          render :edit
        end
      end

      def export
        q = @stocks.ransack(params[:q])
        stocks = q.result.includes(variant: :product).accessible_by(current_ability)
        respond_to_export_with stocks
      end

      def import
        # Wrap in a transaction so a partially-valid file imports nothing.
        ActiveRecord::Base.transaction do
          Comable::Stock.import_from(params[:file])
        end
        redirect_to comable.admin_stocks_path, notice: Comable.t('successful')
      rescue Comable::Importable::Exception => e
        redirect_to comable.admin_stocks_path, alert: e.message
      end

      private

      # Strong-parameter whitelist for stock create/update.
      def stock_params
        params.require(:stock).permit(
          :code,
          :quantity,
          :sku_h_choice_name,
          :sku_v_choice_name,
          :stock_location_id
        )
      end
    end
  end
end
DevPlant/spring-beginner-training-v2
trail/8-adding-value/src/main/java/com/devplant/basics/security/service/EmailSendService.java
package com.devplant.basics.security.service;

import java.io.UnsupportedEncodingException;
import java.util.HashMap;
import java.util.Map;

import javax.mail.internet.InternetAddress;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import com.google.common.collect.Lists;

import it.ozimov.springboot.mail.model.Email;
import it.ozimov.springboot.mail.model.defaultimpl.DefaultEmail;
import it.ozimov.springboot.mail.service.EmailService;
import it.ozimov.springboot.mail.service.exception.CannotSendEmailException;
import lombok.Builder;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;

/**
 * Sends user-notification e-mails through the ozimov spring-boot-email
 * starter, rendering the body from the "notify-user.ftl" Freemarker
 * template. Failures are logged, not propagated (best-effort delivery).
 */
@Slf4j
@Service
public class EmailSendService {

	// Sender address; falls back to the default after the ':' when the
	// spring.mail.username property is not set.
	@Value("${spring.mail.username:<EMAIL>}")
	private String severEmailAddress;

	@Autowired
	private EmailService emailService;

	/** Simple carrier for the template model (subject, recipient, body text). */
	@Data
	@Builder
	public static class EmailModel {
		private String subject;
		private String username;
		private String message;
	}

	/**
	 * Builds and sends the notification e-mail described by {@code emailModel}.
	 * The username doubles as the recipient address and template variable.
	 */
	public void sendEmail(EmailModel emailModel) {
		try {
			Email email = DefaultEmail.builder().from(new InternetAddress(severEmailAddress, "DevPlant")).to(Lists
					.newArrayList(new InternetAddress(emailModel.getUsername(), emailModel.getUsername())))
					.subject("Activate your DevPlant Library Account").body("").encoding("UTF-8").build();

			// Variables consumed by notify-user.ftl.
			final Map<String, Object> modelObject = new HashMap<>();
			modelObject.put("username", emailModel.getUsername());
			modelObject.put("message", emailModel.getMessage());

			emailService.send(email, "notify-user.ftl", modelObject);
		} catch (UnsupportedEncodingException e) {
			// UTF-8 is always available, so this branch is effectively dead.
			log.error("Something is miss-configured, as this cannot happen", e);
		} catch (CannotSendEmailException e) {
			// Deliberate best-effort: log and continue rather than fail the caller.
			log.error("Cloud not send activation-email", e);
		}
	}
}
caixie-ltd/cx-website
src/components/indexBlocks/timelineBlock.js
import { Component } from "react"
import React from "react"
import cls from './timeline.module.scss'

// Static landing-page block: renders the company history as a three-entry
// timeline (2015 / 2017 / 2018). Presentational only -- no props or state.
export default class TimelineBlock extends Component {
  render() {
    return (
      <div className={cls.timeline}>
        <div className={cls.items}>
          <div className={cls.timeline__item}>
            <h6 className={cls.timeline__heading}>2015</h6>
            <p className={cls.timeline__title}>北京采撷科技有限公司成立</p>
            <p className={cls.timeline__subtitle}>
              为传统教育机构提供在线教育平台系统服务
            </p>
          </div>
          <div className={cls.timeline__item}>
            <h6 className={cls.timeline__heading}>2017</h6>
            <p className={cls.timeline__title}>稳步发展</p>
            <p className={cls.timeline__subtitle}>
              深化定位,更专注于产品技术创新服务
            </p>
          </div>
          <div className={cls.timeline__item}>
            <h6 className={cls.timeline__heading}>2018</h6>
            <p className={cls.timeline__title}>厚积薄发</p>
            <p className={cls.timeline__subtitle}>
              自主企业内容云 Headless 系统
            </p>
          </div>
        </div>
      </div>
    )
  }
}
hpjmlgy/fake_weixing
src/module/grzx/Grzx.js
<filename>src/module/grzx/Grzx.js import React from 'react'; import Info_card from './Info_card'; import Edit_mobile from './Edit_mobile' var $ = require('jquery'); var injectTapEventPlugin = require("react-tap-event-plugin"); injectTapEventPlugin(); var Grzx = React.createClass({ getInitialState: function() { return { null } }, componentDidMount: function(e) { $('.edit_mobile_wrapper').css("display", "none") }, render: function() { var o = this; return ( <section className="grzx_wrapper"> <Info_card class_name="nickName" title="设置昵称" data={{name:"王小明"}}/> <Info_card class_name="edit_phone" title="更改登陆手机" data={{name:"15811360584"}}/> <Edit_mobile/> </section> ) } }); export default Grzx;
kobelb/elasticsearch
core/src/test/java/org/elasticsearch/action/support/replication/ReplicationResponseTests.java
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.action.support.replication;

import org.elasticsearch.test.ESTestCase;

import java.util.Locale;

import static org.hamcrest.CoreMatchers.equalTo;

/**
 * Tests for {@link ReplicationResponse}: verifies the ShardInfo
 * {@code toString()} rendering for a randomized successful-shard count.
 */
public class ReplicationResponseTests extends ESTestCase {

    public void testShardInfoToString() {
        // total is fixed at 5 (matching the literal in the expected string);
        // successful is randomized within [1, total].
        final int total = 5;
        final int successful = randomIntBetween(1, total);
        final ReplicationResponse.ShardInfo shardInfo = new ReplicationResponse.ShardInfo(total, successful);
        assertThat(
                shardInfo.toString(),
                equalTo(String.format(Locale.ROOT, "ShardInfo{total=5, successful=%d, failures=[]}", successful)));
    }
}
amsnyder/resource-watch
components/datasets/table/td/code/component.js
import React, { PureComponent } from 'react';
import PropTypes from 'prop-types';

// Table cell that renders a dataset's code, taken from the first metadata
// entry's `info.rwId`; renders an empty string when any level is missing.
class CodeTD extends PureComponent {
  static propTypes = {
    row: PropTypes.object.isRequired
  }

  render() {
    const { row: { metadata } } = this.props;
    // Guard each level: metadata may be absent or empty, and the first
    // entry may lack an `info` object.
    const code = metadata && (metadata[0] || {}).info ? metadata[0].info.rwId : '';

    return (
      <td>
        <span>{code}</span>
      </td>
    );
  }
}

export default CodeTD;
MaxSchambach/colour
colour/constants/codata.py
# -*- coding: utf-8 -*- """ Fundamental Physical Constants ============================== Defines various constants from recommended values by the Committee on Data for Science and Technology (CODATA). Notes ----- DocstringFloat wraps a plain float so that each constant can carry its own documentation string; the values behave as ordinary numbers. NOTE(review): these values predate the 2018 CODATA adjustment (which fixed h = 6.62607015e-34 and k = 1.380649e-23 exactly); they are kept as-is to preserve existing numerical results -- confirm before updating. """ from __future__ import division, unicode_literals from colour.utilities.documentation import DocstringFloat __author__ = '<NAME>' __copyright__ = 'Copyright (C) 2013-2019 - Colour Developers' __license__ = 'New BSD License - https://opensource.org/licenses/BSD-3-Clause' __maintainer__ = 'Colour Developers' __email__ = '<EMAIL>' __status__ = 'Production' __all__ = [ 'AVOGADRO_CONSTANT', 'BOLTZMANN_CONSTANT', 'LIGHT_SPEED', 'PLANCK_CONSTANT' ] AVOGADRO_CONSTANT = DocstringFloat(6.02214179e23) AVOGADRO_CONSTANT.__doc__ = """ Avogadro constant (mol^-1). AVOGADRO_CONSTANT : numeric """ BOLTZMANN_CONSTANT = DocstringFloat(1.38065e-23) BOLTZMANN_CONSTANT.__doc__ = """ Boltzmann constant (J/K). BOLTZMANN_CONSTANT : numeric """ LIGHT_SPEED = DocstringFloat(299792458) LIGHT_SPEED.__doc__ = """ Speed of light in vacuum (m/s); exact by definition of the metre. LIGHT_SPEED : numeric """ PLANCK_CONSTANT = DocstringFloat(6.62607e-34) PLANCK_CONSTANT.__doc__ = """ Planck constant (J*s). PLANCK_CONSTANT : numeric """
williammustaffa/dragon-book
src/store/api/database/index.js
import localStorageDB from "localstoragedb"; const database = new localStorageDB("DragonBook", localStorage); // Create mocked data if its not initialized if (database.isNew()) { database.createTableWithData("profiles", require("./profiles.json")); database.commit(); } export default database;
miguelbuca/canopus-api
node_modules/underscore/amd/pluck.js
define(['./map', './property'], function (map, property) {

  // Convenience version of a common use case of `_.map`: fetching a
  // property from every element. `pluck(list, 'name')` is equivalent to
  // `map(list, property('name'))`.
  function pluck(collection, propName) {
    var getter = property(propName);
    return map(collection, getter);
  }

  return pluck;

});
Elojah/game_02
pkg/room/room.pb.go
// Code generated by protoc-gen-gogo. DO NOT EDIT. // source: github.com/elojah/game_02/pkg/room/room.proto package room import ( bytes "bytes" fmt "fmt" github_com_elojah_game_02_pkg_ulid "github.com/elojah/game_02/pkg/ulid" _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" io "io" math "math" math_bits "math/bits" reflect "reflect" strings "strings" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package type R struct { ID github_com_elojah_game_02_pkg_ulid.ID `protobuf:"bytes,1,opt,name=ID,proto3,customtype=github.com/elojah/game_02/pkg/ulid.ID" json:"ID"` Name string `protobuf:"bytes,2,opt,name=Name,proto3" json:"Name,omitempty"` Password []byte `protobuf:"bytes,3,opt,name=Password,proto3" json:"Password,omitempty"` Owner github_com_elojah_game_02_pkg_ulid.ID `protobuf:"bytes,4,opt,name=Owner,proto3,customtype=github.com/elojah/game_02/pkg/ulid.ID" json:"Owner"` WorldID github_com_elojah_game_02_pkg_ulid.ID `protobuf:"bytes,5,opt,name=WorldID,proto3,customtype=github.com/elojah/game_02/pkg/ulid.ID" json:"WorldID"` } func (m *R) Reset() { *m = R{} } func (*R) ProtoMessage() {} func (*R) Descriptor() ([]byte, []int) { return fileDescriptor_6253672ef60b3e0d, []int{0} } func (m *R) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *R) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_R.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *R) XXX_Merge(src proto.Message) { 
xxx_messageInfo_R.Merge(m, src) } func (m *R) XXX_Size() int { return m.Size() } func (m *R) XXX_DiscardUnknown() { xxx_messageInfo_R.DiscardUnknown(m) } var xxx_messageInfo_R proto.InternalMessageInfo func (m *R) GetName() string { if m != nil { return m.Name } return "" } func (m *R) GetPassword() []byte { if m != nil { return m.Password } return nil } func init() { proto.RegisterType((*R)(nil), "room.R") } func init() { proto.RegisterFile("github.com/elojah/game_02/pkg/room/room.proto", fileDescriptor_6253672ef60b3e0d) } var fileDescriptor_6253672ef60b3e0d = []byte{ // 283 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xd2, 0x4d, 0xcf, 0x2c, 0xc9, 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xcd, 0xc9, 0xcf, 0x4a, 0xcc, 0xd0, 0x4f, 0x4f, 0xcc, 0x4d, 0x8d, 0x37, 0x30, 0xd2, 0x2f, 0xc8, 0x4e, 0xd7, 0x2f, 0xca, 0xcf, 0xcf, 0x05, 0x13, 0x7a, 0x05, 0x45, 0xf9, 0x25, 0xf9, 0x42, 0x2c, 0x20, 0xb6, 0x14, 0xb2, 0xa6, 0xf4, 0xfc, 0xf4, 0x7c, 0x7d, 0xb0, 0x64, 0x52, 0x69, 0x1a, 0x98, 0x07, 0xe6, 0x80, 0x59, 0x10, 0x4d, 0x4a, 0x6d, 0x4c, 0x5c, 0x8c, 0x41, 0x42, 0xb6, 0x5c, 0x4c, 0x9e, 0x2e, 0x12, 0x8c, 0x0a, 0x8c, 0x1a, 0x3c, 0x4e, 0xba, 0x27, 0xee, 0xc9, 0x33, 0xdc, 0xba, 0x27, 0xaf, 0x8a, 0xdf, 0xf6, 0xd2, 0x9c, 0xcc, 0x14, 0x3d, 0x4f, 0x97, 0x20, 0x26, 0x4f, 0x17, 0x21, 0x21, 0x2e, 0x16, 0xbf, 0xc4, 0xdc, 0x54, 0x09, 0x26, 0x05, 0x46, 0x0d, 0xce, 0x20, 0x30, 0x5b, 0x48, 0x81, 0x8b, 0x23, 0x20, 0xb1, 0xb8, 0xb8, 0x3c, 0xbf, 0x28, 0x45, 0x82, 0x19, 0x6c, 0x30, 0xcb, 0x89, 0x7b, 0xf2, 0x8c, 0x41, 0x70, 0x51, 0x21, 0x67, 0x2e, 0x56, 0xff, 0xf2, 0xbc, 0xd4, 0x22, 0x09, 0x16, 0x72, 0xec, 0x85, 0xe8, 0x15, 0x72, 0xe7, 0x62, 0x0f, 0xcf, 0x2f, 0xca, 0x49, 0xf1, 0x74, 0x91, 0x60, 0x25, 0xc7, 0x18, 0x98, 0x6e, 0x27, 0x87, 0x0b, 0x0f, 0xe5, 0x18, 0x6e, 0x3c, 0x94, 0x63, 0xf8, 0xf0, 0x50, 0x8e, 0xf1, 0xc7, 0x43, 0x39, 0xc6, 0x86, 0x47, 0x72, 0x8c, 0x2b, 0x1e, 0xc9, 0x31, 0xee, 0x78, 0x24, 0xc7, 0x78, 0xe0, 0x91, 0x1c, 
0xe3, 0x89, 0x47, 0x72, 0x8c, 0x17, 0x1e, 0xc9, 0x31, 0x3e, 0x78, 0x24, 0xc7, 0xf8, 0xe2, 0x91, 0x1c, 0xc3, 0x87, 0x47, 0x72, 0x8c, 0x13, 0x1e, 0xcb, 0x31, 0x5c, 0x78, 0x2c, 0xc7, 0x70, 0xe3, 0xb1, 0x1c, 0x43, 0x12, 0x1b, 0x38, 0x44, 0x8d, 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, 0xdc, 0x5c, 0xcd, 0xf7, 0xb7, 0x01, 0x00, 0x00, } func (this *R) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*R) if !ok { that2, ok := that.(R) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if !this.ID.Equal(that1.ID) { return false } if this.Name != that1.Name { return false } if !bytes.Equal(this.Password, that1.Password) { return false } if !this.Owner.Equal(that1.Owner) { return false } if !this.WorldID.Equal(that1.WorldID) { return false } return true } func (this *R) GoString() string { if this == nil { return "nil" } s := make([]string, 0, 9) s = append(s, "&room.R{") s = append(s, "ID: "+fmt.Sprintf("%#v", this.ID)+",\n") s = append(s, "Name: "+fmt.Sprintf("%#v", this.Name)+",\n") s = append(s, "Password: "+fmt.Sprintf("%#v", this.Password)+",\n") s = append(s, "Owner: "+fmt.Sprintf("%#v", this.Owner)+",\n") s = append(s, "WorldID: "+fmt.Sprintf("%#v", this.WorldID)+",\n") s = append(s, "}") return strings.Join(s, "") } func valueToGoStringRoom(v interface{}, typ string) string { rv := reflect.ValueOf(v) if rv.IsNil() { return "nil" } pv := reflect.Indirect(rv).Interface() return fmt.Sprintf("func(v %v) *%v { return &v } ( %#v )", typ, typ, pv) } func (m *R) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *R) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *R) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l { size := 
m.WorldID.Size() i -= size if _, err := m.WorldID.MarshalTo(dAtA[i:]); err != nil { return 0, err } i = encodeVarintRoom(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x2a { size := m.Owner.Size() i -= size if _, err := m.Owner.MarshalTo(dAtA[i:]); err != nil { return 0, err } i = encodeVarintRoom(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x22 if len(m.Password) > 0 { i -= len(m.Password) copy(dAtA[i:], m.Password) i = encodeVarintRoom(dAtA, i, uint64(len(m.Password))) i-- dAtA[i] = 0x1a } if len(m.Name) > 0 { i -= len(m.Name) copy(dAtA[i:], m.Name) i = encodeVarintRoom(dAtA, i, uint64(len(m.Name))) i-- dAtA[i] = 0x12 } { size := m.ID.Size() i -= size if _, err := m.ID.MarshalTo(dAtA[i:]); err != nil { return 0, err } i = encodeVarintRoom(dAtA, i, uint64(size)) } i-- dAtA[i] = 0xa return len(dAtA) - i, nil } func encodeVarintRoom(dAtA []byte, offset int, v uint64) int { offset -= sovRoom(v) base := offset for v >= 1<<7 { dAtA[offset] = uint8(v&0x7f | 0x80) v >>= 7 offset++ } dAtA[offset] = uint8(v) return base } func NewPopulatedR(r randyRoom, easy bool) *R { this := &R{} v1 := github_com_elojah_game_02_pkg_ulid.NewPopulatedID(r) this.ID = *v1 this.Name = string(randStringRoom(r)) v2 := r.Intn(100) this.Password = make([]byte, v2) for i := 0; i < v2; i++ { this.Password[i] = byte(r.Intn(256)) } v3 := github_com_elojah_game_02_pkg_ulid.NewPopulatedID(r) this.Owner = *v3 v4 := github_com_elojah_game_02_pkg_ulid.NewPopulatedID(r) this.WorldID = *v4 if !easy && r.Intn(10) != 0 { } return this } type randyRoom interface { Float32() float32 Float64() float64 Int63() int64 Int31() int32 Uint32() uint32 Intn(n int) int } func randUTF8RuneRoom(r randyRoom) rune { ru := r.Intn(62) if ru < 10 { return rune(ru + 48) } else if ru < 36 { return rune(ru + 55) } return rune(ru + 61) } func randStringRoom(r randyRoom) string { v5 := r.Intn(100) tmps := make([]rune, v5) for i := 0; i < v5; i++ { tmps[i] = randUTF8RuneRoom(r) } return string(tmps) } func randUnrecognizedRoom(r randyRoom, 
maxFieldNumber int) (dAtA []byte) { l := r.Intn(5) for i := 0; i < l; i++ { wire := r.Intn(4) if wire == 3 { wire = 5 } fieldNumber := maxFieldNumber + r.Intn(100) dAtA = randFieldRoom(dAtA, r, fieldNumber, wire) } return dAtA } func randFieldRoom(dAtA []byte, r randyRoom, fieldNumber int, wire int) []byte { key := uint32(fieldNumber)<<3 | uint32(wire) switch wire { case 0: dAtA = encodeVarintPopulateRoom(dAtA, uint64(key)) v6 := r.Int63() if r.Intn(2) == 0 { v6 *= -1 } dAtA = encodeVarintPopulateRoom(dAtA, uint64(v6)) case 1: dAtA = encodeVarintPopulateRoom(dAtA, uint64(key)) dAtA = append(dAtA, byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256))) case 2: dAtA = encodeVarintPopulateRoom(dAtA, uint64(key)) ll := r.Intn(100) dAtA = encodeVarintPopulateRoom(dAtA, uint64(ll)) for j := 0; j < ll; j++ { dAtA = append(dAtA, byte(r.Intn(256))) } default: dAtA = encodeVarintPopulateRoom(dAtA, uint64(key)) dAtA = append(dAtA, byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256))) } return dAtA } func encodeVarintPopulateRoom(dAtA []byte, v uint64) []byte { for v >= 1<<7 { dAtA = append(dAtA, uint8(uint64(v)&0x7f|0x80)) v >>= 7 } dAtA = append(dAtA, uint8(v)) return dAtA } func (m *R) Size() (n int) { if m == nil { return 0 } var l int _ = l l = m.ID.Size() n += 1 + l + sovRoom(uint64(l)) l = len(m.Name) if l > 0 { n += 1 + l + sovRoom(uint64(l)) } l = len(m.Password) if l > 0 { n += 1 + l + sovRoom(uint64(l)) } l = m.Owner.Size() n += 1 + l + sovRoom(uint64(l)) l = m.WorldID.Size() n += 1 + l + sovRoom(uint64(l)) return n } func sovRoom(x uint64) (n int) { return (math_bits.Len64(x|1) + 6) / 7 } func sozRoom(x uint64) (n int) { return sovRoom(uint64((x << 1) ^ uint64((int64(x) >> 63)))) } func (this *R) String() string { if this == nil { return "nil" } s := strings.Join([]string{`&R{`, `ID:` + fmt.Sprintf("%v", this.ID) + `,`, `Name:` + fmt.Sprintf("%v", 
this.Name) + `,`, `Password:` + fmt.Sprintf("%v", this.Password) + `,`, `Owner:` + fmt.Sprintf("%v", this.Owner) + `,`, `WorldID:` + fmt.Sprintf("%v", this.WorldID) + `,`, `}`, }, "") return s } func valueToStringRoom(v interface{}) string { rv := reflect.ValueOf(v) if rv.IsNil() { return "nil" } pv := reflect.Indirect(rv).Interface() return fmt.Sprintf("*%v", pv) } func (m *R) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowRoom } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: R: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: R: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field ID", wireType) } var byteLen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowRoom } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ byteLen |= int(b&0x7F) << shift if b < 0x80 { break } } if byteLen < 0 { return ErrInvalidLengthRoom } postIndex := iNdEx + byteLen if postIndex < 0 { return ErrInvalidLengthRoom } if postIndex > l { return io.ErrUnexpectedEOF } if err := m.ID.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowRoom } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthRoom } postIndex := iNdEx + intStringLen if 
postIndex < 0 { return ErrInvalidLengthRoom } if postIndex > l { return io.ErrUnexpectedEOF } m.Name = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 3: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Password", wireType) } var byteLen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowRoom } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ byteLen |= int(b&0x7F) << shift if b < 0x80 { break } } if byteLen < 0 { return ErrInvalidLengthRoom } postIndex := iNdEx + byteLen if postIndex < 0 { return ErrInvalidLengthRoom } if postIndex > l { return io.ErrUnexpectedEOF } m.Password = append(m.Password[:0], dAtA[iNdEx:postIndex]...) if m.Password == nil { m.Password = []byte{} } iNdEx = postIndex case 4: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Owner", wireType) } var byteLen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowRoom } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ byteLen |= int(b&0x7F) << shift if b < 0x80 { break } } if byteLen < 0 { return ErrInvalidLengthRoom } postIndex := iNdEx + byteLen if postIndex < 0 { return ErrInvalidLengthRoom } if postIndex > l { return io.ErrUnexpectedEOF } if err := m.Owner.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex case 5: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field WorldID", wireType) } var byteLen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowRoom } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ byteLen |= int(b&0x7F) << shift if b < 0x80 { break } } if byteLen < 0 { return ErrInvalidLengthRoom } postIndex := iNdEx + byteLen if postIndex < 0 { return ErrInvalidLengthRoom } if postIndex > l { return io.ErrUnexpectedEOF } if err := m.WorldID.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex default: iNdEx = 
preIndex skippy, err := skipRoom(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthRoom } if (iNdEx + skippy) < 0 { return ErrInvalidLengthRoom } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func skipRoom(dAtA []byte) (n int, err error) { l := len(dAtA) iNdEx := 0 depth := 0 for iNdEx < l { var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return 0, ErrIntOverflowRoom } if iNdEx >= l { return 0, io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } wireType := int(wire & 0x7) switch wireType { case 0: for shift := uint(0); ; shift += 7 { if shift >= 64 { return 0, ErrIntOverflowRoom } if iNdEx >= l { return 0, io.ErrUnexpectedEOF } iNdEx++ if dAtA[iNdEx-1] < 0x80 { break } } case 1: iNdEx += 8 case 2: var length int for shift := uint(0); ; shift += 7 { if shift >= 64 { return 0, ErrIntOverflowRoom } if iNdEx >= l { return 0, io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ length |= (int(b) & 0x7F) << shift if b < 0x80 { break } } if length < 0 { return 0, ErrInvalidLengthRoom } iNdEx += length case 3: depth++ case 4: if depth == 0 { return 0, ErrUnexpectedEndOfGroupRoom } depth-- case 5: iNdEx += 4 default: return 0, fmt.Errorf("proto: illegal wireType %d", wireType) } if iNdEx < 0 { return 0, ErrInvalidLengthRoom } if depth == 0 { return iNdEx, nil } } return 0, io.ErrUnexpectedEOF } var ( ErrInvalidLengthRoom = fmt.Errorf("proto: negative length found during unmarshaling") ErrIntOverflowRoom = fmt.Errorf("proto: integer overflow") ErrUnexpectedEndOfGroupRoom = fmt.Errorf("proto: unexpected end of group") )
solosTec/node
nodes/master/src/session.h
<filename>nodes/master/src/session.h /* * The MIT License (MIT) * * Copyright (c) 2018 <NAME> * */ #ifndef NODE_MASTER_SESSION_H #define NODE_MASTER_SESSION_H #include "client.h" #include "cluster.h" #include <cyng/async/mux.h> #include <cyng/log.h> #include <cyng/store/db.h> #include <cyng/vm/controller.h> #include <cyng/io/parser/parser.h> #include <cyng/io/serializer/serialize.hpp> #include <boost/process/child.hpp> namespace node { class connection; class watchdog; class session { friend class connection; friend class client; friend class cluster; friend class watchdog; public: session(cyng::async::mux& mux , cyng::logging::log_ptr logger , boost::uuids::uuid mtag , cyng::store::db& , std::string const& account , std::string const& pwd , boost::uuids::uuid stag , std::chrono::seconds monitor , std::atomic<std::uint64_t>& global_configuration , boost::filesystem::path); session(session const&) = delete; session& operator=(session const&) = delete; /** * @return session specific hash based in internal tag */ std::size_t hash() const noexcept; /** * Fast session shutdown. No cleanup. * Session watchdog calls this method during service * shutdown. 
* * @param obj reference object of this session to keep session alive */ void stop(cyng::object obj); private: void bus_req_login(cyng::context& ctx); void bus_req_login_impl(cyng::context& ctx , cyng::version , std::string const& , std::string const& , boost::uuids::uuid // [3] session tag , std::string const& // [4] class , std::chrono::minutes // [5] delta , std::chrono::system_clock::time_point // [6] timestamp , bool // [7] autologin , std::uint32_t // [8] group , boost::asio::ip::tcp::endpoint // [9] remote ep , std::string // [10] platform , boost::process::pid_t); void bus_req_subscribe(cyng::context& ctx); void bus_req_unsubscribe(cyng::context& ctx); void bus_start_watchdog(cyng::context& ctx); void res_watchdog(cyng::context& ctx); void bus_req_stop_client_impl(cyng::context& ctx); void cleanup(cyng::context& ctx); void bus_insert_msg(cyng::context& ctx); void bus_req_push_data(cyng::context& ctx); void bus_insert_lora_uplink(cyng::context& ctx); cyng::vector_t reply(std::chrono::system_clock::time_point, bool); void sig_ins(cyng::store::table const* , cyng::table::key_type const& , cyng::table::data_type const& , std::uint64_t , boost::uuids::uuid); void sig_del(cyng::store::table const*, cyng::table::key_type const&, boost::uuids::uuid); void sig_clr(cyng::store::table const*, boost::uuids::uuid); void sig_mod(cyng::store::table const* , cyng::table::key_type const& , cyng::attr_t const& , std::uint64_t , boost::uuids::uuid); void stop_cb(cyng::vm&, cyng::object); private: cyng::async::mux& mux_; cyng::logging::log_ptr logger_; boost::uuids::uuid mtag_; // master tag cyng::store::db& db_; cyng::controller vm_; /** * Parser for binary cyng data stream (from cluster members) */ cyng::parser parser_; const std::string account_; const std::string pwd_; const std::chrono::seconds cluster_monitor_; /** * cluster bus sequence */ std::uint64_t seq_; /** * separate implementation of client logic */ client client_; /** * separate implementation of cluster logic 
*/ cluster cluster_; /** * table subscriptions */ cyng::store::subscriptions_t subscriptions_; /** * watchdog task id */ std::size_t tsk_watchdog_; /** * group id */ std::uint32_t group_; /** * Cluster tag of this session. * This tag is unique for each node. */ boost::uuids::uuid cluster_tag_; }; cyng::object make_session(cyng::async::mux& mux , cyng::logging::log_ptr logger , boost::uuids::uuid mtag , cyng::store::db& , std::string const& account , std::string const& pwd , boost::uuids::uuid stag , std::chrono::seconds monitor // cluster watchdog , std::atomic<std::uint64_t>& global_configuration , boost::filesystem::path); } #include <cyng/intrinsics/traits.hpp> namespace cyng { namespace traits { template <> struct type_tag<node::session> { using type = node::session; using tag = std::integral_constant<std::size_t, PREDEF_SESSION>; #if defined(CYNG_LEGACY_MODE_ON) const static char name[]; #else constexpr static char name[] = "session"; #endif }; template <> struct reverse_type < PREDEF_SESSION > { using type = node::session; }; } } #include <functional> #include <boost/functional/hash.hpp> namespace std { template<> struct hash<node::session> { inline size_t operator()(node::session const& s) const noexcept { return s.hash(); } }; template<> struct equal_to<node::session> { using result_type = bool; using first_argument_type = node::session; using second_argument_type = node::session; inline bool operator()(node::session const& s1, node::session const& s2) const noexcept { return s1.hash() == s2.hash(); } }; template<> struct less<node::session> { using result_type = bool; using first_argument_type = node::session; using second_argument_type = node::session; inline bool operator()(node::session const& s1, node::session const& s2) const noexcept { return s1.hash() < s2.hash(); } }; } #endif
kaabimg/EvLibrary
src/ezverif/data/variation/EvFilter.h
<reponame>kaabimg/EvLibrary #ifndef CORRELATION_H_ #define CORRELATION_H_ #include "../../../application/core/base/EvObject.h" #include "EvParameter.h" class EvFilter: public EvObject { Q_OBJECT EV_OBJECT(Variation,Filter,1,0) EV_TYPE_LABEL("Filter") EV_CHILDREN_INTERFACE_1(Parameter) public: Q_INVOKABLE EvFilter(QObject * parent=0); public Q_SLOTS: Q_SIGNALS: private: }; #endif /* CORRELATION_H_ */
agiletrailblazers/Website-GraduateSchool
router/routes/news-route.js
var express = require('express');
var contentful = require('../../API/contentful.js');
var async = require('async');
var prune = require('underscore.string/prune');
var striptags = require('striptags');
var router = express.Router();
var logger = require('../../logger');
var common = require("../../helpers/common.js");
var marked = require('marked');
var config = require('konphyg')(__dirname + '/../../config');

// NOTE(review): convertPageSearchPriorityToString is called below but is
// neither defined nor required in this file -- confirm it is provided
// elsewhere (it looks like a missing require).

// GET /news -- render the most recent news entries.
router.get('/news', function(req, res, next) {
  contentful.getNewsRecent(function(response, error) {
    if (error) {
      logger.error('Could not retrieve news from Contentful. Redirecting to error page', error);
      common.redirectToError(res);
    } else {
      res.render('news/recent_entries', {
        pageSearchPriority: convertPageSearchPriorityToString(config("pageSearchPriority").news),
        posts: response.items,
        striptags: striptags,
        prune: prune,
        title: 'News'
      });
    }
  });
});

// GET /news/:news_slug -- render the detail page for a single news entry.
router.get('/news/:news_slug', function(req, res, next) {
  var slug = req.params.news_slug;
  contentful.getNewsDetail(function(response, error) {
    if (error) {
      if (response.statusCode == 404) {
        // expected 404, do not log error
        logger.warn('No results for news slug ' + slug + ' from Contentful. Redirecting to page not found');
        res.redirect('/pagenotfound');
      } else {
        logger.error('Error retrieving news slug from Contentful. Redirect to error page', error);
        common.redirectToError(res);
      }
      return;
    }

    var items = response.items || [];

    // Locate the entry for the requested slug. The query is expected to
    // return a single match, but guard the multi-item case by matching on
    // fields.slug. (BUG FIX: the original switch compared the item count
    // against a boolean and against null -- both branches unreachable --
    // read an undefined `index` variable, used `.slug` instead of
    // `.fields.slug`, and redirected on the first non-matching item.)
    var index = -1;
    if (items.length === 1) {
      index = 0;
    } else {
      for (var i = 0; i < items.length; i++) {
        if (items[i].fields && items[i].fields.slug === slug) {
          index = i;
          break;
        }
      }
    }

    if (index === -1) {
      logger.warn('No results for news slug ' + slug + ' from Contentful. Redirecting to page not found');
      res.redirect('/pagenotfound');
      return;
    }

    // Resolve the featured image URL from the linked assets, if present.
    var featureImageURL = "";
    var assets = (response.includes != null) ? response.includes.Asset : null;
    if (assets != null && assets.length > index &&
        assets[index] != null && assets[index].fields != null &&
        assets[index].fields.file != null) {
      featureImageURL = assets[index].fields.file.url;
    }

    var item = items[index];
    res.render('news/news_details', {
      pageSearchPriority: convertPageSearchPriorityToString(config("pageSearchPriority").news),
      title: item.fields.title,
      // Collapse explicit <br> tags so markdown rendering controls breaks.
      body: item.fields.body.replace(/<br\/?>/gi, " "),
      featureImage: item.fields.featuredImage,
      featureImageURL: featureImageURL,
      tags: item.fields.tags,
      category: item.fields.category,
      author: item.fields.author,
      date: item.fields.date,
      markdown: marked
    });
  }, slug)
});

module.exports = router;
mixandmatch/titanium
Gruntfile.js
//based on http://culttt.com/2013/11/18/setting-sass-grunt/ 'use strict'; /** * Grunt Module */ module.exports = function (grunt) { grunt.initConfig({ /** * Get package meta data */ pkg: grunt.file.readJSON('package.json') , /** * Set project object */ project: { app: 'app' , assets: '<%= project.app %>/assets' , css: ['<%= project.assets %>/default.scss'] } , /** * Project banner */ tag: { banner: '/*!\n' + ' * <%= pkg.name %>\n' + ' * <%= pkg.title %>\n' + ' * <%= pkg.url %>\n' + ' * @author <%= pkg.author %>\n' + ' * @version <%= pkg.version %>\n' + ' * Copyright <%= pkg.copyright %>. <%= pkg.license %> licensed.\n' + ' */\n' } , /** * Sass */ sass: { dev: { options: { style: 'expanded' , banner: '<%= tag.banner %>' , compass: true } , files: { '<%= project.assets %>/default.css': '<%= project.css %>' } } } , /** * Watch */ watch: { sass: { files: '<%= project.assets %>{,*/}*.{scss,sass}' , tasks: ['sass:dev'] } } }); /** * Load Grunt plugins */ require('matchdep').filterDev('grunt-*').forEach(grunt.loadNpmTasks); /** * Default task * Run `grunt` on the command line */ grunt.registerTask('default' , ['sass:dev' , 'watch']); };
snxamdf/florist
web-core/src/main/java/com/sxm/core/constants/SYMBOL.java
<reponame>snxamdf/florist /* * * * */ package com.sxm.core.constants; /** * 符号全局常量. * * @author sxm * @version 2014-09-18 * @---------------------------------------------------------------------------------------- * @updated 修改描述. * @updated by sxm * @updated at 2014-09-18 */ public class SYMBOL { public final static String COMMA = ","; // 逗号 public final static String DOT = "."; // 点 public final static String SLANT = "/"; // 斜杠 public final static String SLANT2 = " / "; // 斜杠 public final static String BACK_SLANT = "\\"; // 反斜杠 public final static String UNDERLINE = "_"; // 下划线 public final static String SEMICOLON = ";"; // 分号 public final static String EQUAL = "="; // 等于号 public final static String PLUS = "+"; // 加号 public final static String MINUS = "-"; // 减号 public final static String COLON = ":"; // 冒号 public final static String ASTERISK = "*"; // 星号 public final static String WELL = "#"; // 井号 public final static String AT = "@"; // at符号 public final static String AND = "&"; // and符号 public final static String PERCENT = "%"; // 百分号 public final static String DPHK = "、"; // 顿号 }
subratamal/cautious-umbrella
src/actions/notification.js
import generateNotificationProps from '../utils/notificationPropsGenerator'; import { getStreamNotificationFeedToken } from '../webappApi/api'; import { NOTIFICATION_SUCCESS, NOTIFICATION_FEEDTOKEN, NOTIFICATION_ERROR, SHOW_MORE_NOTIFICATION_LOADER, SHOW_MORE_NOTIFICATION_BUTTON, } from '../constants/actions'; import client from '../getStreamApi'; import getStreamConfig from '../../config/getStream'; let updateCount = getStreamConfig.initialLimit; /*eslint-disable */ export function getAllNotification(id, count) { /*eslint-enable */ return (dispatch) => { const requestObject = { accountId: id, }; let limit = getStreamConfig.initialLimit; if (count !== undefined) { dispatch({ type: SHOW_MORE_NOTIFICATION_LOADER, status: true, }); limit += limit * count; updateCount = limit; } // creating a feed token server side getStreamNotificationFeedToken(requestObject).then((res) => { if (res.data) { const token = res.data; dispatch({ type: NOTIFICATION_FEEDTOKEN, token, }); const userFeeds = client.feed('notification', id, token); let totalLength = 0; const tLimit = limit + 1; userFeeds.get({ limit: tLimit, offset: 0 }).then((response) => { totalLength = response.results.length; return totalLength; }) .then((value) => { userFeeds.get({ limit, offset: 0 }) .then((response) => { if (response.results.length === value) { dispatch({ type: SHOW_MORE_NOTIFICATION_BUTTON, status: false, }); } generateNotificationProps(response.results) .then((values) => { if (values !== '') { const notificationCount = response.unseen; dispatch({ type: NOTIFICATION_SUCCESS, notificationsData: { notifications: values, notificationCount, status: false, loaderStatus: false, }, }); } else { dispatch({ type: NOTIFICATION_ERROR, notificationsData: { errorStatus: true, status: false, }, }); } }) .catch(() => { dispatch({ type: SHOW_MORE_NOTIFICATION_LOADER, status: false, }); }); }) .catch(() => { dispatch({ type: SHOW_MORE_NOTIFICATION_LOADER, status: false, }); }); }); function callback(data) { if (data.new 
!== undefined) { let newLimit = updateCount; newLimit += data.new.length; limit = newLimit; let totalResultLength = 0; const callbackLimit = newLimit + 1; userFeeds.get({ limit: callbackLimit, offset: 0 }).then((response) => { totalResultLength = response.results.length; return totalResultLength; }) .then((value) => { userFeeds.get({ limit: newLimit, offset: 0 }) .then((response) => { if (response.results.length === value) { dispatch({ type: SHOW_MORE_NOTIFICATION_BUTTON, status: false, }); } else { dispatch({ type: SHOW_MORE_NOTIFICATION_BUTTON, status: true, }); } generateNotificationProps(response.results) .then((values) => { if (values !== '') { const notificationCount = response.unseen; dispatch({ type: NOTIFICATION_SUCCESS, notificationsData: { notifications: values, notificationCount, status: false, }, }); } else { dispatch({ type: NOTIFICATION_ERROR, status: true, }); } }) .catch(() => { dispatch({ type: NOTIFICATION_ERROR, status: true, }); }); }) .catch(() => { dispatch({ type: NOTIFICATION_ERROR, status: true, }); }); }); } } if (count === undefined) { userFeeds.subscribe(callback); } } else { dispatch({ type: NOTIFICATION_ERROR, status: true, }); } }, () => { }); }; }
Valtis/Peliprojekti-2013
Koulupeliprojekti/src/UI/Window/TextBox.cpp
// TextBox: a window widget that displays a single line of static text on a
// solid background.

#include "UI/Window/TextBox.h"
#include "UI/Window/TextField.h"
#include "Graphics/TextureFactory.h"

// Builds the box background texture sized to `location` and embeds a
// TextField child that renders `text`.
TextBox::TextBox(std::string text, SDL_Rect location, Renderer *renderer)
{
  SetLocation(location);
  // Light grey, fully opaque background.
  SDL_Color color = { 160, 160, 160, 255};
  SetTexture(TextureFactory::CreateWindowTexture(location.w, location.h, color, renderer));
  // 13 is presumably the font size -- TODO confirm against TextField's ctor.
  std::unique_ptr<TextField> element(new TextField(text, 13, location, renderer));
  AddWindow(std::move(element));
}

TextBox::~TextBox()
{
}
DreamLi1314/shared
java/v3/gui/src/main/java/org/javafamily/swing/GuiMain12.java
<reponame>DreamLi1314/shared<gh_stars>0 package org.javafamily.swing; import org.javafamily.util.SwingUtils; import javax.swing.*; import java.awt.*; import java.awt.event.*; import java.util.Arrays; import java.util.Random; /** * MenuBar2 */ public class GuiMain12 extends JFrame { private static GuiMain12 mainFrame; private JMenuBar jMenuBar; private JMenuBar jMenuBar2; private Random random = new Random(); public GuiMain12() { super("MenuBar2 JFrame!"); setLayout(new FlowLayout()); JTextField text = new JTextField(30); text.setEditable(false); add(text); ActionListener listener = (e) -> { text.setText(((JMenuItem) e.getSource()).getText()); }; JMenu file = new JMenu("File"); // 设置助记符, 当 Menu 打开时按下 N 直接触发 New 的事件 JMenuItem newItem = new JMenuItem("New", KeyEvent.VK_N); // 当菜单打开时, 按下 Ctrl + N 触发 New 事件 newItem.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_N, KeyEvent.CTRL_MASK)); newItem.addActionListener(listener); JMenuItem open = new JMenuItem("Open", KeyEvent.VK_O); open.addActionListener(listener); JMenuItem settings = new JMenuItem("Settings"); settings.addActionListener(listener); JMenu openRecent = new JMenu("Open Recent"); JMenuItem workspace1 = new JMenuItem("Workspace1"); workspace1.addActionListener(listener); JMenuItem workspace2 = new JMenuItem("Workspace2"); workspace2.addActionListener(listener); openRecent.add(workspace1); openRecent.add(workspace2); file.add(newItem); file.add(open); file.add(openRecent); file.add(settings); JMenu window = new JMenu("Window"); ItemListener checkBoxListener = e -> { Object[] selectedObjects = e.getItemSelectable().getSelectedObjects(); String command = ((JCheckBoxMenuItem) e.getItem()).getActionCommand(); boolean state = ((JCheckBoxMenuItem) e.getItem()).getState(); text.setText(command + "===" + state + "===" + selectedObjects != null ? 
Arrays.asList(selectedObjects).toString() : null); }; JCheckBoxMenuItem restore = new JCheckBoxMenuItem("Restore"); restore.setActionCommand("restore"); restore.addItemListener(checkBoxListener); JCheckBoxMenuItem view = new JCheckBoxMenuItem("View"); view.setActionCommand("view"); view.addItemListener(checkBoxListener); window.add(restore); window.add(view); JMenu help = new JMenu("Help"); JMenuItem about = new JMenuItem("About"); about.addActionListener(listener); help.add(about); jMenuBar = new JMenuBar(); jMenuBar2 = new JMenuBar(); jMenuBar.add(file); jMenuBar.add(window); jMenuBar.add(help); jMenuBar2.add(new JMenu("File2")); jMenuBar2.add(new JMenu("Help2")); setJMenuBar(jMenuBar); JButton changeMenuBar = new JButton("Change Menu Bar"); changeMenuBar.addActionListener(e -> { int value = random.nextInt(100); System.out.println(value); if(value > 50) { setJMenuBar(jMenuBar); } else { setJMenuBar(jMenuBar2); } // 刷新 Container 和子 Components validate(); }); add(changeMenuBar); } public static void main(String[] args) throws Exception { mainFrame = new GuiMain12(); SwingUtils.run(mainFrame, 800, 600); } }
drewnoakes/bold-humanoid
Option/WalkTo/walkto.hh
#pragma once #include "../option.hh" #include "../../MotionModule/WalkModule/walkmodule.hh" #include <Eigen/Core> namespace bold { class WalkTo : public Option { public: WalkTo(std::string const& id, std::shared_ptr<WalkModule> walkModule); OptionVector runPolicy(rapidjson::Writer<rapidjson::StringBuffer>& writer) override; void setTargetPosition(Eigen::Vector2d targetPos, double targetAngle) { d_targetPos = targetPos; d_targetAngle = targetAngle; } private: Eigen::Vector2d d_targetPos; double d_targetAngle; double d_turnDist; std::shared_ptr<WalkModule> d_walkModule; Setting<double>* d_turnScale; Setting<double>* d_maxForwardSpeed; Setting<double>* d_minForwardSpeed; Setting<double>* d_maxSidewaysSpeed; Setting<double>* d_minSidewaysSpeed; Setting<double>* d_brakeDistance; Setting<double>* d_lowerTurnLimitDegs; Setting<double>* d_upperTurnLimitDegs; }; }
MilindThakur/nzme-skynet
test/test_docker_grid.py
import pytest

from nzme_skynet.core.driver.driverregistry import DriverRegistry

# Page used to verify the browser can navigate, and the Selenium Grid hub
# endpoint the dockerised browsers register against.
TEST_URL = "https://www.google.co.nz"
DOCKER_SELENIUM_URL = "http://localhost:4444/wd/hub"


@pytest.fixture(scope='module', params=["chrome", "firefox"])
def driver_setup(request):
    """Yield a remote driver for each parametrised browser.

    Registers the driver against the (non-local) grid, hands it to the test,
    and deregisters it once the module's tests finish.
    """
    DriverRegistry.register_driver(driver_type=request.param, local=False)
    driver = DriverRegistry.get_driver()
    yield driver
    DriverRegistry.deregister_driver()


def test_browser_setup(driver_setup):
    """The browser should reach TEST_URL and report it as the current URL."""
    driver_setup.goto_url(TEST_URL, absolute=True)
    # Plain membership assert instead of `(...) is True`: identical logic,
    # but pytest now shows both operands on failure and the redundant
    # identity comparison is gone.
    assert TEST_URL in driver_setup.current_url
lpenuelac/ImageAnalysis
scripts/lib/archive/spline.py
<gh_stars>10-100 # NAME # # Math::Spline - Cubic Spline Interpolation of data # # SYNOPSIS # # require Math::Spline; # $spline=new Math::Spline(\@x,\@y) # $y_interp=$spline->evaluate($x); # # use Math::Spline qw(spline linsearch binsearch); # use Math::Derivative qw(Derivative2); # @y2=Derivative2(\@x,\@y); # $index=binsearch(\@x,$x); # $index=linsearch(\@x,$x,$index); # $y_interp=spline(\@x,\@y,\@y2,$index,$x); # # DESCRIPTION # # This package provides cubic spline interpolation of numeric data. The # data is passed as references to two arrays containing the x and y # ordinates. It may be used as an exporter of the numerical functions # or, more easily as a class module. # # The B<Math::Spline> class constructor B<new> takes references to the # arrays of x and y ordinates of the data. An interpolation is performed # using the B<evaluate> method, which, when given an x ordinate returns # the interpolate y ordinate at that value. # # The B<spline> function takes as arguments references to the x and y # ordinate array, a reference to the 2nd derivatives (calculated using # B<Derivative2>, the low index of the interval in which to interpolate # and the x ordinate in that interval. Returned is the interpolated y # ordinate. Two functions are provided to look up the appropriate index # in the array of x data. For random calls B<binsearch> can be used - # give a reference to the x ordinates and the x loopup value it returns # the low index of the interval in the data in which the value # lies. Where the lookups are strictly in ascending sequence (e.g. if # interpolating to produce a higher resolution data set to draw a curve) # the B<linsearch> function may more efficiently be used. It performs # like B<binsearch>, but requires a third argument being the previous # index value, which is incremented if necessary. 
# # NOTE # # requires Math::Derivative module # # EXAMPLE # # require Math::Spline; # my @x=(1,3,8,10); # my @y=(1,2,3,4); # $spline=new Math::Spline(\@x,\@y); # print $spline->evaluate(5)."\n"; # # produces the output # # 2.44 # # (Perl version) AUTHOR # # <NAME> <<EMAIL>> # # SEE ALSO # # "Numerical Recipies: The Art of Scientific Computing" # W.H. Press, <NAME>, <NAME>, <NAME>. # Cambridge University Press. ISBN 0 521 30811 9. # functions for calculating derivatives of data # # Math::Derivative - Numeric 1st and 2nd order differentiation # # use Math::Derivative qw(Derivative1 Derivative2); # @dydx=Derivative1(\@x,\@y); # @d2ydx2=Derivative2(\@x,\@y); # @d2ydx2=Derivative2(\@x,\@y,$yp0,$ypn); # # DESCRIPTION # # This Perl package exports functions for performing numerical first # (B<Derivative1>) and second B<Derivative2>) order differentiation on # vectors of data. They both take references to two arrays containing # the x and y ordinates of the data and return an array of the 1st or # 2nd derivative at the given x ordinates. B<Derivative2> may optionally # be given values to use for the first dervivative at the start and end # points of the data - otherwiswe 'natural' values are used. 
# # (PERL) AUTHOR # # <NAME> <J.A.R.Willi<EMAIL>> # # PYTHON PORT (combines spline and derivative perl modules into a single # spline python module) # # <NAME> <curtolson ata flightgear dota org > # def derivative1(points): n = len(points)-1 # index of last point y2 = list(xrange(n+1)) y2[0] = (points[1][1]-points[0][1]) / (points[1][0]-points[0][0]) y2[n] = (points[n][1]-points[n-1][1]) / (points[n][0]-points[n-1][0]) for i in range(1, n): y2[i]=(points[i+1][1]-points[i-1][1]) / (points[i+1][0]-points[i-1][0]) return y2 def derivative2(points, yp1 = "", ypn = ""): n = len(points)-1 # index of last point y2 = list(xrange(n+1)) u = list(xrange(n+1)) if yp1 == "": y2[0] = 0 u[0] = 0 else: y2[0] = -0.5 u[0] = (3/(points[1][0]-points[0][0]))*((points[1][1]-points[0][1])/(points[1][0]-points[0][0])-float(yp1)) for i in range(1, n): sig = (points[i][0]-points[i-1][0])/(points[i+1][0]-points[i-1][0]) p = sig * y2[i-1] + 2.0 y2[i] = (sig-1.0) / p u[i] = (6.0*( (points[i+1][1]-points[i][1])/(points[i+1][0]-points[i][0])-(points[i][1]-points[i-1][1])/(points[i][0]-points[i-1][0]))/(points[i+1][0]-points[i-1][0])-sig*u[i-1])/p; if ypn == "": qn = 0 un = 0 else: qn = 0.5 un = (3.0/(points[n][0]-points[n-1][0]))*(float(ypn)-(points[n][1]-points[n-1][1])/(points[n][0]-points[n-1][0])) y2[n] = (un-qn*u[n-1])/(qn*y2[n-1]+1.0) for i in range(n-1, -1, -1): y2[i] = y2[i]*y2[i+1]+u[i] return y2 def spline(points, y2, i, v): klo = i khi = i + 1 h = points[khi][0] - points[klo][0] if h == 0: print "Zero interval in spline data." 
return 0; a = (points[khi][0] - v) / h b = (v - points[klo][0]) / h return a*points[klo][1] + b*points[khi][1]+((a*a*a-a)*y2[klo]+(b*b*b-b)*y2[khi])*(h*h)/6.0 # binary search routine finds index just below value def binsearch(points, v): klo = 0 khi = len(points)-1 while (khi - klo) > 1: k = int((khi+klo)/2) if (points[k][0] > v): khi = k else: klo = k return klo # more efficient if repetatively doint it def linsearch(points, v, khi): khi += 1 n = len(points) - 1 while v > points[khi][0] and khi < n: khi += 1 return khi - 1
wanyufe/eks-anywhere
test/e2e/simpleflow_test.go
//go:build e2e // +build e2e package e2e import ( "fmt" "log" "testing" "github.com/aws/eks-anywhere/internal/pkg/api" "github.com/aws/eks-anywhere/pkg/api/v1alpha1" "github.com/aws/eks-anywhere/pkg/features" "github.com/aws/eks-anywhere/pkg/logger" "github.com/aws/eks-anywhere/test/framework" ) func init() { if err := logger.InitZap(4, logger.WithName("e2e")); err != nil { log.Fatal(fmt.Errorf("failed init zap logger for e2e tests: %v", err)) } } func runHelmInstallSimpleFlow(test *framework.ClusterE2ETest) { test.GenerateClusterConfig() test.CreateCluster() test.InstallHelmChart() test.DeleteCluster() } func runTinkerbellSimpleFlow(test *framework.ClusterE2ETest) { test.GenerateClusterConfig() test.GenerateHardwareConfig() test.PowerOffHardware() test.CreateCluster() test.DeleteCluster() test.ValidateHardwareDecommissioned() } func runSimpleFlow(test *framework.ClusterE2ETest) { test.GenerateClusterConfig() test.CreateCluster() test.DeleteCluster() } func TestDockerKubernetes120SimpleFlow(t *testing.T) { test := framework.NewClusterE2ETest( t, framework.NewDocker(t), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube120)), ) runSimpleFlow(test) } func TestDockerKubernetes121SimpleFlow(t *testing.T) { test := framework.NewClusterE2ETest( t, framework.NewDocker(t), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube121)), ) runSimpleFlow(test) } func TestDockerKubernetes122SimpleFlow(t *testing.T) { test := framework.NewClusterE2ETest( t, framework.NewDocker(t), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube122)), ) runSimpleFlow(test) } func TestDockerKubernetes123SimpleFlow(t *testing.T) { test := framework.NewClusterE2ETest( t, framework.NewDocker(t), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube123)), framework.WithEnvVar(features.K8s123SupportEnvVar, "true"), ) runSimpleFlow(test) } func TestVSphereKubernetes120SimpleFlow(t *testing.T) { test := framework.NewClusterE2ETest( t, 
framework.NewVSphere(t, framework.WithUbuntu120()), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube120)), ) runSimpleFlow(test) } func TestVSphereKubernetes121SimpleFlow(t *testing.T) { test := framework.NewClusterE2ETest( t, framework.NewVSphere(t, framework.WithUbuntu121()), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube121)), ) runSimpleFlow(test) } func TestVSphereKubernetes122SimpleFlow(t *testing.T) { test := framework.NewClusterE2ETest( t, framework.NewVSphere(t, framework.WithUbuntu122()), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube122)), ) runSimpleFlow(test) } func TestVSphereKubernetes123SimpleFlow(t *testing.T) { test := framework.NewClusterE2ETest( t, framework.NewVSphere(t, framework.WithUbuntu123()), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube123)), framework.WithEnvVar(features.K8s123SupportEnvVar, "true"), ) runSimpleFlow(test) } func TestVSphereKubernetes123ThreeReplicasFiveWorkersSimpleFlow(t *testing.T) { test := framework.NewClusterE2ETest( t, framework.NewVSphere(t, framework.WithUbuntu123()), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube123)), framework.WithClusterFiller(api.WithControlPlaneCount(3)), framework.WithClusterFiller(api.WithWorkerNodeCount(5)), framework.WithEnvVar(features.K8s123SupportEnvVar, "true"), ) runSimpleFlow(test) } func TestVSphereKubernetes123DifferentNamespaceSimpleFlow(t *testing.T) { test := framework.NewClusterE2ETest( t, framework.NewVSphere(t, framework.WithUbuntu123(), framework.WithVSphereFillers(api.WithVSphereConfigNamespaceForAllMachinesAndDatacenter(clusterNamespace))), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube123)), framework.WithClusterFiller(api.WithClusterNamespace(clusterNamespace)), framework.WithEnvVar(features.K8s123SupportEnvVar, "true"), ) runSimpleFlow(test) } func TestVSphereKubernetes123BottleRocketSimpleFlow(t *testing.T) { test := 
framework.NewClusterE2ETest( t, framework.NewVSphere(t, framework.WithBottleRocket123()), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube123)), framework.WithEnvVar(features.K8s123SupportEnvVar, "true"), ) runSimpleFlow(test) } func TestVSphereKubernetes123BottleRocketThreeReplicasFiveWorkersSimpleFlow(t *testing.T) { test := framework.NewClusterE2ETest( t, framework.NewVSphere(t, framework.WithBottleRocket123()), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube123)), framework.WithClusterFiller(api.WithControlPlaneCount(3)), framework.WithClusterFiller(api.WithWorkerNodeCount(5)), framework.WithEnvVar(features.K8s123SupportEnvVar, "true"), ) runSimpleFlow(test) } func TestVSphereKubernetes123BottleRocketDifferentNamespaceSimpleFlow(t *testing.T) { test := framework.NewClusterE2ETest( t, framework.NewVSphere(t, framework.WithBottleRocket123(), framework.WithVSphereFillers(api.WithVSphereConfigNamespaceForAllMachinesAndDatacenter(clusterNamespace))), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube123)), framework.WithClusterFiller(api.WithClusterNamespace(clusterNamespace)), framework.WithEnvVar(features.K8s123SupportEnvVar, "true"), ) runSimpleFlow(test) } func TestCloudStackKubernetes120SimpleFlow(t *testing.T) { test := framework.NewClusterE2ETest( t, framework.NewCloudStack(t, framework.WithRedhat120()), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube120)), ) runSimpleFlow(test) } func TestCloudStackKubernetes121SimpleFlow(t *testing.T) { test := framework.NewClusterE2ETest( t, framework.NewCloudStack(t, framework.WithCloudStackRedhat121()), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube121)), ) runSimpleFlow(test) } func TestCloudStackKubernetes121DifferentNamespaceSimpleFlow(t *testing.T) { test := framework.NewClusterE2ETest( t, framework.NewCloudStack(t, framework.WithCloudStackRedhat121(), 
framework.WithCloudStackFillers(api.WithCloudStackConfigNamespace(clusterNamespace))), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube121)), framework.WithClusterFiller(api.WithClusterNamespace(clusterNamespace)), ) runSimpleFlow(test) } func TestVSphereKubernetes121CiliumAlwaysPolicyEnforcementModeSimpleFlow(t *testing.T) { test := framework.NewClusterE2ETest( t, framework.NewVSphere(t, framework.WithUbuntu121()), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube121)), framework.WithClusterFiller(api.WithCiliumPolicyEnforcementMode(v1alpha1.CiliumPolicyModeAlways)), ) runSimpleFlow(test) } func TestTinkerbellKubernetes120SimpleFlow(t *testing.T) { test := framework.NewClusterE2ETest( t, framework.NewTinkerbell(t, framework.WithUbuntu120Tinkerbell()), framework.WithEnvVar("TINKERBELL_PROVIDER", "true"), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube120)), framework.WithHardware(2), ) runTinkerbellSimpleFlow(test) } func TestTinkerbellKubernetes121SimpleFlow(t *testing.T) { test := framework.NewClusterE2ETest( t, framework.NewTinkerbell(t, framework.WithUbuntu121Tinkerbell()), framework.WithEnvVar("TINKERBELL_PROVIDER", "true"), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube121)), framework.WithHardware(2), ) runTinkerbellSimpleFlow(test) } func TestTinkerbellKubernetes121ExternalEtcdSimpleFlow(t *testing.T) { test := framework.NewClusterE2ETest( t, framework.NewTinkerbell(t, framework.WithUbuntu121Tinkerbell(), framework.WithTinkerbellExternalEtcdTopology(1)), framework.WithEnvVar("TINKERBELL_PROVIDER", "true"), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube121)), framework.WithHardware(3), ) runTinkerbellSimpleFlow(test) } func TestTinkerbellKubernetes121ExternalEtcdThreeReplicasTwoWorkersSimpleFlow(t *testing.T) { test := framework.NewClusterE2ETest( t, framework.NewTinkerbell(t, framework.WithUbuntu121Tinkerbell(), 
framework.WithTinkerbellExternalEtcdTopology(1)), framework.WithEnvVar("TINKERBELL_PROVIDER", "true"), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube121)), framework.WithClusterFiller(api.WithWorkerNodeCount(2)), framework.WithClusterFiller(api.WithControlPlaneCount(3)), framework.WithHardware(6), ) runTinkerbellSimpleFlow(test) } func TestTinkerbellKubernetes121ThreeReplicasTwoWorkersSimpleFlow(t *testing.T) { test := framework.NewClusterE2ETest( t, framework.NewTinkerbell(t, framework.WithUbuntu121Tinkerbell()), framework.WithEnvVar("TINKERBELL_PROVIDER", "true"), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube121)), framework.WithClusterFiller(api.WithWorkerNodeCount(2)), framework.WithClusterFiller(api.WithControlPlaneCount(3)), framework.WithHardware(5), ) runTinkerbellSimpleFlow(test) } func TestTinkerbellKubernetes121SuperMicroSimpleFlow(t *testing.T) { test := framework.NewClusterE2ETest( t, framework.NewTinkerbell(t, framework.WithUbuntu121Tinkerbell()), framework.WithHardware(2), framework.WithEnvVar("TINKERBELL_PROVIDER", "true"), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube121)), ) runTinkerbellSimpleFlow(test) } func TestTinkerbellKubernetes121DellSimpleFlow(t *testing.T) { test := framework.NewClusterE2ETest( t, framework.NewTinkerbell(t, framework.WithUbuntu121Tinkerbell()), framework.WithHardware(2), framework.WithEnvVar("TINKERBELL_PROVIDER", "true"), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube121)), ) runTinkerbellSimpleFlow(test) } func TestTinkerbellKubernetes121HPSimpleFlow(t *testing.T) { test := framework.NewClusterE2ETest( t, framework.NewTinkerbell(t, framework.WithUbuntu121Tinkerbell()), framework.WithHardware(2), framework.WithEnvVar("TINKERBELL_PROVIDER", "true"), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube121)), ) runTinkerbellSimpleFlow(test) } func TestSnowKubernetes121SimpleFlow(t *testing.T) { test := 
framework.NewClusterE2ETest( t, framework.NewSnow(t, framework.WithSnowUbuntu121()), framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube121)), framework.WithEnvVar("SNOW_PROVIDER", "true"), ) runSimpleFlow(test) }
jreece1567/go_swagger_client
client_core/s_t_o_r_e_t_r_a_d_i_n_g_h_o_u_r_s/patch_participants_participant_id_responses.go
package s_t_o_r_e_t_r_a_d_i_n_g_h_o_u_r_s // This file was generated by the swagger tool. // Editing this file might prove futile when you re-run the swagger generate command import ( "fmt" "io" "github.com/go-openapi/runtime" strfmt "github.com/go-openapi/strfmt" "restclient/models_core" ) // PatchParticipantsParticipantIDReader is a Reader for the PatchParticipantsParticipantID structure. type PatchParticipantsParticipantIDReader struct { formats strfmt.Registry } // ReadResponse reads a server response into the recieved o. func (o *PatchParticipantsParticipantIDReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { case 204: result := NewPatchParticipantsParticipantIDNoContent() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil case 401: result := NewPatchParticipantsParticipantIDUnauthorized() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return nil, result case 404: result := NewPatchParticipantsParticipantIDNotFound() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return nil, result case 422: result := NewPatchParticipantsParticipantIDUnprocessableEntity() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return nil, result default: return nil, runtime.NewAPIError("unknown error", response, response.Code()) } } // NewPatchParticipantsParticipantIDNoContent creates a PatchParticipantsParticipantIDNoContent with default headers values func NewPatchParticipantsParticipantIDNoContent() *PatchParticipantsParticipantIDNoContent { return &PatchParticipantsParticipantIDNoContent{} } /*PatchParticipantsParticipantIDNoContent handles this case with default header values. Successfully updated the participant. 
*/ type PatchParticipantsParticipantIDNoContent struct { } func (o *PatchParticipantsParticipantIDNoContent) Error() string { return fmt.Sprintf("[PATCH /participants/{participant_id}][%d] patchParticipantsParticipantIdNoContent ", 204) } func (o *PatchParticipantsParticipantIDNoContent) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { return nil } // NewPatchParticipantsParticipantIDUnauthorized creates a PatchParticipantsParticipantIDUnauthorized with default headers values func NewPatchParticipantsParticipantIDUnauthorized() *PatchParticipantsParticipantIDUnauthorized { return &PatchParticipantsParticipantIDUnauthorized{} } /*PatchParticipantsParticipantIDUnauthorized handles this case with default header values. Unauthorized request. */ type PatchParticipantsParticipantIDUnauthorized struct { Payload *models_core.Http401Response } func (o *PatchParticipantsParticipantIDUnauthorized) Error() string { return fmt.Sprintf("[PATCH /participants/{participant_id}][%d] patchParticipantsParticipantIdUnauthorized %+v", 401, o.Payload) } func (o *PatchParticipantsParticipantIDUnauthorized) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { o.Payload = new(models_core.Http401Response) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { return err } return nil } // NewPatchParticipantsParticipantIDNotFound creates a PatchParticipantsParticipantIDNotFound with default headers values func NewPatchParticipantsParticipantIDNotFound() *PatchParticipantsParticipantIDNotFound { return &PatchParticipantsParticipantIDNotFound{} } /*PatchParticipantsParticipantIDNotFound handles this case with default header values. Participant not found. 
*/ type PatchParticipantsParticipantIDNotFound struct { Payload *models_core.Http404Response } func (o *PatchParticipantsParticipantIDNotFound) Error() string { return fmt.Sprintf("[PATCH /participants/{participant_id}][%d] patchParticipantsParticipantIdNotFound %+v", 404, o.Payload) } func (o *PatchParticipantsParticipantIDNotFound) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { o.Payload = new(models_core.Http404Response) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { return err } return nil } // NewPatchParticipantsParticipantIDUnprocessableEntity creates a PatchParticipantsParticipantIDUnprocessableEntity with default headers values func NewPatchParticipantsParticipantIDUnprocessableEntity() *PatchParticipantsParticipantIDUnprocessableEntity { return &PatchParticipantsParticipantIDUnprocessableEntity{} } /*PatchParticipantsParticipantIDUnprocessableEntity handles this case with default header values. Participant not updated. Returns an array of error messages explaining the problems with the provided attributes. */ type PatchParticipantsParticipantIDUnprocessableEntity struct { Payload *models_core.Participant422Response } func (o *PatchParticipantsParticipantIDUnprocessableEntity) Error() string { return fmt.Sprintf("[PATCH /participants/{participant_id}][%d] patchParticipantsParticipantIdUnprocessableEntity %+v", 422, o.Payload) } func (o *PatchParticipantsParticipantIDUnprocessableEntity) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { o.Payload = new(models_core.Participant422Response) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { return err } return nil }
volzotan/django-howl
howl/forecast/__init__.py
# Legacy (pre-Django 3.2) hook telling Django which AppConfig class to load
# when this app is referenced by its module path.
default_app_config = 'forecast.apps.ForecastConfig'
SergeyBelyj/Sergey-White
Junior001/src/test/java/set/SimpleSetTest.java
package set; import list.SimpleArrayList; import org.junit.Test; public class SimpleSetTest { @Test public void whenAddTwoElemSameThenNo() { SimpleSet<Integer> setM = new SimpleSet<>(); setM.add(12); setM.add(13); setM.add(13); setM.add(14); System.out.println(setM.iterator().next()); System.out.println(setM.iterator().next()); System.out.println(setM.iterator().next()); } }
anaai/anaai
nft-service/service.py
"""FastAPI micro-service that mints StyleArt NFTs via a Web3 contract."""
import os
from dotenv import load_dotenv
from fastapi import FastAPI
from fastapi.responses import JSONResponse
from pydantic import BaseModel
from web3 import Web3

import logger
from contracts import style_art_factory

# Pull PUBLIC_KEY / PRIVATE_KEY / API_URL / CONTRACT_ADDRESS from .env.
load_dotenv()

# Minting account.  Addresses are checksummed up front so a malformed env
# value fails at startup rather than on the first request.
PUBLIC_KEY = Web3.toChecksumAddress(os.getenv("PUBLIC_KEY"))
# Private key is stored without the 0x prefix in the environment.
PRIVATE_KEY = f'0x{os.getenv("PRIVATE_KEY")}'
API_URL = os.getenv("API_URL")  # JSON-RPC node endpoint
CONTRACT_ADDRESS = Web3.toChecksumAddress(os.getenv("CONTRACT_ADDRESS"))
# Compiled-contract ABI/bytecode artifact.
CONTRACT_PATH = "contracts/StyleArt.json"

app = FastAPI()


class NFTPayload(BaseModel):
    # Request body for /mint_nft.
    payer: str       # address the minted token is attributed to
    token_uri: str   # metadata URI for the new token


@app.get("/status")
async def get_status():
    # Liveness probe.
    return "Service running"


@app.post("/mint_nft")
async def mint_nft(nft: NFTPayload):
    # A fresh contract wrapper is built per request; presumably cheap enough
    # here - confirm before caching it at module scope.
    contract = style_art_factory(PUBLIC_KEY, PRIVATE_KEY, API_URL, CONTRACT_ADDRESS, CONTRACT_PATH)
    token_id = contract.mint_nft(Web3.toChecksumAddress(nft.payer), nft.token_uri)
    logger.log_token_minted(nft.payer, token_id, nft.token_uri)
    return JSONResponse({"token_id": token_id})
joefk/pulsar
pulsar-broker-common/src/main/java/org/apache/pulsar/broker/authorization/AuthorizationService.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pulsar.broker.authorization; import static java.util.concurrent.TimeUnit.SECONDS; import static org.apache.pulsar.zookeeper.ZooKeeperCache.cacheTimeOutInSec; import java.util.Set; import java.util.concurrent.CompletableFuture; import org.apache.commons.lang3.StringUtils; import org.apache.pulsar.broker.PulsarServerException; import org.apache.pulsar.broker.ServiceConfiguration; import org.apache.pulsar.broker.authentication.AuthenticationDataSource; import org.apache.pulsar.broker.cache.ConfigurationCacheService; import org.apache.pulsar.common.naming.NamespaceName; import org.apache.pulsar.common.naming.TopicName; import org.apache.pulsar.common.policies.data.AuthAction; import org.apache.pulsar.common.util.FutureUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Authorization service that manages pluggable authorization provider and authorize requests accordingly. 
* */ public class AuthorizationService { private static final Logger log = LoggerFactory.getLogger(AuthorizationService.class); private AuthorizationProvider provider; private final ServiceConfiguration conf; public AuthorizationService(ServiceConfiguration conf, ConfigurationCacheService configCache) throws PulsarServerException { this.conf = conf; if (this.conf.isAuthorizationEnabled()) { try { final String providerClassname = conf.getAuthorizationProvider(); if (StringUtils.isNotBlank(providerClassname)) { provider = (AuthorizationProvider) Class.forName(providerClassname).newInstance(); provider.initialize(conf, configCache); log.info("{} has been loaded.", providerClassname); } else { throw new PulsarServerException("No authorization providers are present."); } } catch (PulsarServerException e) { throw e; } catch (Throwable e) { throw new PulsarServerException("Failed to load an authorization provider.", e); } } else { log.info("Authorization is disabled"); } } /** * * Grant authorization-action permission on a namespace to the given client * * @param namespace * @param actions * @param role * @param authDataJson * additional authdata in json for targeted authorization provider * @return * @throws IllegalArgumentException * when namespace not found * @throws IllegalStateException * when failed to grant permission */ public CompletableFuture<Void> grantPermissionAsync(NamespaceName namespace, Set<AuthAction> actions, String role, String authDataJson) { if (provider != null) { return provider.grantPermissionAsync(namespace, actions, role, authDataJson); } return FutureUtil.failedFuture(new IllegalStateException("No authorization provider configured")); } /** * Grant authorization-action permission on a topic to the given client * * @param topicname * @param role * @param authDataJson * additional authdata in json for targeted authorization provider * @return IllegalArgumentException when namespace not found * @throws IllegalStateException * when failed to grant 
permission */ public CompletableFuture<Void> grantPermissionAsync(TopicName topicname, Set<AuthAction> actions, String role, String authDataJson) { if (provider != null) { return provider.grantPermissionAsync(topicname, actions, role, authDataJson); } return FutureUtil.failedFuture(new IllegalStateException("No authorization provider configured")); } /** * Check if the specified role has permission to send messages to the specified fully qualified topic name. * * @param topicName * the fully qualified topic name associated with the topic. * @param role * the app id used to send messages to the topic. */ public CompletableFuture<Boolean> canProduceAsync(TopicName topicName, String role, AuthenticationDataSource authenticationData) { if (!this.conf.isAuthorizationEnabled()) { return CompletableFuture.completedFuture(true); } if (provider != null) { return provider.canProduceAsync(topicName, role, authenticationData); } return FutureUtil.failedFuture(new IllegalStateException("No authorization provider configured")); } /** * Check if the specified role has permission to receive messages from the specified fully qualified topic name. * * @param topicName * the fully qualified topic name associated with the topic. * @param role * the app id used to receive messages from the topic. 
* @param subscription * the subscription name defined by the client */ public CompletableFuture<Boolean> canConsumeAsync(TopicName topicName, String role, AuthenticationDataSource authenticationData, String subscription) { if (!this.conf.isAuthorizationEnabled()) { return CompletableFuture.completedFuture(true); } if (provider != null) { return provider.canConsumeAsync(topicName, role, authenticationData, subscription); } return FutureUtil.failedFuture(new IllegalStateException("No authorization provider configured")); } public boolean canProduce(TopicName topicName, String role, AuthenticationDataSource authenticationData) throws Exception { try { return canProduceAsync(topicName, role, authenticationData).get(cacheTimeOutInSec, SECONDS); } catch (InterruptedException e) { log.warn("Time-out {} sec while checking authorization on {} ", cacheTimeOutInSec, topicName); throw e; } catch (Exception e) { log.warn("Producer-client with Role - {} failed to get permissions for topic - {}. {}", role, topicName, e.getMessage()); throw e; } } public boolean canConsume(TopicName topicName, String role, AuthenticationDataSource authenticationData, String subscription) throws Exception { try { return canConsumeAsync(topicName, role, authenticationData, subscription).get(cacheTimeOutInSec, SECONDS); } catch (InterruptedException e) { log.warn("Time-out {} sec while checking authorization on {} ", cacheTimeOutInSec, topicName); throw e; } catch (Exception e) { log.warn("Consumer-client with Role - {} failed to get permissions for topic - {}. {}", role, topicName, e.getMessage()); throw e; } } /** * Check whether the specified role can perform a lookup for the specified topic. * * For that the caller needs to have producer or consumer permission. 
* * @param topicName * @param role * @return * @throws Exception */ public boolean canLookup(TopicName topicName, String role, AuthenticationDataSource authenticationData) throws Exception { return canProduce(topicName, role, authenticationData) || canConsume(topicName, role, authenticationData, null); } /** * Check whether the specified role can perform a lookup for the specified topic. * * For that the caller needs to have producer or consumer permission. * * @param topicName * @param role * @return * @throws Exception */ public CompletableFuture<Boolean> canLookupAsync(TopicName topicName, String role, AuthenticationDataSource authenticationData) { CompletableFuture<Boolean> finalResult = new CompletableFuture<Boolean>(); canProduceAsync(topicName, role, authenticationData).whenComplete((produceAuthorized, ex) -> { if (ex == null) { if (produceAuthorized) { finalResult.complete(produceAuthorized); return; } } else { if (log.isDebugEnabled()) { log.debug( "Topic [{}] Role [{}] exception occured while trying to check Produce permissions. {}", topicName.toString(), role, ex.getMessage()); } } canConsumeAsync(topicName, role, null, null).whenComplete((consumeAuthorized, e) -> { if (e == null) { if (consumeAuthorized) { finalResult.complete(consumeAuthorized); return; } } else { if (log.isDebugEnabled()) { log.debug( "Topic [{}] Role [{}] exception occured while trying to check Consume permissions. {}", topicName.toString(), role, e.getMessage()); } finalResult.completeExceptionally(e); return; } finalResult.complete(false); }); }); return finalResult; } }
ErnestaP/runregistry
runregistry_frontend/ducks/json/jsons.js
import axios from 'axios';
import stringify from 'json-stringify-pretty-compact';
import auth from '../../auth/auth';
import { error_handler } from '../../utils/error_handlers';
import { api_url } from '../../config/config';
import Swal from 'sweetalert2';

// Action types for the JSON-portal duck.
const FETCH_JSONS = 'FETCH_JSONS';
const FETCH_MORE_JSONS = 'FETCH_MORE_JSONS';
const UPDATE_PROGRESS = 'UPDATE_PROGRESS';
const MORE_JSONS = 'MORE_JSONS';
const DELETE_JSON = 'DELETE_JSON';

// Fetches the first page of JSONs for the currently selected tab filter.
export const getJsons = (selected_tab) =>
  error_handler(async (dispatch, getState) => {
    const { data } = await axios.post(
      `${api_url}/json_portal/jsons`,
      {
        filter: selected_tab,
      },
      auth(getState)
    );
    // The API may pad the result with nulls; drop them before storing.
    const jsons = data.jsons.filter((json) => json !== null);
    dispatch({
      type: FETCH_JSONS,
      payload: jsons,
    });
  });

// Deletes one JSON by id, confirms with a popup, and records the deletion.
export const deleteJson = (id_json) =>
  error_handler(async (dispatch, getState) => {
    const { headers } = auth(getState);
    const { data } = await axios.delete(`${api_url}/json_portal/json`, {
      headers,
      data: { id_json },
    });
    await Swal(`JSON deleted`, '', 'success');
    dispatch({
      type: DELETE_JSON,
      payload: data.deleted_json,
    });
  });

// Fetches the next page of JSONs after `reference`; dispatches MORE_JSONS
// (meaning "no more pages") when the server returns an empty page.
export const fetchMoreJsons = (selected_tab, reference) =>
  error_handler(async (dispatch, getState) => {
    const { data } = await axios.post(
      `${api_url}/json_portal/jsons`,
      {
        filter: selected_tab,
        reference,
      },
      auth(getState)
    );
    const jsons = data.jsons.filter((json) => json !== null);
    if (jsons.length === 0) {
      dispatch({ type: MORE_JSONS });
    }
    dispatch({
      type: FETCH_MORE_JSONS,
      payload: jsons,
    });
  });

// Progress events arrive as { id, progress } for a single JSON being built.
export const updateProgress = (event) => ({
  type: UPDATE_PROGRESS,
  payload: event,
});

const INITIAL_STATE = {
  more_jsons: true,
  jsons: [],
  deleted_jsons: [],
};

export default function (state = INITIAL_STATE, action) {
  const { type, payload } = action;
  switch (type) {
    case FETCH_JSONS:
      return {
        ...state,
        jsons: payload,
        more_jsons: true,
      };
    case UPDATE_PROGRESS:
      // BUG FIX: the helper iterates over an *array* of jsons (it calls
      // `.map`), but was previously handed the whole state object, which
      // would throw "state.map is not a function". Pass state.jsons.
      // The helper already returns a fresh array, so no extra spread needed.
      return { ...state, jsons: updateProgressHelper(state.jsons, payload) };
    case FETCH_MORE_JSONS:
      return { ...state, jsons: [...state.jsons, ...payload] };
    case MORE_JSONS:
      return { ...state, more_jsons: false };
    case DELETE_JSON:
      return { ...state, deleted_jsons: [...state.deleted_jsons, payload] };
    default:
      return state;
  }
}

// Returns a new array in which the json matching event.id carries the new
// progress value; all other entries are shallow-copied unchanged.
function updateProgressHelper(jsons, event) {
  const { id, progress } = event;
  const result = jsons.map((json) => {
    if (json.id === id) {
      return { ...json, progress };
    }
    return { ...json };
  });
  return result;
}
dskcode/jwala
jwala-common/src/test/java/com/cerner/jwala/commandprocessor/impl/jsch/JschScpCommandProcessorImplTest.java
package com.cerner.jwala.commandprocessor.impl.jsch;

import com.cerner.jwala.commandprocessor.CommandProcessor;
import com.cerner.jwala.common.exec.ExecCommand;
import com.cerner.jwala.common.exec.RemoteExecCommand;
import com.cerner.jwala.common.exec.RemoteSystemConnection;
import com.cerner.jwala.exception.RemoteCommandFailureException;
import com.jcraft.jsch.ChannelExec;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.Session;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import static org.mockito.Matchers.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;

/**
 * Unit tests for {@code JschScpCommandProcessorImpl}: the SCP copy is driven
 * through fully mocked JSch session/channel objects, so no real SSH
 * connection is made.
 */
public class JschScpCommandProcessorImplTest {

    @Mock
    private JSch mockJsch;

    @Mock
    private RemoteExecCommand mockRemoteExecCommand;

    // System under test; built fresh for every test in setup().
    private CommandProcessor jschScpCommandProcessor;

    private static final String PROPERTIES_ROOT_PATH = "PROPERTIES_ROOT_PATH";
    private String resourceDir;

    public JschScpCommandProcessorImplTest() {
        // Derive the test-resources directory from a known resource file so the
        // properties root points at src/test/resources regardless of CWD.
        resourceDir = this.getClass().getClassLoader().getResource("vars.properties").getPath();
        resourceDir = resourceDir.substring(0, resourceDir.lastIndexOf("/"));
    }

    @Before
    public void setup() {
        System.setProperty(PROPERTIES_ROOT_PATH, resourceDir);
        initMocks(this);
        jschScpCommandProcessor = new JschScpCommandProcessorImpl(mockJsch, mockRemoteExecCommand);
    }

    @After
    public void tearDown() {
        // Don't leak the property into other test classes in the same JVM.
        System.clearProperty(PROPERTIES_ROOT_PATH);
    }

    /**
     * Happy path: the remote side (AckIn) keeps answering 0, which SCP treats
     * as a success acknowledgement, so processCommand() completes normally.
     */
    @Test
    public void testProcessCommand() throws Exception {
        final ExecCommand command = new ExecCommand("frag1", this.getClass().getClassLoader().getResource("jsch-scp.txt").getPath(), "frag3");
        final RemoteSystemConnection mockRemoteSystemConnection = mock(RemoteSystemConnection.class);
        when(mockRemoteExecCommand.getCommand()).thenReturn(command);
        when(mockRemoteExecCommand.getRemoteSystemConnection()).thenReturn(mockRemoteSystemConnection);
        when(mockRemoteSystemConnection.getEncryptedPassword()).thenReturn("#$@%aaa==".toCharArray());
        final Session mockSession = mock(Session.class);
        final ChannelExec mockChannelExec = mock(ChannelExec.class);
        when(mockChannelExec.getOutputStream()).thenReturn(mock(OutputStream.class));
        when(mockChannelExec.getInputStream()).thenReturn(new AckIn());
        when(mockSession.openChannel(eq("exec"))).thenReturn(mockChannelExec);
        when(mockJsch.getSession(anyString(), anyString(), anyInt())).thenReturn(mockSession);
        jschScpCommandProcessor.processCommand();
    }

    /**
     * Error path: the channel's input stream yields a single byte of 5 —
     * not a valid SCP acknowledgement — which is expected to surface as a
     * RemoteCommandFailureException. NOTE(review): getOutputStream() is
     * deliberately left unstubbed here (Mockito returns null); the failure is
     * presumably triggered before/while the ack byte is read — confirm against
     * JschScpCommandProcessorImpl if this test ever needs to change.
     */
    @Test(expected = RemoteCommandFailureException.class)
    public void testProcessCommandAckErr() throws Exception {
        final ExecCommand command = new ExecCommand("frag1", this.getClass().getClassLoader().getResource("jsch-scp.txt").getPath(), "frag3");
        final RemoteSystemConnection mockRemoteSystemConnection = mock(RemoteSystemConnection.class);
        when(mockRemoteExecCommand.getCommand()).thenReturn(command);
        when(mockRemoteExecCommand.getRemoteSystemConnection()).thenReturn(mockRemoteSystemConnection);
        when(mockRemoteSystemConnection.getEncryptedPassword()).thenReturn("#$@%aaa==".toCharArray());
        final Session mockSession = mock(Session.class);
        final ChannelExec mockChannelExec = mock(ChannelExec.class);
        final byte [] bytes = {5};
        when(mockChannelExec.getInputStream()).thenReturn(new ByteArrayInputStream(bytes));
        when(mockSession.openChannel(eq("exec"))).thenReturn(mockChannelExec);
        when(mockJsch.getSession(anyString(), anyString(), anyInt())).thenReturn(mockSession);
        jschScpCommandProcessor.processCommand();
    }

    /**
     * Stand-in for the remote SCP peer's stream: always reports one byte
     * available and always reads 0, i.e. an endless stream of "OK" bytes.
     */
    static class AckIn extends InputStream {

        @Override
        public int available() throws IOException {
            return 1;
        }

        @Override
        public int read() throws IOException {
            return 0;
        }
    }
}
russellb/nova
nova/scheduler/vsa.py
<gh_stars>0 # vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright (c) 2011 Zadara Storage Inc. # Copyright (c) 2011 OpenStack LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ VSA Simple Scheduler """ from nova import context from nova import db from nova import flags from nova import log as logging from nova.openstack.common import cfg from nova import rpc from nova import utils from nova import exception from nova.scheduler import driver from nova.scheduler import simple from nova.vsa.api import VsaState from nova.volume import volume_types LOG = logging.getLogger(__name__) vsa_scheduler_opts = [ cfg.IntOpt('drive_type_approx_capacity_percent', default=10, help='The percentage range for capacity comparison'), cfg.IntOpt('vsa_unique_hosts_per_alloc', default=10, help='The number of unique hosts per storage allocation'), cfg.BoolOpt('vsa_select_unique_drives', default=True, help='Allow selection of same host for multiple drives'), ] FLAGS = flags.FLAGS FLAGS.register_opts(vsa_scheduler_opts) def BYTES_TO_GB(bytes): return bytes >> 30 def GB_TO_BYTES(gb): return gb << 30 class VsaScheduler(simple.SimpleScheduler): """Implements Scheduler for volume placement.""" def __init__(self, *args, **kwargs): super(VsaScheduler, self).__init__(*args, **kwargs) self._notify_all_volume_hosts("startup") def _notify_all_volume_hosts(self, event): rpc.fanout_cast(context.get_admin_context(), FLAGS.volume_topic, {"method": "notification", "args": {"event": event}}) def _qosgrp_match(self, 
drive_type, qos_values): def _compare_names(str1, str2): return str1.lower() == str2.lower() def _compare_sizes_approxim(cap_capacity, size): cap_capacity = BYTES_TO_GB(int(cap_capacity)) size = int(size) size_perc = size * FLAGS.drive_type_approx_capacity_percent / 100 return (cap_capacity >= size - size_perc and cap_capacity <= size + size_perc) # Add more entries for additional comparisons compare_list = [{'cap1': 'DriveType', 'cap2': 'type', 'cmp_func': _compare_names}, {'cap1': 'DriveCapacity', 'cap2': 'size', 'cmp_func': _compare_sizes_approxim}] for cap in compare_list: if (cap['cap1'] in qos_values.keys() and cap['cap2'] in drive_type.keys() and cap['cmp_func'] is not None and cap['cmp_func'](qos_values[cap['cap1']], drive_type[cap['cap2']])): pass else: return False return True def _get_service_states(self): return self.host_manager.service_states def _filter_hosts(self, topic, request_spec, host_list=None): LOG.debug(_("_filter_hosts: %(request_spec)s"), locals()) drive_type = request_spec['drive_type'] LOG.debug(_("Filter hosts for drive type %s"), drive_type['name']) if host_list is None: host_list = self._get_service_states().iteritems() filtered_hosts = [] # returns list of (hostname, capability_dict) for host, host_dict in host_list: for service_name, service_dict in host_dict.iteritems(): if service_name != topic: continue gos_info = service_dict.get('drive_qos_info', {}) for qosgrp, qos_values in gos_info.iteritems(): if self._qosgrp_match(drive_type, qos_values): if qos_values['AvailableCapacity'] > 0: filtered_hosts.append((host, gos_info)) else: LOG.debug(_("Host %s has no free capacity. 
Skip"), host) break host_names = [item[0] for item in filtered_hosts] LOG.debug(_("Filter hosts: %s"), host_names) return filtered_hosts def _allowed_to_use_host(self, host, selected_hosts, unique): if not unique or host not in [item[0] for item in selected_hosts]: return True else: return False def _add_hostcap_to_list(self, selected_hosts, host, cap): if host not in [item[0] for item in selected_hosts]: selected_hosts.append((host, cap)) def host_selection_algorithm(self, request_spec, all_hosts, selected_hosts, unique): """Must override this method for VSA scheduler to work.""" raise NotImplementedError(_("Must implement host selection mechanism")) def _select_hosts(self, request_spec, all_hosts, selected_hosts=None): if selected_hosts is None: selected_hosts = [] host = None if len(selected_hosts) >= FLAGS.vsa_unique_hosts_per_alloc: # try to select from already selected hosts only LOG.debug(_("Maximum number of hosts selected (%d)"), len(selected_hosts)) unique = False (host, qos_cap) = self.host_selection_algorithm(request_spec, selected_hosts, selected_hosts, unique) LOG.debug(_("Selected excessive host %(host)s"), locals()) else: unique = FLAGS.vsa_select_unique_drives if host is None: # if we've not tried yet (# of sel hosts < max) - unique=True # or failed to select from selected_hosts - unique=False # select from all hosts (host, qos_cap) = self.host_selection_algorithm(request_spec, all_hosts, selected_hosts, unique) if host is None: raise exception.NoValidHost(reason=_("")) return (host, qos_cap) def _provision_volume(self, context, vol, vsa_id, availability_zone): if availability_zone is None: availability_zone = FLAGS.storage_availability_zone now = utils.utcnow() options = { 'size': vol['size'], 'user_id': context.user_id, 'project_id': context.project_id, 'snapshot_id': None, 'availability_zone': availability_zone, 'status': "creating", 'attach_status': "detached", 'display_name': vol['name'], 'display_description': vol['description'], 
'volume_type_id': vol['volume_type_id'], 'metadata': dict(to_vsa_id=vsa_id), } size = vol['size'] host = vol['host'] name = vol['name'] LOG.debug(_("Provision volume %(name)s of size %(size)s GB on " "host %(host)s"), locals()) volume_ref = db.volume_create(context.elevated(), options) driver.cast_to_volume_host(context, vol['host'], 'create_volume', volume_id=volume_ref['id'], snapshot_id=None) def _check_host_enforcement(self, context, availability_zone): if (availability_zone and ':' in availability_zone and context.is_admin): zone, _x, host = availability_zone.partition(':') service = db.service_get_by_args(context.elevated(), host, 'nova-volume') if service['disabled'] or not utils.service_is_up(service): raise exception.WillNotSchedule(host=host) return host else: return None def _assign_hosts_to_volumes(self, context, volume_params, forced_host): prev_volume_type_id = None request_spec = {} selected_hosts = [] LOG.debug(_("volume_params %(volume_params)s") % locals()) i = 1 for vol in volume_params: name = vol['name'] LOG.debug(_("%(i)d: Volume %(name)s"), locals()) i += 1 if forced_host: vol['host'] = forced_host vol['capabilities'] = None continue volume_type_id = vol['volume_type_id'] request_spec['size'] = vol['size'] if (prev_volume_type_id is None or prev_volume_type_id != volume_type_id): # generate list of hosts for this drive type volume_type = volume_types.get_volume_type(context, volume_type_id) drive_type = { 'name': volume_type['extra_specs'].get('drive_name'), 'type': volume_type['extra_specs'].get('drive_type'), 'size': int(volume_type['extra_specs'].get('drive_size')), 'rpm': volume_type['extra_specs'].get('drive_rpm'), } request_spec['drive_type'] = drive_type all_hosts = self._filter_hosts("volume", request_spec) prev_volume_type_id = volume_type_id (host, qos_cap) = self._select_hosts(request_spec, all_hosts, selected_hosts) vol['host'] = host vol['capabilities'] = qos_cap self._consume_resource(qos_cap, vol['size'], -1) def 
schedule_create_volumes(self, context, request_spec, availability_zone=None, *_args, **_kwargs): """Picks hosts for hosting multiple volumes.""" num_volumes = request_spec.get('num_volumes') LOG.debug(_("Attempting to spawn %(num_volumes)d volume(s)") % locals()) vsa_id = request_spec.get('vsa_id') volume_params = request_spec.get('volumes') host = self._check_host_enforcement(context, availability_zone) try: self._print_capabilities_info() self._assign_hosts_to_volumes(context, volume_params, host) for vol in volume_params: self._provision_volume(context, vol, vsa_id, availability_zone) except Exception: LOG.exception(_("Error creating volumes")) if vsa_id: db.vsa_update(context, vsa_id, dict(status=VsaState.FAILED)) for vol in volume_params: if 'capabilities' in vol: self._consume_resource(vol['capabilities'], vol['size'], 1) raise return None def schedule_create_volume(self, context, volume_id, *_args, **_kwargs): """Picks the best host based on requested drive type capability.""" volume_ref = db.volume_get(context, volume_id) host = self._check_host_enforcement(context, volume_ref['availability_zone']) if host: driver.cast_to_volume_host(context, host, 'create_volume', volume_id=volume_id, **_kwargs) return None volume_type_id = volume_ref['volume_type_id'] if volume_type_id: volume_type = volume_types.get_volume_type(context, volume_type_id) if (volume_type_id is None or volume_types.is_vsa_volume(volume_type_id, volume_type)): LOG.debug(_("Non-VSA volume %d"), volume_ref['id']) return super(VsaScheduler, self).schedule_create_volume(context, volume_id, *_args, **_kwargs) self._print_capabilities_info() drive_type = { 'name': volume_type['extra_specs'].get('drive_name'), 'type': volume_type['extra_specs'].get('drive_type'), 'size': int(volume_type['extra_specs'].get('drive_size')), 'rpm': volume_type['extra_specs'].get('drive_rpm'), } LOG.debug(_("Spawning volume %(volume_id)s with drive type " "%(drive_type)s"), locals()) request_spec = {'size': 
volume_ref['size'], 'drive_type': drive_type} hosts = self._filter_hosts("volume", request_spec) try: (host, qos_cap) = self._select_hosts(request_spec, all_hosts=hosts) except Exception: LOG.exception(_("Error creating volume")) if volume_ref['to_vsa_id']: db.vsa_update(context, volume_ref['to_vsa_id'], dict(status=VsaState.FAILED)) raise if host: driver.cast_to_volume_host(context, host, 'create_volume', volume_id=volume_id, **_kwargs) def _consume_full_drive(self, qos_values, direction): qos_values['FullDrive']['NumFreeDrives'] += direction qos_values['FullDrive']['NumOccupiedDrives'] -= direction def _consume_partition(self, qos_values, size, direction): if qos_values['PartitionDrive']['PartitionSize'] != 0: partition_size = qos_values['PartitionDrive']['PartitionSize'] else: partition_size = size part_per_drive = qos_values['DriveCapacity'] / partition_size if (direction == -1 and qos_values['PartitionDrive']['NumFreePartitions'] == 0): self._consume_full_drive(qos_values, direction) qos_values['PartitionDrive']['NumFreePartitions'] += part_per_drive qos_values['PartitionDrive']['NumFreePartitions'] += direction qos_values['PartitionDrive']['NumOccupiedPartitions'] -= direction if (direction == 1 and qos_values['PartitionDrive']['NumFreePartitions'] >= part_per_drive): self._consume_full_drive(qos_values, direction) qos_values['PartitionDrive']['NumFreePartitions'] -= part_per_drive def _consume_resource(self, qos_values, size, direction): if qos_values is None: LOG.debug(_("No capability selected for volume of size %(size)s"), locals()) return if size == 0: # full drive match qos_values['AvailableCapacity'] += (direction * qos_values['DriveCapacity']) self._consume_full_drive(qos_values, direction) else: qos_values['AvailableCapacity'] += direction * GB_TO_BYTES(size) self._consume_partition(qos_values, GB_TO_BYTES(size), direction) return def _print_capabilities_info(self): host_list = self._get_service_states().iteritems() for host, host_dict in host_list: 
for service_name, service_dict in host_dict.iteritems(): if service_name != "volume": continue LOG.info(_("Host %s:"), host) gos_info = service_dict.get('drive_qos_info', {}) for qosgrp, qos_values in gos_info.iteritems(): total = qos_values['TotalDrives'] used = qos_values['FullDrive']['NumOccupiedDrives'] free = qos_values['FullDrive']['NumFreeDrives'] avail = BYTES_TO_GB(qos_values['AvailableCapacity']) LOG.info(_("\tDrive %(qosgrp)-25s: total %(total)2s, " "used %(used)2s, free %(free)2s. Available " "capacity %(avail)-5s"), locals()) class VsaSchedulerLeastUsedHost(VsaScheduler): """ Implements VSA scheduler to select the host with least used capacity of particular type. """ def __init__(self, *args, **kwargs): super(VsaSchedulerLeastUsedHost, self).__init__(*args, **kwargs) def host_selection_algorithm(self, request_spec, all_hosts, selected_hosts, unique): size = request_spec['size'] drive_type = request_spec['drive_type'] best_host = None best_qoscap = None best_cap = None min_used = 0 for (host, capabilities) in all_hosts: has_enough_capacity = False used_capacity = 0 for qosgrp, qos_values in capabilities.iteritems(): used_capacity = (used_capacity + qos_values['TotalCapacity'] - qos_values['AvailableCapacity']) if self._qosgrp_match(drive_type, qos_values): # we found required qosgroup if size == 0: # full drive match if qos_values['FullDrive']['NumFreeDrives'] > 0: has_enough_capacity = True matched_qos = qos_values else: break else: _fp = qos_values['PartitionDrive']['NumFreePartitions'] _fd = qos_values['FullDrive']['NumFreeDrives'] if (qos_values['AvailableCapacity'] >= size and (_fp > 0 or _fd > 0)): has_enough_capacity = True matched_qos = qos_values else: break if (has_enough_capacity and self._allowed_to_use_host(host, selected_hosts, unique) and (best_host is None or used_capacity < min_used)): min_used = used_capacity best_host = host best_qoscap = matched_qos best_cap = capabilities if best_host: self._add_hostcap_to_list(selected_hosts, 
best_host, best_cap) min_used = BYTES_TO_GB(min_used) LOG.debug(_("\t LeastUsedHost: Best host: %(best_host)s. " "(used capacity %(min_used)s)"), locals()) return (best_host, best_qoscap) class VsaSchedulerMostAvailCapacity(VsaScheduler): """ Implements VSA scheduler to select the host with most available capacity of one particular type. """ def __init__(self, *args, **kwargs): super(VsaSchedulerMostAvailCapacity, self).__init__(*args, **kwargs) def host_selection_algorithm(self, request_spec, all_hosts, selected_hosts, unique): size = request_spec['size'] drive_type = request_spec['drive_type'] best_host = None best_qoscap = None best_cap = None max_avail = 0 for (host, capabilities) in all_hosts: for qosgrp, qos_values in capabilities.iteritems(): if self._qosgrp_match(drive_type, qos_values): # we found required qosgroup if size == 0: # full drive match available = qos_values['FullDrive']['NumFreeDrives'] else: available = qos_values['AvailableCapacity'] if (available > max_avail and self._allowed_to_use_host(host, selected_hosts, unique)): max_avail = available best_host = host best_qoscap = qos_values best_cap = capabilities break # go to the next host if best_host: self._add_hostcap_to_list(selected_hosts, best_host, best_cap) type_str = "drives" if size == 0 else "bytes" LOG.debug(_("\t MostAvailCap: Best host: %(best_host)s. " "(available %(max_avail)s %(type_str)s)"), locals()) return (best_host, best_qoscap)
ChineseTony/javaDataStruct
src/main/java/com/tom/array/MinMoves.java
package com.tom.array;

import java.util.Arrays;

/**
 * Minimum moves to equal array elements (LeetCode 453).
 * One "move" increments n-1 of the n elements by 1 — equivalently, it
 * decrements a single element by 1 relative to the rest — so the optimal
 * answer is sum(nums[i] - min).
 *
 * Example: [1,2,3] needs 3 moves: [1,2,3] => [2,3,3] => [3,4,3] => [4,4,4].
 * https://leetcode-cn.com/problems/minimum-moves-to-equal-array-elements
 *
 * @author wangtao
 */
public class MinMoves {

    /**
     * Direct simulation: repeatedly increment every element except the
     * current maximum until all elements are equal.
     * NOTE: mutates {@code nums} and runs in O(n * (max - min)) — kept as
     * the reference/brute-force version.
     *
     * @param nums input array (modified in place); null/empty yields 0
     * @return number of moves performed
     */
    public static int minMoves(int[] nums) {
        if (nums == null || nums.length <= 0) {
            return 0;
        }
        int len = nums.length;
        int min = 0, max = len - 1, count = 0;
        while (true) {
            // Re-scan for the indices of the current max and min.
            for (int i = 0; i < len; i++) {
                if (nums[max] < nums[i]) {
                    max = i;
                }
                if (nums[min] > nums[i]) {
                    min = i;
                }
            }
            if (nums[min] == nums[max]) {
                break; // all elements equal
            }
            // One move: bump everything except the max.
            for (int i = 0; i < len; i++) {
                if (i != max) {
                    nums[i]++;
                }
            }
            count++;
        }
        return count;
    }

    /**
     * Batched simulation: performs (max - min) single moves at once, which
     * is valid because each move may exclude any one element.
     * NOTE: mutates {@code nums}.
     *
     * @param nums input array (modified in place); null/empty yields 0
     * @return number of moves performed
     */
    public static int minMoves2(int[] nums) {
        if (nums == null || nums.length <= 0) {
            return 0;
        }
        int len = nums.length;
        int min = 0, max = len - 1, count = 0;
        while (true) {
            for (int i = 0; i < len; i++) {
                if (nums[max] < nums[i]) {
                    max = i;
                }
                if (nums[min] > nums[i]) {
                    min = i;
                }
            }
            int diff = nums[max] - nums[min];
            if (diff == 0) {
                break;
            }
            // Apply diff moves in one pass, each excluding the current max.
            for (int i = 0; i < len; i++) {
                if (i != max) {
                    nums[i] += diff;
                }
            }
            count += diff;
        }
        return count;
    }

    /**
     * Closed-form O(n) solution: answer = sum(nums[i] - min).
     *
     * Fixes vs. the previous version: adds the null/empty guard the sibling
     * methods already had (was an NPE on null), and replaces the O(n log n)
     * {@code Arrays.sort} with a single min-scan — the sorted order was only
     * used to find the minimum. Does not mutate {@code nums}.
     *
     * @param nums input array; null/empty yields 0
     * @return minimum number of moves
     */
    public int minMoves3(int[] nums) {
        if (nums == null || nums.length <= 0) {
            return 0;
        }
        int min = nums[0];
        for (int num : nums) {
            if (num < min) {
                min = num;
            }
        }
        int count = 0;
        for (int num : nums) {
            count += num - min;
        }
        return count;
    }

    public static void main(String[] args) {
        int[] nums = new int[]{1, 2147483647};
        System.out.println(minMoves2(nums));
    }
}
filecoin-heiben/go-fil-markets
pieceio/mocks/SectorCalculator.go
// Code generated by mockery v1.0.0. DO NOT EDIT.

package mocks

import (
	io "io"

	mock "github.com/stretchr/testify/mock"
)

// SectorCalculator is an autogenerated mock type for the SectorCalculator type
type SectorCalculator struct {
	mock.Mock
}

// GeneratePieceCommitment provides a mock function with given fields: piece, pieceSize
func (_m *SectorCalculator) GeneratePieceCommitment(piece io.Reader, pieceSize uint64) ([]byte, error) {
	// Record the call and look up the return values configured via
	// .On("GeneratePieceCommitment", ...).Return(...).
	ret := _m.Called(piece, pieceSize)

	// First return value: either produced by a configured function stub or
	// taken verbatim from the configured return (nil stays nil).
	var r0 []byte
	if rf, ok := ret.Get(0).(func(io.Reader, uint64) []byte); ok {
		r0 = rf(piece, pieceSize)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).([]byte)
		}
	}

	// Second return value: function stub or plain configured error.
	var r1 error
	if rf, ok := ret.Get(1).(func(io.Reader, uint64) error); ok {
		r1 = rf(piece, pieceSize)
	} else {
		r1 = ret.Error(1)
	}

	return r0, r1
}
bobobot/SzakgyakChan
src/main/java/org/github/bobobot/repositories/IMetaRepository.java
package org.github.bobobot.repositories;

import org.github.bobobot.entities.MetaInfo;
import org.springframework.data.jpa.repository.JpaRepository;

import java.util.Optional;

/**
 * Spring Data JPA repository for {@link MetaInfo} entities keyed by their
 * {@link Long} id.
 */
public interface IMetaRepository extends JpaRepository<MetaInfo, Long> {

    /**
     * Finds a single meta-info entry by name (query derived by Spring Data
     * from the method name).
     *
     * @param name the name to look up
     * @return the matching entry, or {@link Optional#empty()} if none exists
     */
    Optional<MetaInfo> findByName(String name);
}
yamkazu/grails-data-mapping
grails-datastore-dynamodb/src/main/groovy/org/grails/datastore/mapping/dynamodb/engine/DynamoDBHiLoIdGenerator.java
package org.grails.datastore.mapping.dynamodb.engine;

import java.util.HashMap;
import java.util.Map;

import org.grails.datastore.mapping.core.OptimisticLockingException;
import org.grails.datastore.mapping.dynamodb.DynamoDBDatastore;
import org.grails.datastore.mapping.dynamodb.util.DynamoDBConst;
import org.grails.datastore.mapping.dynamodb.util.DynamoDBUtil;
import org.grails.datastore.mapping.model.PersistentEntity;
import org.springframework.dao.DataAccessException;

import com.amazonaws.AmazonServiceException;
import com.amazonaws.services.dynamodb.model.AttributeValue;

/**
 * Implementation of HiLo generator for DynamoDB.
 * All HiLos are stored in a single dedicated AWS table. The id of each record
 * is the table name of the corresponding
 * {@link org.grails.datastore.mapping.model.PersistentEntity}. The only
 * attributes are the nextHi long attribute and the version (used for
 * optimistic locking when several nodes bump the counter concurrently).
 *
 * @author <NAME>
 */
public class DynamoDBHiLoIdGenerator implements DynamoDBIdGenerator {
    /**
     * @param table table where all the counters are stored
     * @param id name of the domain for some {@link org.grails.datastore.mapping.model.PersistentEntity} for which this instance will be keeping the counter
     * @param lowSize how many ids each reserved "hi" block yields before another DB round-trip is needed
     * @param datastore datastore used to reach DynamoDB
     */
    public DynamoDBHiLoIdGenerator(String table, String id, int lowSize, DynamoDBDatastore datastore) {
        this.table = table;
        this.id = id;
        this.lowSize = lowSize;
        this.datastore = datastore;
    }

    /**
     * Returns the next id in the locally reserved [current, max) window,
     * reserving a new window from DynamoDB when the window is exhausted.
     * Synchronized: one instance serves one entity, so contention is only
     * between local threads.
     */
    public synchronized Object generateIdentifier(PersistentEntity persistentEntity, DynamoDBNativeItem nativeEntry) {
        if (!initialized) {
            initialize(persistentEntity);
        }
        if (current == max) {
            // Window exhausted: claim the next "hi" value in the DB, then
            // rebuild the local window from it.
            incrementDBAndRefresh(persistentEntity);
            reset();
        }

        long result = current;
        current = current + 1;
        return result;
    }

    // Rebuilds the local id window from the "hi" value last read from the DB:
    // ids [currentHi * lowSize, currentHi * lowSize + lowSize) are ours.
    private void reset() {
        current = currentHi * lowSize;
        max = current + lowSize;
    }

    /**
     * Reads the counter record, then writes it back with hi+1 using a
     * version-conditional put. On an optimistic-locking conflict (another
     * node claimed the same hi) the read-modify-write is retried.
     */
    private void incrementDBAndRefresh(PersistentEntity persistentEntity) {
        boolean done = false;
        int attempt = 0;
        while (!done) {
            attempt++;
            if (attempt > 10000) { // retry cap; TODO: make configurable at some point
                throw new IllegalArgumentException("exceeded number of attempts to load new Hi value value from db");
            }
            try {
                Map<String,AttributeValue> item = datastore.getDynamoDBTemplate().getConsistent(table, DynamoDBUtil.createIdKey(id));

                if (item == null) { // no record exists yet: start from hi=1, no version to assert on
                    currentHi = 1;
                    currentVersion = null;
                } else {
                    currentHi = Long.parseLong(DynamoDBUtil.getAttributeValueNumeric(item, DynamoDBConst.ID_GENERATOR_HI_LO_ATTRIBUTE_NAME));
                    currentVersion = Long.parseLong(DynamoDBUtil.getAttributeValueNumeric(item, "version"));
                }

                long nextHi = currentHi + 1;
                long nextVersion = currentVersion == null ? (long)1: currentVersion+1;

                createOrUpdate(nextHi, nextVersion, currentVersion, persistentEntity);
                currentVersion = nextVersion;

                done = true;
            } catch (OptimisticLockingException e) {
                // collision — expected under concurrency; loop and try again
            }
        }
    }

    /**
     * First-use setup: probes the counter table and creates it if DynamoDB
     * reports it missing. The local window is left empty (current == max) so
     * the first generateIdentifier() call reserves a block.
     */
    private void initialize(PersistentEntity persistentEntity) {
        try {
            /*Map<String,AttributeValue> item =*/ datastore.getDynamoDBTemplate().getConsistent(table, DynamoDBUtil.createIdKey(id));
        } catch (DataAccessException e) {
            throw new RuntimeException(e);
        } catch (Exception e) {
            // Check whether the failure means the table does not exist at all;
            // the AWS exception may be the thrown exception or its cause.
            AmazonServiceException awsE = null;
            if (e instanceof AmazonServiceException) {
                awsE = (AmazonServiceException) e;
            } else if (e.getCause() instanceof AmazonServiceException) {
                awsE = (AmazonServiceException) e.getCause();
            }
            if (awsE != null && DynamoDBUtil.AWS_ERR_CODE_RESOURCE_NOT_FOUND.equals(awsE.getErrorCode())) {
                // table does not exist, must create it
                createHiLoTable(datastore, table);
            } else {
                throw new RuntimeException(e);
            }
        }

        current = 0;
        max = 0;

        initialized = true;
    }

    // Creates the shared HiLo counter table with the default key schema and
    // provisioned throughput.
    public static void createHiLoTable(DynamoDBDatastore datastore, String tableName) {
        datastore.getDynamoDBTemplate().createTable(
                tableName,
                DynamoDBUtil.createIdKeySchema(),
                DynamoDBUtil.createDefaultProvisionedThroughput(datastore));
    }

    /**
     * Persists the new hi/version pair. When no record existed yet
     * (expectedVersion == null) a plain put is used; otherwise the put is
     * conditional on the version we read, so a concurrent writer triggers an
     * OptimisticLockingException instead of silently overwriting.
     */
    private void createOrUpdate(long nextHi, long newVersion, Long expectedVersion, PersistentEntity persistentEntity) {
        Map<String, AttributeValue> item = new HashMap<String, AttributeValue>();
        item.put(DynamoDBConst.ID_GENERATOR_HI_LO_ATTRIBUTE_NAME, new AttributeValue().withN(String.valueOf(nextHi)));
        item.put("version", new AttributeValue().withN(String.valueOf(newVersion)));
        DynamoDBUtil.addId(item, id);
        if (expectedVersion == null) {
            // since there is no record yet we can't assert on version
            datastore.getDynamoDBTemplate().putItem(table, item);
        } else {
            datastore.getDynamoDBTemplate().putItemVersioned(table, DynamoDBUtil.createIdKey(id), item, String.valueOf(expectedVersion), persistentEntity);
        }
    }

    private String id;              // counter record id (the entity's table name)
    private long current;           // next id to hand out
    private int lowSize;            // ids per reserved block
    private long max;               // exclusive end of the local window
    private boolean initialized;    // lazy-init flag for the counter table probe
    private long currentHi;         // last hi value read from the DB
    private Long currentVersion;    // version read with it (null when record absent)
    private DynamoDBDatastore datastore;
    private String table;           // name of the shared HiLo counter table
}
FyisFe/UCB-CS61A-20Fall
lab/lab11/repl.py
try:
    import readline  # history and arrow keys for CLI
except ImportError:
    pass  # but not everyone has it

import sys

from reader import read
from expr import global_env

# program start
if __name__ == "__main__":
    """Run a read-eval-print loop.

    `python3 repl.py` to start an interactive REPL.
    `python3 repl.py --read` to interactively read expressions and print
    their Python representations.
    """
    # In --read mode we only parse and show the expression tree.
    read_only = len(sys.argv) == 2 and sys.argv[1] == "--read"
    while True:
        try:
            # `input` prints the prompt, waits, and returns the user's input.
            parsed = read(input("> "))
            if parsed is None:
                continue
            # Either echo the parsed tree or evaluate it in the global frame.
            output = repr(parsed) if read_only else parsed.eval(global_env)
            print(output)
        except (
            SyntaxError,
            NameError,
            TypeError,
            OverflowError,
            ZeroDivisionError,
            AttributeError,
        ) as err:
            # Report the error class and message, then keep the loop alive.
            print(type(err).__name__ + ":", err)
        except (KeyboardInterrupt, EOFError):  # Ctrl-C, Ctrl-D
            print()  # blank line
            break  # exit while loop (and end program)
jorgeejgonzalez/fermat
P2P/library/api/fermat-p2p-api/src/main/java/com/bitdubai/fermat_p2p_api/layer/all_definition/communication/events/FailureComponentRegistrationNotificationEvent.java
/*
 * @#FailureComponentRegistrationNotificationEvent.java - 2015
 * Copyright bitDubai.com., All rights reserved.
 * You may not modify, use, reproduce or distribute this software.
 * BITDUBAI/CONFIDENTIAL
 */
package com.bitdubai.fermat_p2p_api.layer.all_definition.communication.events;

import com.bitdubai.fermat_api.layer.all_definition.components.interfaces.PlatformComponentProfile;
import com.bitdubai.fermat_api.layer.all_definition.network_service.enums.NetworkServiceType;
import com.bitdubai.fermat_p2p_api.layer.all_definition.communication.enums.P2pEventType;

/**
 * The Class <code>com.bitdubai.fermat_p2p_api.layer.all_definition.communication.events.FailureComponentRegistrationNotificationEvent</code>
 * <p/>
 * Event payload raised when a component registration fails; carries which
 * network service applied for the registration and the component profile
 * that could not be registered.
 * <p/>
 * Created by <NAME> - (<EMAIL>) on 09/10/15.
 *
 * @version 1.0
 * @since Java JDK 1.7
 */
public class FailureComponentRegistrationNotificationEvent extends AbstractP2PFermatEvent {

    /**
     * The network service that applied for the failed registration.
     */
    private NetworkServiceType networkServiceApplicant;

    /**
     * Profile of the component whose registration failed.
     */
    private PlatformComponentProfile platformComponentProfile;

    /**
     * Constructor with parameter
     * @param p2pEventType the p2p event type this event is published under
     */
    public FailureComponentRegistrationNotificationEvent(P2pEventType p2pEventType) {
        super(p2pEventType);
    }

    /**
     * Get the NetworkServiceApplicant
     *
     * @return NetworkServiceType
     */
    public NetworkServiceType getNetworkServiceApplicant() {
        return networkServiceApplicant;
    }

    /**
     * Set the NetworkServiceApplicant
     * @param networkServiceApplicant the applying network service
     */
    public void setNetworkServiceApplicant(NetworkServiceType networkServiceApplicant) {
        this.networkServiceApplicant = networkServiceApplicant;
    }

    /**
     * Get the PlatformComponentProfile
     * @return PlatformComponentProfile
     */
    public PlatformComponentProfile getPlatformComponentProfile() {
        return platformComponentProfile;
    }

    /**
     * Set the PlatformComponentProfile
     * @param platformComponentProfile the profile that failed to register
     */
    public void setPlatformComponentProfile(PlatformComponentProfile platformComponentProfile) {
        this.platformComponentProfile = platformComponentProfile;
    }
}
androj07/kata
src/main/java/com/androj/kata/product/Product.java
<gh_stars>0 package com.androj.kata.product; /** * This problem was asked by Uber. * * Given an array of integers, return a new array such that each element at index i * of the new array is the product of all the numbers in the original array except the one at i. * * For example, if our input was [1, 2, 3, 4, 5], the expected output would be [120, 60, 40, 30, 24]. * If our input was [3, 2, 1], the expected output would be [2, 3, 6]. Don't use division and find a solution with time complexity O(n). */ public class Product { public int[] productAllExceptCurrentElement(int[] in) { int[] out = new int[in.length]; int left = in[0]; int right = in[in.length - 1]; for (int i = 1, j = in.length - 2; i < in.length && j >= 0; i++, j--) { if (i <= j || i == out.length - 1) { out[i] = left; } else { out[i] = out[i] * left; } if (j > i || j == 0) { out[j] = right; } else { out[j] = out[j] * right; } left = left * in[i]; right = right * in[j]; } return out; } }
cylemonVip/NovelAPP
js/actions/hotSimilar.js
/** * @author Semper */ import {RECEIVE_HOT_SIMILAR, REQUEST_HOT_SIMILAR} from "../constants/ActionTypes"; export function requestHotSimilar(hotSimilarUrl) { return { type: REQUEST_HOT_SIMILAR, hotSimilarUrl }; } export function receiveHotSimilar(hotSimilarData) { return { type: RECEIVE_HOT_SIMILAR, hotSimilarData, }; }
dgreid/platform2
authpolicy/policy/device_policy_encoder_test.cc
// Copyright 2017 The Chromium OS Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include <unordered_set> #include <base/strings/string_util.h> #include <components/policy/core/common/registry_dict.h> #include <gmock/gmock.h> #include <gtest/gtest.h> #include "authpolicy/policy/device_policy_encoder.h" #include "authpolicy/policy/policy_encoder_test_base.h" #include "bindings/chrome_device_policy.pb.h" #include "bindings/policy_constants.h" namespace em = enterprise_management; namespace policy { namespace { // Converts a repeated string field to a vector. std::vector<std::string> ToVector( const google::protobuf::RepeatedPtrField<std::string>& repeated_field) { return std::vector<std::string>(repeated_field.begin(), repeated_field.end()); } // Converts a repeated int field to a vector. std::vector<int> ToVector( const google::protobuf::RepeatedField<int>& repeated_field) { return std::vector<int>(repeated_field.begin(), repeated_field.end()); } } // namespace // Checks whether all device policies are properly encoded from RegistryDict // into em::ChromeDeviceSettingsProto. Makes sure no device policy is missing. class DevicePolicyEncoderTest : public PolicyEncoderTestBase<em::ChromeDeviceSettingsProto> { public: DevicePolicyEncoderTest() {} DevicePolicyEncoderTest(const DevicePolicyEncoderTest&) = delete; DevicePolicyEncoderTest& operator=(const DevicePolicyEncoderTest&) = delete; ~DevicePolicyEncoderTest() override {} protected: void EncodeDict(em::ChromeDeviceSettingsProto* policy, const RegistryDict* dict) override { DevicePolicyEncoder encoder(dict, POLICY_LEVEL_MANDATORY); *policy = em::ChromeDeviceSettingsProto(); encoder.EncodePolicy(policy); } void MarkHandled(const char* key) override { handled_policy_keys_.insert(key); } // Returns a vector of all policy keys that were not encoded. 
std::vector<std::string> GetUnhandledPolicyKeys() const { std::vector<std::string> unhandled_policy_keys; for (const char** key = kDevicePolicyKeys; *key; ++key) { if (handled_policy_keys_.find(*key) == handled_policy_keys_.end()) unhandled_policy_keys.push_back(*key); } return unhandled_policy_keys; } private: // Keeps track of handled device policies. Used to detect device policies that // device_policy_encoder forgets to encode. std::unordered_set<std::string> handled_policy_keys_; }; TEST_F(DevicePolicyEncoderTest, TestEncoding) { // Note that kStringList can't be constexpr, so we put them all here. constexpr bool kBool = true; constexpr int kInt = 123; constexpr int kScreenMagnifierTypeInRangeInt = 1; constexpr int kScreenMagnifierTypeOutOfRangeInt = 10; constexpr int kDeviceChromeVariationsInRangeInt = 1; constexpr int kDeviceChromeVariationsOutOfRangeInt = 12; constexpr int kDeviceCrostiniArcAdbSideloadingAllowedOutOfRangeInt = 13; const std::string kString = "val1"; const std::vector<std::string> kStringList = {"val1", "val2", "val3"}; em::ChromeDeviceSettingsProto policy; // // Login policies. // EncodeBoolean(&policy, key::kDeviceGuestModeEnabled, kBool); EXPECT_EQ(kBool, policy.guest_mode_enabled().guest_mode_enabled()); EncodeBoolean(&policy, key::kDeviceRebootOnShutdown, kBool); EXPECT_EQ(kBool, policy.reboot_on_shutdown().reboot_on_shutdown()); EncodeBoolean(&policy, key::kDeviceShowUserNamesOnSignin, kBool); EXPECT_EQ(kBool, policy.show_user_names().show_user_names()); EncodeBoolean(&policy, key::kDeviceAllowNewUsers, kBool); EXPECT_EQ(kBool, policy.allow_new_users().allow_new_users()); EncodeStringList(&policy, key::kDeviceUserWhitelist, kStringList); EXPECT_EQ(kStringList, ToVector(policy.user_whitelist().user_whitelist())); // Old policy copied to new name. 
EXPECT_EQ(kStringList, ToVector(policy.user_allowlist().user_allowlist())); EncodeStringList(&policy, key::kDeviceUserAllowlist, kStringList); EXPECT_EQ(kStringList, ToVector(policy.user_allowlist().user_allowlist())); EncodeBoolean(&policy, key::kDeviceEphemeralUsersEnabled, kBool); EXPECT_EQ(kBool, policy.ephemeral_users_enabled().ephemeral_users_enabled()); EncodeBoolean(&policy, key::kDeviceAllowBluetooth, kBool); EXPECT_EQ(kBool, policy.allow_bluetooth().allow_bluetooth()); EncodeStringList(&policy, key::kDeviceLoginScreenExtensions, kStringList); EXPECT_EQ(kStringList, ToVector(policy.device_login_screen_extensions() .device_login_screen_extensions())); EncodeString(&policy, key::kDeviceLoginScreenDomainAutoComplete, kString); EXPECT_EQ(kString, policy.login_screen_domain_auto_complete() .login_screen_domain_auto_complete()); EncodeStringList(&policy, key::kDeviceLoginScreenLocales, kStringList); EXPECT_EQ(kStringList, ToVector(policy.login_screen_locales().login_screen_locales())); EncodeStringList(&policy, key::kDeviceLoginScreenInputMethods, kStringList); EXPECT_EQ( kStringList, ToVector( policy.login_screen_input_methods().login_screen_input_methods())); EncodeStringList(&policy, key::kDeviceLoginScreenAutoSelectCertificateForUrls, kStringList); EXPECT_EQ( kStringList, ToVector(policy.device_login_screen_auto_select_certificate_for_urls() .login_screen_auto_select_certificate_rules())); EncodeInteger(&policy, key::kDeviceRebootOnUserSignout, em::DeviceRebootOnUserSignoutProto_RebootOnSignoutMode_ALWAYS); EXPECT_EQ(em::DeviceRebootOnUserSignoutProto_RebootOnSignoutMode_ALWAYS, policy.device_reboot_on_user_signout().reboot_on_signout_mode()); EncodeBoolean(&policy, key::kDevicePowerwashAllowed, kBool); EXPECT_EQ(kBool, policy.device_powerwash_allowed().device_powerwash_allowed()); EncodeBoolean(&policy, key::kManagedGuestSessionPrivacyWarningsEnabled, kBool); EXPECT_EQ(kBool, policy.managed_guest_session_privacy_warnings().enabled()); // // Network 
policies. // EncodeBoolean(&policy, key::kDeviceDataRoamingEnabled, kBool); EXPECT_EQ(kBool, policy.data_roaming_enabled().data_roaming_enabled()); EncodeBoolean(&policy, key::kDeviceWiFiFastTransitionEnabled, kBool); EXPECT_EQ(kBool, policy.device_wifi_fast_transition_enabled() .device_wifi_fast_transition_enabled()); EncodeString(&policy, key::kDeviceOpenNetworkConfiguration, kString); EXPECT_EQ(kString, policy.open_network_configuration().open_network_configuration()); EncodeString(&policy, key::kDeviceHostnameTemplate, kString); EXPECT_EQ(kString, policy.network_hostname().device_hostname_template()); // The encoder of this policy converts ints to // DeviceKerberosEncryptionTypes::Types enums. EncodeInteger(&policy, key::kDeviceKerberosEncryptionTypes, em::DeviceKerberosEncryptionTypesProto::ENC_TYPES_ALL); EXPECT_EQ(em::DeviceKerberosEncryptionTypesProto::ENC_TYPES_ALL, policy.device_kerberos_encryption_types().types()); // // Auto update policies. // EncodeString(&policy, key::kChromeOsReleaseChannel, kString); EXPECT_EQ(kString, policy.release_channel().release_channel()); EncodeBoolean(&policy, key::kChromeOsReleaseChannelDelegated, kBool); EXPECT_EQ(kBool, policy.release_channel().release_channel_delegated()); EncodeString(&policy, key::kDeviceReleaseLtsTag, kString); EXPECT_EQ(kString, policy.release_channel().release_lts_tag()); EncodeBoolean(&policy, key::kDeviceAutoUpdateDisabled, kBool); EXPECT_EQ(kBool, policy.auto_update_settings().update_disabled()); EncodeString(&policy, key::kDeviceTargetVersionPrefix, kString); EXPECT_EQ(kString, policy.auto_update_settings().target_version_prefix()); EncodeString(&policy, key::kDeviceQuickFixBuildToken, kString); EXPECT_EQ(kString, policy.auto_update_settings().device_quick_fix_build_token()); // The encoder of this policy converts ints to RollbackToTargetVersion enums. 
EncodeInteger(&policy, key::kDeviceRollbackToTargetVersion, em::AutoUpdateSettingsProto::ROLLBACK_AND_POWERWASH); EXPECT_EQ(em::AutoUpdateSettingsProto::ROLLBACK_AND_POWERWASH, policy.auto_update_settings().rollback_to_target_version()); EncodeInteger(&policy, key::kDeviceRollbackAllowedMilestones, kInt); EXPECT_EQ(kInt, policy.auto_update_settings().rollback_allowed_milestones()); EncodeInteger(&policy, key::kDeviceUpdateScatterFactor, kInt); EXPECT_EQ(kInt, policy.auto_update_settings().scatter_factor_in_seconds()); // The encoder of this policy converts connection type strings to enums. std::vector<std::string> str_types; std::vector<int> enum_types; for (size_t n = 0; n < kConnectionTypesSize; ++n) { str_types.push_back(kConnectionTypes[n].first); enum_types.push_back(kConnectionTypes[n].second); } EncodeStringList(&policy, key::kDeviceUpdateAllowedConnectionTypes, str_types); EXPECT_EQ(enum_types, ToVector(policy.auto_update_settings().allowed_connection_types())); EncodeBoolean(&policy, key::kDeviceUpdateHttpDownloadsEnabled, kBool); EXPECT_EQ(kBool, policy.auto_update_settings().http_downloads_enabled()); EncodeBoolean(&policy, key::kRebootAfterUpdate, kBool); EXPECT_EQ(kBool, policy.auto_update_settings().reboot_after_update()); EncodeBoolean(&policy, key::kDeviceAutoUpdateP2PEnabled, kBool); EXPECT_EQ(kBool, policy.auto_update_settings().p2p_enabled()); EncodeString(&policy, key::kDeviceAutoUpdateTimeRestrictions, kString); EXPECT_EQ(kString, policy.auto_update_settings().disallowed_time_intervals()); EncodeString(&policy, key::kDeviceUpdateStagingSchedule, kString); EXPECT_EQ(kString, policy.auto_update_settings().staging_schedule()); EncodeString(&policy, key::kDeviceLoginScreenWebUsbAllowDevicesForUrls, kString); EXPECT_EQ(kString, policy.device_login_screen_webusb_allow_devices_for_urls() .device_login_screen_webusb_allow_devices_for_urls()); EncodeInteger(&policy, key::kDeviceChannelDowngradeBehavior, em::AutoUpdateSettingsProto::ROLLBACK); 
EXPECT_EQ(em::AutoUpdateSettingsProto::ROLLBACK, policy.auto_update_settings().channel_downgrade_behavior()); // // Accessibility policies. // EncodeBoolean(&policy, key::kDeviceLoginScreenDefaultLargeCursorEnabled, kBool); EXPECT_EQ(kBool, policy.accessibility_settings() .login_screen_default_large_cursor_enabled()); EncodeBoolean(&policy, key::kDeviceLoginScreenLargeCursorEnabled, kBool); EXPECT_EQ( kBool, policy.accessibility_settings().login_screen_large_cursor_enabled()); EncodeBoolean(&policy, key::kDeviceLoginScreenAutoclickEnabled, kBool); EXPECT_EQ(kBool, policy.accessibility_settings().login_screen_autoclick_enabled()); EncodeBoolean(&policy, key::kDeviceLoginScreenCaretHighlightEnabled, kBool); EXPECT_EQ( kBool, policy.accessibility_settings().login_screen_caret_highlight_enabled()); EncodeBoolean(&policy, key::kDeviceLoginScreenCursorHighlightEnabled, kBool); EXPECT_EQ( kBool, policy.accessibility_settings().login_screen_cursor_highlight_enabled()); EncodeBoolean(&policy, key::kDeviceLoginScreenDictationEnabled, kBool); EXPECT_EQ(kBool, policy.accessibility_settings().login_screen_dictation_enabled()); EncodeBoolean(&policy, key::kDeviceLoginScreenHighContrastEnabled, kBool); EXPECT_EQ( kBool, policy.accessibility_settings().login_screen_high_contrast_enabled()); EncodeBoolean(&policy, key::kDeviceLoginScreenMonoAudioEnabled, kBool); EXPECT_EQ(kBool, policy.accessibility_settings().login_screen_mono_audio_enabled()); EncodeBoolean(&policy, key::kDeviceLoginScreenSelectToSpeakEnabled, kBool); EXPECT_EQ( kBool, policy.accessibility_settings().login_screen_select_to_speak_enabled()); EncodeBoolean(&policy, key::kDeviceLoginScreenSpokenFeedbackEnabled, kBool); EXPECT_EQ( kBool, policy.accessibility_settings().login_screen_spoken_feedback_enabled()); EncodeBoolean(&policy, key::kDeviceLoginScreenStickyKeysEnabled, kBool); EXPECT_EQ(kBool, policy.accessibility_settings().login_screen_sticky_keys_enabled()); EncodeBoolean(&policy, 
key::kDeviceLoginScreenVirtualKeyboardEnabled, kBool); EXPECT_EQ( kBool, policy.accessibility_settings().login_screen_virtual_keyboard_enabled()); EncodeInteger(&policy, key::kDeviceLoginScreenScreenMagnifierType, kScreenMagnifierTypeOutOfRangeInt); EXPECT_FALSE( policy.accessibility_settings().has_login_screen_screen_magnifier_type()); EncodeInteger(&policy, key::kDeviceLoginScreenScreenMagnifierType, kScreenMagnifierTypeInRangeInt); EXPECT_EQ( kScreenMagnifierTypeInRangeInt, policy.accessibility_settings().login_screen_screen_magnifier_type()); EncodeBoolean(&policy, key::kDeviceLoginScreenDefaultSpokenFeedbackEnabled, kBool); EXPECT_EQ(kBool, policy.accessibility_settings() .login_screen_default_spoken_feedback_enabled()); EncodeBoolean(&policy, key::kDeviceLoginScreenDefaultHighContrastEnabled, kBool); EXPECT_EQ(kBool, policy.accessibility_settings() .login_screen_default_high_contrast_enabled()); EncodeInteger(&policy, key::kDeviceChromeVariations, kDeviceChromeVariationsOutOfRangeInt); EXPECT_FALSE(policy.has_device_chrome_variations_type()); EncodeInteger(&policy, key::kDeviceChromeVariations, kDeviceChromeVariationsInRangeInt); EXPECT_EQ(kDeviceChromeVariationsInRangeInt, policy.device_chrome_variations_type().value()); EncodeBoolean(&policy, key::kDeviceLoginScreenPrivacyScreenEnabled, kBool); EXPECT_EQ(kBool, policy.device_login_screen_privacy_screen_enabled().enabled()); EncodeBoolean(&policy, key::kDeviceShowNumericKeyboardForPassword, kBool); EXPECT_EQ(kBool, policy.device_show_numeric_keyboard_for_password().value()); EncodeStringList(&policy, key::kDeviceWebBasedAttestationAllowedUrls, kStringList); EXPECT_EQ(kStringList, ToVector(policy.device_web_based_attestation_allowed_urls() .value() .entries())); // The encoder of this policy converts ints to ScreenMagnifierType enums. 
EncodeInteger(&policy, key::kDeviceLoginScreenDefaultScreenMagnifierType, em::AccessibilitySettingsProto::SCREEN_MAGNIFIER_TYPE_FULL); EXPECT_EQ(em::AccessibilitySettingsProto::SCREEN_MAGNIFIER_TYPE_FULL, policy.accessibility_settings() .login_screen_default_screen_magnifier_type()); EncodeBoolean(&policy, key::kDeviceLoginScreenDefaultVirtualKeyboardEnabled, kBool); EXPECT_EQ(kBool, policy.accessibility_settings() .login_screen_default_virtual_keyboard_enabled()); // // Generic policies. // EncodeInteger(&policy, key::kDevicePolicyRefreshRate, kInt); EXPECT_EQ(kInt, policy.device_policy_refresh_rate().device_policy_refresh_rate()); EncodeBoolean(&policy, key::kDeviceMetricsReportingEnabled, kBool); EXPECT_EQ(kBool, policy.metrics_enabled().metrics_enabled()); EncodeString(&policy, key::kSystemTimezone, kString); EXPECT_EQ(kString, policy.system_timezone().timezone()); EncodeString(&policy, key::kRequiredClientCertificateForDevice, kString); EXPECT_EQ(policy.required_client_certificate_for_device() .required_client_certificate_for_device(), kString); EncodeString(&policy, key::kSystemProxySettings, kString); EXPECT_FALSE(policy.has_system_proxy_settings()); // The encoder of this policy converts ints to // DeviceCrostiniArcAdbSideloadingAllowedProto::AllowanceMode enums. EncodeInteger(&policy, key::kDeviceCrostiniArcAdbSideloadingAllowed, em::DeviceCrostiniArcAdbSideloadingAllowedProto::DISALLOW); EXPECT_EQ(em::DeviceCrostiniArcAdbSideloadingAllowedProto::DISALLOW, policy.device_crostini_arc_adb_sideloading_allowed().mode()); EncodeInteger(&policy, key::kDeviceCrostiniArcAdbSideloadingAllowed, kDeviceCrostiniArcAdbSideloadingAllowedOutOfRangeInt); EXPECT_FALSE(policy.has_device_crostini_arc_adb_sideloading_allowed()); // TODO(crbug.com/1092593) The following policy is going to be supported for // chrome_os, but its not now. However, it needs to be encoded temporarily to // pass the tests. 
MarkHandled(key::kDeviceSamlLoginAuthenticationType); EncodeString(&policy, key::kSystemProxySettings, R"!!!( { "system_proxy_username": "test_user", "system_services_password": "<PASSWORD>", "system_proxy_enabled": true, })!!!"); EXPECT_TRUE(policy.has_system_proxy_settings()); EncodeString(&policy, key::kDeviceMinimumVersion, kString); EXPECT_EQ(policy.device_minimum_version().value(), kString); EncodeString(&policy, key::kDeviceMinimumVersionAueMessage, kString); EXPECT_EQ(policy.device_minimum_version_aue_message().value(), kString); // The encoder of this policy converts ints to AutomaticTimezoneDetectionType // enums. EncodeInteger(&policy, key::kSystemTimezoneAutomaticDetection, em::SystemTimezoneProto::IP_ONLY); EXPECT_EQ(em::SystemTimezoneProto::IP_ONLY, policy.system_timezone().timezone_detection_type()); EncodeBoolean(&policy, key::kSystemUse24HourClock, kBool); EXPECT_EQ(kBool, policy.use_24hour_clock().use_24hour_clock()); EncodeBoolean(&policy, key::kDeviceAllowRedeemChromeOsRegistrationOffers, kBool); EXPECT_EQ(kBool, policy.allow_redeem_offers().allow_redeem_offers()); EncodeString(&policy, key::kDeviceVariationsRestrictParameter, kString); EXPECT_EQ(kString, policy.variations_parameter().parameter()); EncodeString(&policy, key::kDeviceLoginScreenPowerManagement, kString); EXPECT_EQ( kString, policy.login_screen_power_management().login_screen_power_management()); // The encoder of this policy converts ints to Rotation enums. EncodeInteger(&policy, key::kDisplayRotationDefault, em::DisplayRotationDefaultProto::ROTATE_180); EXPECT_EQ(em::DisplayRotationDefaultProto::ROTATE_180, policy.display_rotation_default().display_rotation_default()); EncodeString(&policy, key::kDeviceDisplayResolution, kString); EXPECT_EQ(kString, policy.device_display_resolution().device_display_resolution()); // The encoder of this policy converts a JSON string to separate values. 
EncodeStringList(&policy, key::kUsbDetachableWhitelist, {"{\"vendor_id\":123, \"product_id\":234}", "{\"vendor_id\":345, \"product_id\":456}"}); const auto& whitelist_proto = policy.usb_detachable_whitelist(); const auto& copied_allowlist_proto = policy.usb_detachable_allowlist(); EXPECT_EQ(123, whitelist_proto.id().Get(0).vendor_id()); EXPECT_EQ(234, whitelist_proto.id().Get(0).product_id()); EXPECT_EQ(345, whitelist_proto.id().Get(1).vendor_id()); EXPECT_EQ(456, whitelist_proto.id().Get(1).product_id()); // Whitelist values should have been copied to the allowlist proto EXPECT_EQ(123, copied_allowlist_proto.id().Get(0).vendor_id()); EXPECT_EQ(234, copied_allowlist_proto.id().Get(0).product_id()); EXPECT_EQ(345, copied_allowlist_proto.id().Get(1).vendor_id()); EXPECT_EQ(456, copied_allowlist_proto.id().Get(1).product_id()); EncodeStringList(&policy, key::kUsbDetachableAllowlist, {"{\"vendor_id\":1234, \"product_id\":2345}", "{\"vendor_id\":3456, \"product_id\":4567}"}); const auto& allowlist_proto = policy.usb_detachable_allowlist(); EXPECT_EQ(1234, allowlist_proto.id().Get(0).vendor_id()); EXPECT_EQ(2345, allowlist_proto.id().Get(0).product_id()); EXPECT_EQ(3456, allowlist_proto.id().Get(1).vendor_id()); EXPECT_EQ(4567, allowlist_proto.id().Get(1).product_id()); EXPECT_FALSE(policy.has_usb_detachable_whitelist()); EncodeBoolean(&policy, key::kDeviceQuirksDownloadEnabled, kBool); EXPECT_EQ(kBool, policy.quirks_download_enabled().quirks_download_enabled()); EncodeString(&policy, key::kDeviceWallpaperImage, kString); EXPECT_EQ(kString, policy.device_wallpaper_image().device_wallpaper_image()); EncodeString(&policy, key::kDeviceOffHours, R"!!!( { "intervals": [ { "start": { "day_of_week": "MONDAY", "time": 12840000 }, "end": { "day_of_week": "MONDAY", "time": 21720000 } }, { "start": { "day_of_week": "FRIDAY", "time": 38640000 }, "end": { "day_of_week": "FRIDAY", "time": 57600000 } } ], "timezone": "GMT", "ignored_policy_proto_tags": [3, 8] })!!!"); const auto& 
device_off_hours_proto = policy.device_off_hours(); EXPECT_EQ(2, device_off_hours_proto.intervals_size()); { const auto& interval1 = device_off_hours_proto.intervals().Get(0); const auto& interval2 = device_off_hours_proto.intervals().Get(1); EXPECT_EQ(em::WeeklyTimeProto::MONDAY, interval1.start().day_of_week()); EXPECT_EQ(em::WeeklyTimeProto::MONDAY, interval1.end().day_of_week()); EXPECT_EQ(12840000, interval1.start().time()); EXPECT_EQ(21720000, interval1.end().time()); EXPECT_EQ(em::WeeklyTimeProto::FRIDAY, interval2.start().day_of_week()); EXPECT_EQ(em::WeeklyTimeProto::FRIDAY, interval2.end().day_of_week()); EXPECT_EQ(38640000, interval2.start().time()); EXPECT_EQ(57600000, interval2.end().time()); } EXPECT_EQ("GMT", device_off_hours_proto.timezone()); EXPECT_EQ(2, device_off_hours_proto.ignored_policy_proto_tags_size()); EXPECT_EQ(3, device_off_hours_proto.ignored_policy_proto_tags().Get(0)); EXPECT_EQ(8, device_off_hours_proto.ignored_policy_proto_tags().Get(1)); EncodeString(&policy, key::kCastReceiverName, kString); EXPECT_EQ(kString, policy.cast_receiver_name().name()); EncodeString(&policy, key::kDevicePrinters, kString); EXPECT_EQ(kString, policy.device_printers().external_policy()); // Old policy copied to new name. EncodeString(&policy, key::kDeviceNativePrinters, kString); EXPECT_EQ(kString, policy.native_device_printers().external_policy()); // Old policy copied to new name. EXPECT_EQ(kString, policy.device_printers().external_policy()); // The encoder of this policy converts ints to AccessMode enums. EncodeInteger(&policy, key::kDevicePrintersAccessMode, em::DevicePrintersAccessModeProto::ACCESS_MODE_ALLOWLIST); EXPECT_EQ(em::DevicePrintersAccessModeProto::ACCESS_MODE_ALLOWLIST, policy.device_printers_access_mode().access_mode()); // Old policy copied to new name. 
EncodeInteger(&policy, key::kDeviceNativePrintersAccessMode, em::DeviceNativePrintersAccessModeProto::ACCESS_MODE_WHITELIST); EXPECT_EQ(em::DeviceNativePrintersAccessModeProto::ACCESS_MODE_WHITELIST, policy.native_device_printers_access_mode().access_mode()); // Old policy copied to new name. EXPECT_EQ(em::DevicePrintersAccessModeProto::ACCESS_MODE_ALLOWLIST, policy.device_printers_access_mode().access_mode()); EncodeStringList(&policy, key::kDevicePrintersAllowlist, kStringList); EXPECT_EQ(kStringList, ToVector(policy.device_printers_allowlist().allowlist())); EncodeStringList(&policy, key::kDeviceNativePrintersWhitelist, kStringList); EXPECT_EQ(kStringList, ToVector(policy.native_device_printers_whitelist().whitelist())); // Old policy copied to new name. EXPECT_EQ(kStringList, ToVector(policy.device_printers_allowlist().allowlist())); EncodeStringList(&policy, key::kDevicePrintersBlocklist, kStringList); EXPECT_EQ(kStringList, ToVector(policy.device_printers_blocklist().blocklist())); EncodeStringList(&policy, key::kDeviceNativePrintersBlacklist, kStringList); EXPECT_EQ(kStringList, ToVector(policy.native_device_printers_blacklist().blacklist())); // Old policy copied to new name. 
EXPECT_EQ(kStringList, ToVector(policy.device_printers_blocklist().blocklist())); EncodeString(&policy, key::kDeviceExternalPrintServers, kString); EXPECT_EQ(kString, policy.external_print_servers().external_policy()); EncodeStringList(&policy, key::kDeviceExternalPrintServersAllowlist, kStringList); EXPECT_EQ(kStringList, ToVector(policy.external_print_servers_allowlist().allowlist())); EncodeString(&policy, key::kTPMFirmwareUpdateSettings, "{\"allow-user-initiated-powerwash\":true," " \"allow-user-initiated-preserve-device-state\":true}"); EXPECT_EQ( true, policy.tpm_firmware_update_settings().allow_user_initiated_powerwash()); EXPECT_EQ(true, policy.tpm_firmware_update_settings() .allow_user_initiated_preserve_device_state()); EncodeBoolean(&policy, key::kUnaffiliatedArcAllowed, kBool); EXPECT_EQ(kBool, policy.unaffiliated_arc_allowed().unaffiliated_arc_allowed()); EncodeBoolean(&policy, key::kPluginVmAllowed, kBool); EXPECT_EQ(kBool, policy.plugin_vm_allowed().plugin_vm_allowed()); EncodeString(&policy, key::kPluginVmLicenseKey, kString); EXPECT_EQ(kString, policy.plugin_vm_license_key().plugin_vm_license_key()); EncodeBoolean(&policy, key::kDeviceWilcoDtcAllowed, kBool); EXPECT_EQ(kBool, policy.device_wilco_dtc_allowed().device_wilco_dtc_allowed()); EncodeBoolean(&policy, key::kDeviceBootOnAcEnabled, kBool); EXPECT_EQ(kBool, policy.device_boot_on_ac().enabled()); EncodeInteger(&policy, key::kDevicePowerPeakShiftBatteryThreshold, kInt); EXPECT_EQ(kInt, policy.device_power_peak_shift().battery_threshold()); EncodeBoolean(&policy, key::kDevicePowerPeakShiftEnabled, kBool); EXPECT_EQ(kBool, policy.device_power_peak_shift().enabled()); EncodeString(&policy, key::kDevicePowerPeakShiftDayConfig, kString); EXPECT_EQ(kString, policy.device_power_peak_shift().day_configs()); EncodeBoolean(&policy, key::kDeviceWiFiAllowed, kBool); EXPECT_EQ(kBool, policy.device_wifi_allowed().device_wifi_allowed()); EncodeString(&policy, key::kDeviceWilcoDtcConfiguration, kString); 
EXPECT_EQ( kString, policy.device_wilco_dtc_configuration().device_wilco_dtc_configuration()); // The encoder of this policy converts ints to // DeviceDockMacAddressSourceProto::Source enums. EncodeInteger(&policy, key::kDeviceDockMacAddressSource, em::DeviceDockMacAddressSourceProto::DOCK_NIC_MAC_ADDRESS); EXPECT_EQ(em::DeviceDockMacAddressSourceProto::DOCK_NIC_MAC_ADDRESS, policy.device_dock_mac_address_source().source()); EncodeBoolean(&policy, key::kDeviceAdvancedBatteryChargeModeEnabled, kBool); EXPECT_EQ(kBool, policy.device_advanced_battery_charge_mode().enabled()); EncodeString(&policy, key::kDeviceAdvancedBatteryChargeModeDayConfig, kString); EXPECT_EQ(kString, policy.device_advanced_battery_charge_mode().day_configs()); // The encoder of this policy converts ints to // DeviceBatteryChargeMode::BatteryChardeMode enums. EncodeInteger(&policy, key::kDeviceBatteryChargeMode, em::DeviceBatteryChargeModeProto::CUSTOM); EXPECT_EQ(em::DeviceBatteryChargeModeProto::CUSTOM, policy.device_battery_charge_mode().battery_charge_mode()); EncodeInteger(&policy, key::kDeviceBatteryChargeCustomStartCharging, kInt); EXPECT_EQ(kInt, policy.device_battery_charge_mode().custom_charge_start()); EncodeInteger(&policy, key::kDeviceBatteryChargeCustomStopCharging, kInt); EXPECT_EQ(kInt, policy.device_battery_charge_mode().custom_charge_stop()); EncodeBoolean(&policy, key::kDeviceUsbPowerShareEnabled, kBool); EXPECT_EQ(kBool, policy.device_usb_power_share().enabled()); // The encoder of this policy converts ints to // DeviceUserPolicyLoopbackProcessingModeProto::Mode enums. 
EncodeInteger( &policy, key::kDeviceUserPolicyLoopbackProcessingMode, em::DeviceUserPolicyLoopbackProcessingModeProto::USER_POLICY_MODE_MERGE); EXPECT_EQ( em::DeviceUserPolicyLoopbackProcessingModeProto::USER_POLICY_MODE_MERGE, policy.device_user_policy_loopback_processing_mode().mode()); EncodeBoolean(&policy, key::kVirtualMachinesAllowed, kBool); EXPECT_EQ(kBool, policy.virtual_machines_allowed().virtual_machines_allowed()); EncodeInteger(&policy, key::kDeviceMachinePasswordChangeRate, kInt); EXPECT_EQ(kInt, policy.device_machine_password_change_rate().rate_days()); EncodeInteger(&policy, key::kDeviceGpoCacheLifetime, kInt); EXPECT_EQ(kInt, policy.device_gpo_cache_lifetime().lifetime_hours()); EncodeInteger(&policy, key::kDeviceAuthDataCacheLifetime, kInt); EXPECT_EQ(kInt, policy.device_auth_data_cache_lifetime().lifetime_hours()); EncodeBoolean(&policy, key::kDeviceUnaffiliatedCrostiniAllowed, kBool); EXPECT_EQ(kBool, policy.device_unaffiliated_crostini_allowed() .device_unaffiliated_crostini_allowed()); EncodeBoolean(&policy, key::kDeviceShowLowDiskSpaceNotification, kBool); EXPECT_EQ(kBool, policy.device_show_low_disk_space_notification() .device_show_low_disk_space_notification()); // // Check whether all device policies have been handled. // std::vector<std::string> unhandled_policy_keys = GetUnhandledPolicyKeys(); EXPECT_TRUE(unhandled_policy_keys.empty()) << "Unhandled policy detected.\n" << "Please handle the following policies in " << "device_policy_encoder.cc and device_policy_encoder_unittest.cc:\n" << " " << base::JoinString(unhandled_policy_keys, "\n "); } } // namespace policy
miren521/qirui_saas
addon/platformcoupon/component/view/admin_coupon/js/design.js
/** * 平台优惠券·组件 */ var adminCouponHtml = '<div class="layui-form-item">'; adminCouponHtml += '<label class="layui-form-label sm">选择模板</label>'; adminCouponHtml += '<div class="layui-input-block">'; adminCouponHtml += '<template v-for="(item,index) in selectedTemplateList" v-bind:k="index">'; adminCouponHtml += '<div v-on:click="data.selectedTemplate=item.value" v-bind:class="{ \'layui-unselect layui-form-radio\' : true,\'layui-form-radioed\' : (data.selectedTemplate==item.value) }"><i class="layui-anim layui-icon">&#xe643;</i><div>{{item.text}}</div></div>'; adminCouponHtml += '</template>'; adminCouponHtml += '</div>'; adminCouponHtml += '</div>'; Vue.component("admin-coupon",{ template : adminCouponHtml, data : function(){ return { data : this.$parent.data, selectedTemplateList : [{ text : '样式一', value : 'default' }] }; } });
npocmaka/Windows-Server-2003
com/netfx/src/clr/vm/comcurrency.cpp
// ==++==
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// ==--==
//
// COMCurrency: managed Currency type support implemented on top of the
// OLE Automation CY (currency) conversion/arithmetic APIs (VarCy*).
// Every entry point maps OLE failure HRESULTs onto managed exceptions;
// on Windows CE none of the VarCy* APIs exist, so every method throws
// NotSupportedException there.
#include "common.h"
#include "object.h"
#include "excep.h"
#include "frames.h"
#include "vars.hpp"
#include "COMCurrency.h"
#include "COMString.h"

// Initialize a currency value from a 32-bit float (VarCyFromR4).
void COMCurrency::InitSingle(const InitSingleArgs * args)
{
    THROWSCOMPLUSEXCEPTION();
#ifdef PLATFORM_CE
    COMPlusThrow(kNotSupportedException, L"NotSupported_WinCEGeneric");
#else // !PLATFORM_CE
    HRESULT hr = VarCyFromR4(args->value, args->_this);
    if (FAILED(hr)) {
        if (hr==DISP_E_OVERFLOW)
            COMPlusThrow(kOverflowException, L"Overflow_Currency");
        // Any other failure HRESULT is unexpected; assert in debug builds,
        // surface it as a COM exception in retail.
        _ASSERTE(hr==NOERROR);
        COMPlusThrowHR(hr);
    }
#endif // !PLATFORM_CE
}

// Initialize a currency value from a 64-bit double (VarCyFromR8).
void COMCurrency::InitDouble(const InitDoubleArgs * args)
{
    THROWSCOMPLUSEXCEPTION();
#ifdef PLATFORM_CE
    COMPlusThrow(kNotSupportedException, L"NotSupported_WinCEGeneric");
#else // !PLATFORM_CE
    HRESULT hr = VarCyFromR8(args->value, args->_this);
    if (FAILED(hr)) {
        if (hr==DISP_E_OVERFLOW)
            COMPlusThrow(kOverflowException, L"Overflow_Currency");
        _ASSERTE(hr==NOERROR);
        COMPlusThrowHR(hr);
    }
#endif // !PLATFORM_CE
}

// Parse a currency value from a string. Parsing is done against the
// invariant-ish locale 0x0409 (en-US) with user overrides disabled, and
// the whole string must be consumed (trailing junk -> FormatException).
void COMCurrency::InitString(InitStringArgs * args)
{
    THROWSCOMPLUSEXCEPTION();
#ifdef PLATFORM_CE
    COMPlusThrow(kNotSupportedException, L"NotSupported_WinCEGeneric");
#else // !PLATFORM_CE
    VARIANT var;
    NUMPARSE numprs;
    BYTE digits[30];                   // digit buffer for VarParseNumFromStr
    numprs.cDig = 30;
    numprs.dwInFlags = NUMPRS_LEADING_WHITE | NUMPRS_TRAILING_WHITE | NUMPRS_LEADING_MINUS | NUMPRS_DECIMAL;
    HRESULT hr = VarParseNumFromStr(args->value->GetBuffer(), 0x0409, LOCALE_NOUSEROVERRIDE, &numprs, digits);
    if (SUCCEEDED(hr)) {
        // Only accept the parse if it consumed the entire string
        // (next char after cchUsed is the NUL terminator).
        if (args->value->GetBuffer()[numprs.cchUsed] == 0) {
            hr = VarNumFromParseNum(&numprs, digits, VTBIT_CY, &var);
            if (SUCCEEDED(hr)) {
                *args->_this = var.cyVal;
                return;
            }
        }
    }
    if (hr==DISP_E_TYPEMISMATCH)
        COMPlusThrow(kFormatException, L"Format_CurrencyBad");
    else if (hr==DISP_E_OVERFLOW)
        COMPlusThrow(kOverflowException, L"Overflow_Currency");
    else {
        _ASSERTE(hr==NOERROR);
        COMPlusThrowHR(hr);
    }
#endif // !PLATFORM_CE
}

// result = c1 + c2 (VarCyAdd); overflow -> OverflowException.
void COMCurrency::Add(const ArithOpArgs * args)
{
    THROWSCOMPLUSEXCEPTION();
#ifdef PLATFORM_CE
    COMPlusThrow(kNotSupportedException, L"NotSupported_WinCEGeneric");
#else // !PLATFORM_CE
    HRESULT hr = VarCyAdd(args->c1, args->c2, args->result);
    if (FAILED(hr)) {
        if (hr==DISP_E_OVERFLOW)
            COMPlusThrow(kOverflowException, L"Overflow_Currency");
        _ASSERTE(hr==NOERROR);
        COMPlusThrowHR(hr);
    }
#endif // !PLATFORM_CE
}

// result = floor toward zero? VarCyInt retains the integer part of c.
// NOTE(review): VarCyInt truncates toward negative infinity per OleAut
// docs - confirm against the managed Floor contract.
void COMCurrency::Floor(const FloorArgs * args)
{
    THROWSCOMPLUSEXCEPTION();
#ifdef PLATFORM_CE
    COMPlusThrow(kNotSupportedException, L"NotSupported_WinCEGeneric");
#else // !PLATFORM_CE
    _ASSERTE(args->result);
    HRESULT hr = VarCyInt(args->c, args->result);
    if (FAILED(hr)) {
        if (hr==DISP_E_OVERFLOW)
            COMPlusThrow(kOverflowException, L"Overflow_Currency");
        _ASSERTE(hr==NOERROR);
        COMPlusThrowHR(hr);
    }
#endif // !PLATFORM_CE
}

// result = c1 * c2 (VarCyMul); overflow -> OverflowException.
void COMCurrency::Multiply(const ArithOpArgs * args)
{
    THROWSCOMPLUSEXCEPTION();
#ifdef PLATFORM_CE
    COMPlusThrow(kNotSupportedException, L"NotSupported_WinCEGeneric");
#else // !PLATFORM_CE
    _ASSERTE(args->result);
    HRESULT hr = VarCyMul(args->c1, args->c2, args->result);
    if (FAILED(hr)) {
        if (hr==DISP_E_OVERFLOW)
            COMPlusThrow(kOverflowException, L"Overflow_Currency");
        _ASSERTE(hr==S_OK);   // Didn't expect to get here. Update code for this HR.
        COMPlusThrowHR(hr);
    }
#endif // !PLATFORM_CE
}

// Round c to 'decimals' fractional digits (VarCyRound).
// decimals must be in [0, 4] - a CY only carries 4 decimal places.
// NOTE(review): the debug _ASSERTE(args->result) fires before the retail
// NULL check below; the retail path still throws ArgumentNullException.
void COMCurrency::Round(const RoundArgs * args)
{
    THROWSCOMPLUSEXCEPTION();
#ifdef PLATFORM_CE
    COMPlusThrow(kNotSupportedException, L"NotSupported_WinCEGeneric");
#else // !PLATFORM_CE
    _ASSERTE(args->result);
    if (args->result == NULL)
        COMPlusThrow(kArgumentNullException, L"ArgumentNull_Generic");
    if (args->decimals < 0 || args->decimals > 4)
        COMPlusThrowArgumentOutOfRange(L"digits", L"ArgumentOutOfRange_CurrencyRound");
    HRESULT hr = VarCyRound(args->c, args->decimals, args->result);
    if (FAILED(hr)) {
        if (hr==E_INVALIDARG)
            COMPlusThrow(kArgumentException, L"Argument_InvalidValue");
        if (hr==DISP_E_OVERFLOW)
            COMPlusThrow(kOverflowException, L"Overflow_Currency");
        _ASSERTE(hr==S_OK);   // Didn't expect to get here. Update code for this HR.
        COMPlusThrowHR(hr);
    }
#endif // !PLATFORM_CE
}

// result = c1 - c2 (VarCySub); overflow -> OverflowException.
void COMCurrency::Subtract(const ArithOpArgs * args)
{
    THROWSCOMPLUSEXCEPTION();
#ifdef PLATFORM_CE
    COMPlusThrow(kNotSupportedException, L"NotSupported_WinCEGeneric");
#else // !PLATFORM_CE
    _ASSERTE(args->result);
    HRESULT hr = VarCySub(args->c1, args->c2, args->result);
    if (FAILED(hr)) {
        if (hr==DISP_E_OVERFLOW)
            COMPlusThrow(kOverflowException, L"Overflow_Currency");
        _ASSERTE(hr==S_OK);   // Didn't expect to get here. Update code for this HR.
        COMPlusThrowHR(hr);
    }
#endif // !PLATFORM_CE
}

// Convert a CY to a DECIMAL (VarDecFromCy), then canonicalize the
// DECIMAL representation and clear the reserved word.
void COMCurrency::ToDecimal(const ToDecimalArgs * args)
{
    THROWSCOMPLUSEXCEPTION();
#ifdef PLATFORM_CE
    COMPlusThrow(kNotSupportedException, L"NotSupported_WinCEGeneric");
#else // !PLATFORM_CE
    _ASSERTE(args->result);
    HRESULT hr = VarDecFromCy(args->c, args->result);
    if (FAILED(hr)) {
        _ASSERTE(hr==S_OK);   // Didn't expect to get here. Update code for this HR.
        COMPlusThrowHR(hr);
    }
    DecimalCanonicalize(args->result);
    args->result->wReserved = 0;
#endif // !PLATFORM_CE
}

// Convert a CY to a 64-bit double (VarR8FromCy).
double COMCurrency::ToDouble(const ToXXXArgs * args)
{
    THROWSCOMPLUSEXCEPTION();
#ifdef PLATFORM_CE
    COMPlusThrow(kNotSupportedException, L"NotSupported_WinCEGeneric");
    return -1;   // Compiler appeasement
#else // !PLATFORM_CE
    double result;
    HRESULT hr = VarR8FromCy(args->c, &result);
    if (FAILED(hr)) {
        _ASSERTE(hr==S_OK);   // Didn't expect to get here. Update code for this HR.
        COMPlusThrowHR(hr);
    }
    return result;
#endif // !PLATFORM_CE
}

// Convert a CY to a 32-bit float (VarR4FromCy).
float COMCurrency::ToSingle(const ToXXXArgs * args)
{
    THROWSCOMPLUSEXCEPTION();
#ifdef PLATFORM_CE
    COMPlusThrow(kNotSupportedException, L"NotSupported_WinCEGeneric");
    return -1;   // Compiler appeasement
#else // !PLATFORM_CE
    float result;
    HRESULT hr = VarR4FromCy(args->c, &result);
    if (FAILED(hr)) {
        _ASSERTE(hr==S_OK);   // Didn't expect to get here. Update code for this HR.
        COMPlusThrowHR(hr);
    }
    return result;
#endif // !PLATFORM_CE
}

// Format a CY as a managed string via VarBstrFromCy; the intermediate
// BSTR is copied into a STRINGREF and freed before returning.
LPVOID COMCurrency::ToString(const ToXXXArgs * args)
{
    THROWSCOMPLUSEXCEPTION();
#ifdef PLATFORM_CE
    COMPlusThrow(kNotSupportedException, L"NotSupported_WinCEGeneric");
    return NULL;   // Compiler appeasement
#else // !PLATFORM_CE
    BSTR bstr;
    STRINGREF result;
    HRESULT hr = VarBstrFromCy(args->c, 0, 0, &bstr);
    if (FAILED(hr)) {
        if (hr==E_OUTOFMEMORY)
            COMPlusThrowOM();
        _ASSERTE(hr==S_OK);   // Didn't expect to get here. Update code for this HR.
        COMPlusThrowHR(hr);
    }
    result = COMString::NewString(bstr, SysStringLen(bstr));
    SysFreeString(bstr);
    RETURN(result, STRINGREF);
#endif // !PLATFORM_CE
}

// Truncate the fractional part of c (VarCyFix).
void COMCurrency::Truncate(const TruncateArgs * args)
{
    THROWSCOMPLUSEXCEPTION();
#ifdef PLATFORM_CE
    COMPlusThrow(kNotSupportedException, L"NotSupported_WinCEGeneric");
#else // !PLATFORM_CE
    _ASSERTE(args->result);
    VarCyFix(args->c, args->result);   // VarCyFix can't return anything other than NOERROR
                                       // currently in OleAut.
#endif // !PLATFORM_CE
}
yma88/indy
addons/path-mapped/common/src/main/java/org/commonjava/indy/pathmapped/inject/PathMappedGroupRepositoryFilter.java
<filename>addons/path-mapped/common/src/main/java/org/commonjava/indy/pathmapped/inject/PathMappedGroupRepositoryFilter.java
/**
 * Copyright (C) 2011-2020 Red Hat, Inc. (https://github.com/Commonjava/indy)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.commonjava.indy.pathmapped.inject;

import com.google.common.collect.Lists;
import org.commonjava.indy.conf.IndyConfiguration;
import org.commonjava.indy.core.content.group.AbstractGroupRepositoryFilter;
import org.commonjava.indy.model.core.ArtifactStore;
import org.commonjava.indy.model.core.Group;
import org.commonjava.indy.model.core.StoreKey;
import org.commonjava.indy.model.core.StoreType;
import org.commonjava.indy.pathmapped.cache.PathMappedMavenGACache;
import org.commonjava.maven.galley.cache.pathmapped.PathMappedCacheProvider;
import org.commonjava.maven.galley.model.SpecialPathInfo;
import org.commonjava.maven.galley.spi.cache.CacheProvider;
import org.commonjava.maven.galley.spi.io.SpecialPathManager;
import org.commonjava.storage.pathmapped.core.PathMappedFileManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.annotation.PostConstruct;
import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.commonjava.atlas.maven.ident.util.SnapshotUtils.LOCAL_SNAPSHOT_VERSION_PART;
import static org.commonjava.indy.pkg.PackageTypeConstants.PKG_TYPE_MAVEN;
import static org.commonjava.indy.pkg.maven.model.MavenPackageTypeDescriptor.MAVEN_PKG_KEY;
import static org.commonjava.indy.pkg.npm.model.NPMPackageTypeDescriptor.NPM_PKG_KEY;

/**
 * Group repository filter backed by the path-mapped storage metadata.
 * When the cache provider is path-mapped, membership queries against the
 * path-mapped DB are used to drop hosted repositories that cannot possibly
 * contain the requested path, reducing fan-out when resolving group content.
 * Remote repositories are never filtered out (the path may still be
 * downloadable from them).
 */
@ApplicationScoped
public class PathMappedGroupRepositoryFilter
                extends AbstractGroupRepositoryFilter
{
    private final Logger logger = LoggerFactory.getLogger( getClass() );

    @Inject
    private CacheProvider cacheProvider;

    @Inject
    private IndyConfiguration indyConfig;

    @Inject
    private PathMappedMavenGACache gaCache;

    @Inject
    private SpecialPathManager specialPathManager;

    // Non-null only when the injected CacheProvider is path-mapped;
    // also serves as the "is this filter applicable" flag (see canProcess).
    private PathMappedFileManager pathMappedFileManager;

    @PostConstruct
    void setup()
    {
        if ( cacheProvider instanceof PathMappedCacheProvider )
        {
            pathMappedFileManager = ( (PathMappedCacheProvider) cacheProvider ).getPathMappedFileManager();
        }
    }

    @Override
    public int getPriority()
    {
        return 0;
    }

    /**
     * This filter only applies when the deployment uses a path-mapped
     * cache provider; otherwise it is a no-op for every path/group.
     */
    @Override
    public boolean canProcess( String path, Group group )
    {
        if ( pathMappedFileManager != null )
        {
            return true;
        }
        return false;
    }

    /**
     * Filter for remote repos plus hosted repos which contains the target path. Because caller may try to
     * download the target path from remote repositories.
     */
    @Override
    public List<ArtifactStore> filter( String path, Group group, List<ArtifactStore> concreteStores )
    {
        List<String> candidates = getCandidates( concreteStores );
        if ( candidates.isEmpty() )
        {
            logger.debug( "No candidate matches, skip" );
            return concreteStores;
        }

        // Non-snapshot maven-metadata paths are already narrowed by the
        // GA cache filter, so don't re-filter them here.
        if ( gaCache.isStarted() && isMavenMetadataNonSnapshotPath( group, path ) )
        {
            logger.debug( "Maven metadata, use GA cache filter result, skip" );
            return concreteStores;
        }

        String strategyPath = getStrategyPath( group.getKey(), path );
        if ( strategyPath == null )
        {
            logger.debug( "Can not get strategy path, group: {}, path: {}", group.getKey(), path );
            return concreteStores;
        }

        // batch it to avoid huge 'IN' query
        Set<String> ret = new HashSet<>();
        int batchSize = indyConfig.getFileSystemContainingBatchSize();
        List<List<String>> subSets = Lists.partition( candidates, batchSize );
        subSets.forEach( subSet -> {
            logger.debug( "Get file system containing, strategyPath: {}, subSet: {}", strategyPath, subSet );
            Set<String> st = pathMappedFileManager.getFileSystemContainingDirectory( subSet, strategyPath );
            if ( st == null )
            {
                // query failed but those candidates may contain the target path so we add all subSet candidates
                logger.warn( "Get fileSystems query failed, add subSet candidates" );
                ret.addAll( subSet );
            }
            else
            {
                ret.addAll( st );
            }
        } );

        return concreteStores.stream()
                             .filter( store -> store.getType() == StoreType.remote || ret.contains(
                                             store.getKey().toString() ) )
                             .collect( Collectors.toList() );
    }

    // True for maven-metadata special paths that are NOT snapshot versions.
    private boolean isMavenMetadataNonSnapshotPath( Group group, String path )
    {
        if ( group.getPackageType().equals( PKG_TYPE_MAVEN ) )
        {
            SpecialPathInfo pathInfo = specialPathManager.getSpecialPathInfo( path );
            return pathInfo != null && pathInfo.isMetadata() && !path.contains( LOCAL_SNAPSHOT_VERSION_PART );
        }
        return false;
    }

    /**
     * Get hosted repos
     */
    private List<String> getCandidates( List<ArtifactStore> concreteStores )
    {
        return concreteStores.stream()
                             .filter( store -> store.getType() == StoreType.hosted )
                             .map( store -> store.getKey().toString() )
                             .collect( Collectors.toList() );
    }

    // Derive the directory used for the containment query; null means
    // "can't derive, don't filter".
    private String getStrategyPath( final StoreKey key, final String rawPath )
    {
        if ( isBlank( rawPath ) )
        {
            return null;
        }
        Path parent = null;
        if ( key.getPackageType().equals( MAVEN_PKG_KEY ) )
        {
            // Use parent path because 1. maven metadata generator need to list it, 2. it is supper set of file path
            parent = Paths.get( rawPath ).getParent();
        }
        else if ( key.getPackageType().equals( NPM_PKG_KEY ) )
        {
            /*
             * E.g,
             * jquery/-/jquery-1.5.1.tgz -> jquery/-/, jquery-1.5.1.tgz
             * jquery -> jquery/, package.json
             */
            parent = Paths.get( rawPath ).getParent();
        }
        if ( parent == null )
        {
            return rawPath;
        }
        else
        {
            return parent.toString();
        }
    }
}
3pillarlabs/spring-integration-aws
int-aws-integration-test/src/test/java/intaws/integration/test/InterAccountSQSPermissionTest.java
<gh_stars>10-100
package intaws.integration.test;

import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;

import java.util.HashSet;
import java.util.Set;

import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.integration.Message;
import org.springframework.integration.MessageChannel;
import org.springframework.integration.MessagingException;
import org.springframework.integration.core.MessageHandler;
import org.springframework.integration.core.SubscribableChannel;
import org.springframework.integration.support.MessageBuilder;

/**
 * Integration test: a message published to an SQS queue owned by one AWS
 * account must be receivable via the inbound channel configured against
 * another account (cross-account queue permissions). Requires live AWS
 * credentials/config referenced by InterAccountSQSPermissionTest.xml.
 */
@RunWith(JUnit4.class)
public class InterAccountSQSPermissionTest {

    // Payloads observed by the inbound subscriber; guarded by its own
    // monitor because delivery happens on a poller thread.
    private final Set<String> messages;

    private ConfigurableApplicationContext appCtx;

    public InterAccountSQSPermissionTest() {
        this.messages = new HashSet<String>();
    }

    @Test
    public void messagePublishFromOtherAccount() throws InterruptedException {
        appCtx = new ClassPathXmlApplicationContext(
                "InterAccountSQSPermissionTest.xml", getClass());

        SubscribableChannel inboundChannel = appCtx.getBean("message-in",
                SubscribableChannel.class);
        inboundChannel.subscribe(new MessageHandler() {

            @Override
            public void handleMessage(Message<?> message)
                    throws MessagingException {
                synchronized (messages) {
                    messages.add((String) message.getPayload());
                }
            }
        });

        MessageChannel outboundChannel = appCtx.getBean("message-out",
                MessageChannel.class);
        final String msg1 = "This is message 1";
        outboundChannel.send(MessageBuilder.withPayload(msg1).build());

        // Fixed wait for SQS round-trip delivery; presumably generous enough
        // for cross-account propagation - no polling/latch is used here.
        Thread.sleep(30000);

        assertThat(messages, contains(msg1));
    }

    @After
    public void teardown() {
        if (appCtx != null) {
            appCtx.close();
        }
    }
}
zifter/nonogram
tests/test_sudoku/test_solution.py
<gh_stars>0 import env import unittest import tempfile import os from test_sudoku.testcase_sudoku import TestCaseSudoku from sudoku.solution import Solution class TestSudokuSolutionClass(TestCaseSudoku): matrix = [ [1, 2, 3, 4, 5, 6, 7, 8, 9], # 1 [1, 2, 3, 4, 5, 6, 7, 8, 9], # 2 [1, 2, 3, 4, 5, 6, 7, 8, 9], # 3 [1, 2, 3, 4, 5, 6, 7, 8, 9], # 4 [1, 2, 3, 4, 5, 6, 7, 8, 9], # 5 [1, 2, 3, 4, 5, 6, 7, 8, 9], # 6 [1, 2, 3, 4, 5, 6, 7, 8, 9], # 7 [1, 2, 3, 4, 5, 6, 7, 8, 9], # 8 [1, 2, 3, 4, 5, 6, 7, 8, 9], # 9 ] def test_load(self): s0 = Solution(self.matrix) s1 = Solution.load({"matrix": self.matrix}) self.assertEqual(s0, s1) def test_save(self): s0 = Solution(self.matrix) self.assertEqual(s0.save(), {"matrix": self.matrix}) def test_save_and_load_using_file(self): handler, tmpfile = tempfile.mkstemp(prefix="solution_") try: s0 = Solution(matrix=self.matrix) s0.save_to_file(tmpfile) s1 = Solution.load_from_file(tmpfile) self.assertEqual(s0, s1) finally: os.close(handler) os.remove(tmpfile) if __name__ == '__main__': unittest.main()
Himon-SYNCRAFT/taskplus
taskplus/core/serializers/task_status_serializer.py
import json


class TaskStatusEncoder(json.JSONEncoder):
    """JSON encoder for task-status objects exposing ``id`` and ``name``.

    Objects carrying both attributes serialize to ``{'id': ..., 'name': ...}``;
    anything else is handed to the base class (which raises TypeError for
    non-serializable values).
    """

    def default(self, data):
        # EAFP: probe the two expected attributes; fall back to the base
        # implementation when either is missing.
        try:
            status_id = data.id
            status_name = data.name
        except AttributeError:
            return super().default(data)
        return {
            'id': status_id,
            'name': status_name
        }
eenurkka/incubator-nuttx
include/nuttx/nx/nxtypes.h
/****************************************************************************
 * include/nuttx/nx/nxtypes.h
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.  The
 * ASF licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the
 * License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 ****************************************************************************/

#ifndef __INCLUDE_NUTTX_NX_TYPES_H
#define __INCLUDE_NUTTX_NX_TYPES_H

/****************************************************************************
 * Included Files
 ****************************************************************************/

#include <nuttx/config.h>

#include <stdint.h>
#include <stdbool.h>
#include <fixedmath.h>

/****************************************************************************
 * Public Types
 ****************************************************************************/

/* Pixels *******************************************************************/

/* The size of graphics solutions can be reduced by disabling support for
 * specific resolutions.  One thing we can do, for example, is to select
 * the smallest common pixel representation:
 */

#if !defined(CONFIG_NX_DISABLE_32BPP) || !defined(CONFIG_NX_DISABLE_24BPP)
typedef uint32_t nxgl_mxpixel_t;    /* 24/32 bpp enabled: 32-bit pixels */
#elif !defined(CONFIG_NX_DISABLE_16BPP)
typedef uint16_t nxgl_mxpixel_t;    /* At most 16 bpp enabled */
#else
typedef uint8_t  nxgl_mxpixel_t;    /* At most 8 bpp enabled */
#endif

/* Graphics structures ******************************************************/

/* A given coordinate is limited to the screen height an width.  If either
 * of those values exceed 32,767 pixels, then the following will have to need
 * to change:
 */

typedef int16_t nxgl_coord_t;

/* Describes a point on the display */

struct nxgl_point_s
{
  nxgl_coord_t x;         /* X position, range: 0 to screen width - 1 */
  nxgl_coord_t y;         /* Y position, range: 0 to screen height - 1 */
};

/* Describes the size of a rectangular region */

struct nxgl_size_s
{
  nxgl_coord_t w;        /* Width in pixels */
  nxgl_coord_t h;        /* Height in rows */
};

/* Describes a positioned rectangle on the display */

struct nxgl_rect_s
{
  struct nxgl_point_s pt1; /* Upper, left-hand corner */
  struct nxgl_point_s pt2; /* Lower, right-hand corner */
};

/* Describes a vector starting at pt1 and extending through pt2 */

struct nxgl_vector_s
{
  struct nxgl_point_s pt1; /* Start position */
  struct nxgl_point_s pt2; /* End position */
};

/* Describes a run, i.e., a horizontal line.
 * Note that the start/end positions have fractional precision.
 * This is necessary for good joining of trapezoids when a more complex
 * shape is decomposed into trapezoids.
 */

struct nxgl_run_s
{
  b16_t x1;              /* Left X position, range: 0 to x2 */
  b16_t x2;              /* Right X position, range: x1 to screen width - 1 */
  nxgl_coord_t y;        /* Top Y position, range: 0 to screen height - 1 */
};

/* Describes a horizontal trapezoid on the display in terms the run at the
 * top of the trapezoid and the run at the bottom.
 */

struct nxgl_trapezoid_s
{
  struct nxgl_run_s top; /* Top run */
  struct nxgl_run_s bot; /* bottom run */
};

#endif /* __INCLUDE_NUTTX_NX_TYPES_H */
endosama/three-nebula
src/debug/index.js
<filename>src/debug/index.js<gh_stars>100-1000
// Barrel module: re-exports the debug utilities under their public names.
export { default as Debug } from './Debug';
export { default as log } from './log';
ThreatConnect-Inc/threatconnect-java
threatconnect-sdk/threatconnect-sdk-core/src/main/java/com/threatconnect/sdk/server/entity/CustomIndicator.java
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package com.threatconnect.sdk.server.entity;

import java.util.Date;
import java.util.Map;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.threatconnect.sdk.util.CustomIndicatorSerializer;
import com.threatconnect.sdk.util.CustomIndicatorDeserializer;

/**
 * Indicator subtype for user-defined ("custom") indicator types. On top of
 * the base {@link Indicator} fields it carries the custom type name, a free
 * key/value map of the custom fields, and an optional
 * {@link CustomIndicatorIdFinder} strategy that derives the unique id from
 * that map. JSON (de)serialization goes through dedicated custom
 * (de)serializers; the extra fields are excluded from default
 * Jackson/JAXB binding via {@code @JsonIgnore}/{@code @XmlTransient}.
 */
@JsonDeserialize(using = CustomIndicatorDeserializer.class)
@JsonSerialize(using = CustomIndicatorSerializer.class)
@JsonInclude(Include.NON_NULL)
@XmlRootElement(name = "CustomIndicator")
@XmlAccessorType(XmlAccessType.FIELD)
public class CustomIndicator extends Indicator
{
    /** Name of the custom indicator type (set out-of-band, not serialized by default binding). */
    @JsonIgnore
    @XmlTransient
    private String indicatorType;

    /** Custom field name -> value pairs for this indicator. */
    @JsonIgnore
    @XmlTransient
    private Map<String,String> map;

    /** Strategy used by {@link #getUniqueId()} to derive the unique id. */
    @JsonIgnore
    @XmlTransient
    private CustomIndicatorIdFinder finder;

    public void setUniqueIdFinder(CustomIndicatorIdFinder finder)
    {
        this.finder = finder;
    }

    public CustomIndicator()
    {
        super();
    }

    /**
     * Copy constructor from a plain {@link Indicator}.
     * NOTE(review): only the fields below are copied - summary, type,
     * source and the threat-assess values are intentionally(?) not
     * carried over; confirm before relying on a full copy.
     */
    public CustomIndicator(Indicator indicator)
    {
        super();
        this.setId(indicator.getId());
        this.setDateAdded(indicator.getDateAdded());
        this.setLastModified(indicator.getLastModified());
        this.setRating(indicator.getRating());
        this.setConfidence(indicator.getConfidence());
        this.setWebLink(indicator.getWebLink());
        this.setDescription(indicator.getDescription());
        this.setObservationCount(indicator.getObservationCount());
        this.setOwner(indicator.getOwner());
        this.setFalsePositiveLastReported(indicator.getFalsePositiveLastReported());
        this.setFalsePositiveCount(indicator.getFalsePositiveCount());
        this.setOwnerName(indicator.getOwnerName());
    }

    public CustomIndicator(Long id, Owner owner, String ownerName, String type, Date dateAdded, Date lastModified,
                           Double rating, Double confidence, Double threatAssessRating, Double threatAssessConfidence,
                           String webLink, String source, String description, String summary,
                           Map<String,String> keyValuePairs)
    {
        super(id, owner, ownerName, type, dateAdded, lastModified, rating, confidence,
              threatAssessRating, threatAssessConfidence, webLink, source, description, summary);
        map = keyValuePairs;
    }

    public Map<String,String> getMap()
    {
        return map;
    }

    public void setMap(Map<String,String> map)
    {
        this.map = map;
    }

    public String getIndicatorType()
    {
        return indicatorType;
    }

    public void setIndicatorType(String indicatorType)
    {
        this.indicatorType = indicatorType;
    }

    /**
     * Down-convert to a plain {@link Indicator}, copying the same subset of
     * fields as the copy constructor (custom map/type are dropped).
     */
    public Indicator castToIndicator()
    {
        Indicator result = new Indicator();
        result.setId(this.getId());
        result.setDateAdded(this.getDateAdded());
        result.setLastModified(this.getLastModified());
        result.setRating(this.getRating());
        result.setConfidence(this.getConfidence());
        result.setWebLink(this.getWebLink());
        result.setDescription(this.getDescription());
        result.setObservationCount(this.getObservationCount());
        result.setOwner(this.getOwner());
        result.setFalsePositiveLastReported(this.getFalsePositiveLastReported());
        result.setFalsePositiveCount(this.getFalsePositiveCount());
        result.setOwnerName(this.getOwnerName());
        return result;
    }

    /**
     * Derive this indicator's unique id via the configured
     * {@link CustomIndicatorIdFinder}.
     *
     * @throws RuntimeException when no finder strategy has been set.
     */
    public String getUniqueId()
    {
        if (finder == null)
            // FIX: corrected message typo ("please provider" -> "please provide")
            throw new RuntimeException("please provide an implementation to find unique id");

        return finder.getUniqueId(this);
    }
}
john-aws/backendjs
modules/db_cassandra.js
<filename>modules/db_cassandra.js<gh_stars>0 // // Author: <NAME> <EMAIL> // backendjs 2018 // var util = require('util'); var url = require('url'); var net = require('net'); var fs = require('fs'); var path = require('path'); var domain = require('domain'); var cluster = require('cluster'); var os = require('os'); var core = require(__dirname + '/../lib/core'); var lib = require(__dirname + '/../lib/lib'); var db = require(__dirname + '/../lib/db'); var logger = require(__dirname + '/../lib/logger'); var pool = { name: "cassandra", configOptions: { typesMap: { json: "text", real: "double", counter: "counter", bigint: "bigint", now: "bigint" }, opsMap: { begins_with: "begins_with" }, sqlPlaceholder: "?", strictTypes: 1, noCoalesce: 1, ifExpected: 1, noConcat: 1, noDefaults: 1, noAuto: 1, noNulls: 1, noLengths: 1, noReplace: 1, noBetween: 1, noJson: 1, noCustomKey: 1, noCompositeIndex: 1, noMultiSQL: 1 }, createPool: function(options) { return new Pool(options); } }; module.exports = pool; db.modules.push(pool); function Pool(options) { options.type = pool.name; db.SqlPool.call(this, options); this.configOptions = lib.objMerge(this.configOptions, pool.configOptions); } util.inherits(Pool, db.SqlPool) Pool.prototype.open = function(callback) { var self = this; if (this.url == "default") this.url = "cassandra://cassandra:cassandra@127.0.0.1/" + db.dbName; var hosts = lib.strSplit(this.url).map(function(x) { return url.parse(x); }); if (!hosts.length) return callback(lib.newError("no server provider")); var opts = { contactPoints: hosts.map(function(x) { return x.host }), keyspace: hosts[0].path.substr(1) }; for (var p in this.connectOptions) opts[p] = this.connectOptions[p]; if (opts.user && opts.password) { opts.authProvider = new cassandra.auth.PlainTextAuthProvider(opts.user, opts.pasword); } else if (hosts[0].auth) { opts.authProvider = new cassandra.auth.PlainTextAuthProvider(hosts[0].auth.split(':')[0], hosts[0].auth.split(':')[1]); } var cassandra = 
require('cassandra-driver'); var client = new cassandra.Client(opts); client.query = function() { self.doQuery.apply(client, arguments) } client.on('error', function(err) { logger.error('cassandra:', err); }); callback(null, client); } Pool.prototype.doQuery = function(text, values, options, callback) { var self = this; this.execute(text, values ? lib.objClone(values) : null, options, function(err, result) { if (err) return callback(err, []); var rows = []; if (result && result.rows) { for (var i = 0; i < result.rows.length; i++) { var obj = {}; result.rows[i].forEach(function(value, name) { obj[name] = value; }); rows.push(obj); } if (options && options.rowfilter) { rows = options.rowfilter(rows); delete options.rowfilter; } if (options && options.rowsort) { rows = options.rowsort(rows); delete options.rowsort; } } self.affected_rows = 1; callback(err, rows); }); } // No REPLACE INTO support but UPDATE creates new record if no primary key exists Pool.prototype.put = function(table, obj, options, callback) { db.update(table, obj, options, callback); } Pool.prototype.close = function(client, callback) { client.shutdown(callback); } Pool.prototype.prepare = function(req) { switch (op) { case "search": case "select": req.options = lib.objClone(req.options); // Cannot search by non primary keys var keys = db.getKeys(req.table); var cols = req.columns || db.getColumns(req.table); var lastKey = keys[keys.length - 1], lastOps = req.options.ops && req.options.ops[lastKey]; // Install custom filter if we have other columns in the keys var other = Object.keys(req.obj).filter(function(x) { return x[0] != "_" && keys.indexOf(x) == -1 && typeof req.obj[x] != "undefined" }); // Custom filter function for in-memory filtering of the results using non-indexed properties if (other.length) req.options.rowfilter = function(rows) { return db.filterRows(obj, rows, { keys: other, cols: cols, ops: req.options.ops, typesMap: req.options.typesMap || this.configOptions.typesMap }); } 
req.options.keys = keys; // Sorting is limited to the second part of the composite key so we will do it in memory if (req.options.sort && (keys.length < 2 || keys[1] != req.options.sort)) { var sort = req.options.sort; req.options.rowsort = function(rows) { return rows.sort(function(a,b) { return (a[sort] - b[sort])*(req.options.desc?-1:1) }) } req.options.sort = null; } // Pagination, start must be a token returned by the previous query if (Array.isArray(req.options.start) && typeof req.options.start[0] == "object") { req.obj = lib.objClone(req.obj); req.options.ops[lastKey] = req.options.desc ? "lt" : "gt"; req.options.start.forEach(function(x) { for (var p in x) req.obj[p] = x[p]; }); } logger.debug('select:', pool.name, req.options.keys, req.options.sort, other); db.sqlPrepare(req); if (lastOps) req.options.ops[lastKey] = lastOps; if (!req.obj[keys[0]]) req.text += " ALLOW FILTERING"; return; case "add": case "incr": case "put": case "update": req.options.hints = []; break; } db.sqlPrepare(req); } Pool.prototype.bindValue = function(value, info, options) { if (options.hints) options.hints.push(options.typesMap[info && info.type] || (info && info.type) || "text"); return value; } Pool.prototype.cacheColumns = function(options, callback) { var self = this; this.acquire(function(err, client) { if (err) return callback(err, []); client.query("SELECT * FROM system.schema_columns WHERE keyspace_name=?", [client.keyspace], options, function(err, rows) { rows.sort(function(a,b) { return a.component_index - b.component_index }); seld.dbcolumns = {}; self.dbkeys = {}; self.dbindexes = {}; for (var i = 0; i < rows.length; i++) { if (!self.dbcolumns[rows[i].columnfamily_name]) self.dbcolumns[rows[i].columnfamily_name] = {}; var data_type = rows[i].validator.replace(/[\(\)]/g,".").split(".").pop().replace("Type", "").toLowerCase(); // Set data type to collection type, use type for items var d = rows[i].validator.match(/(ListType|SetType|MapType)/); if (d) data_type = 
d[1].replace("Type", "").toLowerCase() + " " + data_type; var col = { id: i, data_type: data_type }; switch(rows[i].type) { case "regular": if (!rows[i].index_name) break; if (!self.dbindexes[rows[i].index_name]) self.dbindexes[rows[i].index_name] = []; self.dbindexes[rows[i].index_name].push(rows[i].column_name); break; case "partition_key": if (!self.dbkeys[rows[i].columnfamily_name]) self.dbkeys[rows[i].columnfamily_name] = []; self.dbkeys[rows[i].columnfamily_name].unshift(rows[i].column_name); if (col) col.primary = true; break; case "clustering_key": if (!self.dbkeys[rows[i].columnfamily_name]) self.dbkeys[rows[i].columnfamily_name] = []; self.dbkeys[rows[i].columnfamily_name].push(rows[i].column_name); if (col) col.primary = true; break; } self.dbcolumns[rows[i].columnfamily_name][rows[i].column_name] = col; } self.release(client); callback(err); }); }); } Pool.prototype.nextToken = function(client, req, rows) { if (req.options && req.options.count > 0 && rows.length == req.options.count) { var keys = db.getKeys(req.table); return keys.map(function(x) { return lib.objNew(x, rows[rows.length-1][x]) }); } return null; }