text
stringlengths
2
1.04M
meta
dict
// Copyright (c) morrisjdev. All rights reserved.
// Original copyright (c) .NET Foundation. All rights reserved.
// Modified version by morrisjdev
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Diagnostics;
using Microsoft.EntityFrameworkCore.Metadata;
using Microsoft.EntityFrameworkCore.Query;
using Microsoft.EntityFrameworkCore.Storage;

namespace FileContextCore.Query.Internal
{
    /// <summary>
    /// Compiles a shaped query expression into the expression tree that is executed
    /// against the file-backed store: table expressions become calls that enumerate
    /// <see cref="ValueBuffer"/> rows, and the shaper is compiled into a delegate
    /// wrapped in a <c>QueryingEnumerable&lt;T&gt;</c>.
    /// </summary>
    public partial class FileContextShapedQueryCompilingExpressionVisitor : ShapedQueryCompilingExpressionVisitor
    {
        // Derived DbContext type; passed into QueryingEnumerable (used there, per the
        // ctor signature, alongside the logger — presumably for error reporting; the
        // QueryingEnumerable implementation is outside this view).
        private readonly Type _contextType;

        // Query-category diagnostics logger, likewise handed to QueryingEnumerable.
        private readonly IDiagnosticsLogger<DbLoggerCategory.Query> _logger;

        public FileContextShapedQueryCompilingExpressionVisitor(
            ShapedQueryCompilingExpressionVisitorDependencies dependencies,
            QueryCompilationContext queryCompilationContext)
            : base(dependencies, queryCompilationContext)
        {
            _contextType = queryCompilationContext.ContextType;
            _logger = queryCompilationContext.Logger;
        }

        /// <summary>
        /// Handles the provider-specific extension nodes: a query expression has its
        /// projection applied and its server query visited in place; a table expression
        /// is rewritten into a call to <see cref="Table"/> that reads rows from the store.
        /// </summary>
        protected override Expression VisitExtension(Expression extensionExpression)
        {
            switch (extensionExpression)
            {
                case FileContextQueryExpression inMemoryQueryExpression:
                    inMemoryQueryExpression.ApplyProjection();
                    return Visit(inMemoryQueryExpression.ServerQueryExpression);

                case FileContextTableExpression inMemoryTableExpression:
                    return Expression.Call(
                        _tableMethodInfo,
                        QueryCompilationContext.QueryContextParameter,
                        Expression.Constant(inMemoryTableExpression.EntityType));
            }

            return base.VisitExtension(extensionExpression);
        }

        /// <summary>
        /// Builds the final executable expression: processes and materializes the shaper,
        /// compiles it to a delegate, and constructs a QueryingEnumerable over the inner
        /// server-side enumerable. The visitor order below is significant: projection
        /// bindings must be removed before the custom shaper compilation runs.
        /// </summary>
        protected override Expression VisitShapedQueryExpression(ShapedQueryExpression shapedQueryExpression)
        {
            var inMemoryQueryExpression = (FileContextQueryExpression)shapedQueryExpression.QueryExpression;

            var shaper = new ShaperExpressionProcessingExpressionVisitor(
                inMemoryQueryExpression,
                inMemoryQueryExpression.CurrentParameter)
                .Inject(shapedQueryExpression.ShaperExpression);

            shaper = InjectEntityMaterializers(shaper);

            var innerEnumerable = Visit(inMemoryQueryExpression);

            shaper = new FileContextProjectionBindingRemovingExpressionVisitor().Visit(shaper);

            shaper = new CustomShaperCompilingExpressionVisitor(IsTracking).Visit(shaper);

            var shaperLambda = (LambdaExpression)shaper;

            // QueryingEnumerable<T> has a single constructor; index [0] relies on that.
            return Expression.New(
                typeof(QueryingEnumerable<>).MakeGenericType(shaperLambda.ReturnType).GetConstructors()[0],
                QueryCompilationContext.QueryContextParameter,
                innerEnumerable,
                Expression.Constant(shaperLambda.Compile()),
                Expression.Constant(_contextType),
                Expression.Constant(_logger));
        }

        // Cached MethodInfo for Table, invoked from the expression tree built above.
        private static readonly MethodInfo _tableMethodInfo
            = typeof(FileContextShapedQueryCompilingExpressionVisitor).GetTypeInfo()
                .GetDeclaredMethod(nameof(Table));

        /// <summary>
        /// Enumerates every row of every store table mapped to <paramref name="entityType"/>
        /// as a <see cref="ValueBuffer"/>.
        /// </summary>
        private static IEnumerable<ValueBuffer> Table(
            QueryContext queryContext,
            IEntityType entityType)
        {
            return ((FileContextQueryContext)queryContext).Store
                .GetTables(entityType)
                .SelectMany(t => t.Rows.Select(vs => new ValueBuffer(vs)));
        }
    }
}
{ "content_hash": "a9a9118cd08c101323d8cba26f205af8", "timestamp": "", "source": "github", "line_count": 91, "max_line_length": 113, "avg_line_length": 42.032967032967036, "alnum_prop": 0.6935947712418301, "repo_name": "morrisjdev/FileContextCore", "id": "eca257c7a7c25ba141dff974573fcb35d2856c5f", "size": "3827", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "FileContextCore/Query/Internal/FileContextShapedQueryCompilingExpressionVisitor.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C#", "bytes": "400143" }, { "name": "CSS", "bytes": "1346" }, { "name": "HTML", "bytes": "3683" }, { "name": "JavaScript", "bytes": "226" } ], "symlink_target": "" }
// Class-dump style header: declares protobuf-generated log message 13905
// (subclass of WXPBGeneratedMessage). All properties are @dynamic — backed by
// the generated message machinery, not by ivars declared here.
@class VoiceInputBehavior;

@interface log_13905 : WXPBGeneratedMessage
{
}

// Standard class initializer (presumably registers field descriptors with the
// protobuf runtime — TODO confirm against WXPBGeneratedMessage).
+ (void)initialize;

// Remaining properties
@property(nonatomic) int clientVersion; // @dynamic clientVersion;
@property(nonatomic) int device; // @dynamic device;
@property(nonatomic) int ds; // @dynamic ds;
@property(nonatomic) int importDs; // @dynamic importDs;
@property(nonatomic) long long timeStamp; // @dynamic timeStamp;
@property(nonatomic) long long uin; // @dynamic uin;
// Nested message payload describing the voice-input operation.
@property(retain, nonatomic) VoiceInputBehavior *viOp; // @dynamic viOp;

@end
{ "content_hash": "b156bb6308e86db504b3641897c5bebe", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 72, "avg_line_length": 28.526315789473685, "alnum_prop": 0.7490774907749077, "repo_name": "walkdianzi/DashengHook", "id": "21a5e6e1ce28e498d8ba3fc83ffdd3238a0f86aa", "size": "715", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "WeChat-Headers/log_13905.h", "mode": "33261", "license": "mit", "language": [ { "name": "C", "bytes": "986" }, { "name": "Objective-C", "bytes": "10153542" }, { "name": "Objective-C++", "bytes": "18332" }, { "name": "Shell", "bytes": "1459" } ], "symlink_target": "" }
// Assembly-level metadata for SampleLibrary (embedded in the compiled DLL and
// shown in file properties / used by reflection).
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("SampleLibrary")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("SaladLab")]
[assembly: AssemblyProduct("SampleLibrary")]
[assembly: AssemblyCopyright("Copyright © 2016 SaladLab")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]

// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("77c35271-d6f7-4fc1-bb0b-c0c0f4417f20")]

// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
// A fixed version is used here (rather than the '*' wildcard) so builds are
// deterministic and binding-compatible.
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
{ "content_hash": "f6832cce9238420088b661f0c946973c", "timestamp": "", "source": "github", "line_count": 36, "max_line_length": 84, "avg_line_length": 39.30555555555556, "alnum_prop": 0.7484098939929329, "repo_name": "SaladbowlCreative/Common.Logging.Unity3D", "id": "4743ce1c68c53f50ac9f6cc5d9dd29387b4ac4fa", "size": "1418", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "sample/SampleLibrary/Properties/AssemblyInfo.cs", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "256" }, { "name": "C#", "bytes": "257441" }, { "name": "F#", "bytes": "1556" } ], "symlink_target": "" }
<?php /** * Template name: Team-Rhein-Main * * This is the template that displays all pages by default. * Please note that this is the WordPress construct of pages * and that other 'pages' on your WordPress site may use a * different template. * * @link https://codex.wordpress.org/Template_Hierarchy * * @package skoda */ ?> <!DOCTYPE HTML> <html lang="de"> <head> <meta http-equiv="Content-Type" content="text/html; charset=utf-8"> <meta content="width=device-width" name="viewport"> <meta name="format-detection" content="telephone=no"> <meta name="description" content="Das ŠKODA Team Rhein-Main wünscht Ihnen eine „Guude Fahrt“: Lernen Sie uns kennen und entdecken Sie unsere aktuellen Neu- und Gebrauchtwagenangebote."/> <title>Team Rhein-Main - ŠKODA TEAM Rhein-Main</title> <link rel="stylesheet" href="http://netdna.bootstrapcdn.com/bootstrap/3.3.2/css/bootstrap.min.css"> <link href="<?php echo get_template_directory_uri(); ?>/towns.css" type="text/css" rel="stylesheet"> <!-- <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/bootstrap-select/1.11.2/css/bootstrap-select.min.css">--> <?php wp_head(); ?> <link href="<?php echo get_template_directory_uri(); ?>/media.css" type="text/css" rel="stylesheet"> <script type="text/javascript" src="<?php echo get_template_directory_uri(); ?>/js/scripts.js"></script> </head> <body> <div id="wrapper"> <header id="header"> <div class="container"> <nav id="nav" class="navbar row" role="navigation"> <div class="container-fluid"> <!-- Brand and toggle get grouped for better mobile display --> <div class="navbar-header col-md-3"> <button type="button" class="navbar-toggle" data-toggle="collapse" data-target="#bs-example-navbar-collapse-1"> <span class="sr-only">Toggle navigation</span> <span class="icon-bar"></span> <span class="icon-bar"></span> <span class="icon-bar"></span> </button> <a class="logo" href="<?php echo home_url(); ?>"> <img src="<?php echo get_template_directory_uri(); ?>/images/skoda_logo.png" 
alt="SKODA Team Rhein-Main" /></a> </div> <?php wp_nav_menu( array( 'menu' => 'primary', 'theme_location' => 'primary', 'depth' => 2, 'container' => 'div', 'container_class' => 'collapse navbar-collapse col-md-9', 'container_id' => 'bs-example-navbar-collapse-1', 'dropdown_class' => 'subnavigation', 'menu_class' => 'nav navbar-nav', 'fallback_cb' => 'wp_bootstrap_navwalker::fallback', 'walker' => new wp_bootstrap_navwalker()) ); ?> </div> </nav> </div> </header> <div id="main"> <div id="content"> <div class="container"> <div class="select-map"> <div class="mapWrap"> <div class="interactiveContainer"> <?php $backgroundimage = get_field('acf_haendler_karte_backgroundimage'); if( !empty($backgroundimage) ): ?> <div class="imgBox" style="background-image: url('<?php echo $backgroundimage['url'] ?>')"></div> <?php endif; ?> <div class="mapMain"> <section class="mapHead"> <div> <div> <?php $headline = get_field('acf_haendler_texte_headline'); if( !empty($headline) ): ?> <h1><?PHP echo $headline ?></h1> <?php endif; ?> <?php $subline = get_field('acf_haendler_texte_subline'); if( !empty($subline) ): ?> <h2><?PHP echo $subline ?></h2> <?php endif; ?> </div> </div> </section> <div class="customMap_wrap"> <div id="custom_map"> <div class="mapImg"><img src="<?php echo get_template_directory_uri(); ?>/images/map03.png" alt="map"></div> <ul class="towns"> <?php if( have_rows('acf_haendler_karte') ): ?> <?php while( have_rows('acf_haendler_karte') ): the_row(); // vars $city = get_sub_field('acf_haendler_karte_stadt'); $pos_left = get_sub_field('acf_haendler_karte_stadt_pos_links'); $pos_top = get_sub_field('acf_haendler_karte_stadt_pos_oben'); echo '<li onClick="ga(\'send\', \'event\', \'Map-Hotspot\', \'Click\', \''.$city.'\');" class="hotspot" data-id="'.$city.'" data-town="'.$city.'" style="left:'.$pos_left.'px; top:'.$pos_top.'px;"><i class="pnt"></i><span class="pnt">'.$city.'</span></li>'; endwhile; endif; ?> </ul> </div> </div> </div> </div> </div> </div> <section 
class="list_wrap"> <header class="row"> <div class="col-md-8 col-sm-6"> <?php $headline = get_field('acf_haendler_infobox_headline'); if( !empty($headline) ): ?> <h3><?PHP echo $headline ?></h3> <?php endif; ?> </div> <div class="col-md-4 col-sm-6 col-xs-12 pull-right"> <div class="sorting"> <p class="title"><span>Sortieren nach:</span></p> <div class="sort_box"> <div class="selected drops"> <span class="selected_sort" data-sort="up">Autohausname, alphabetisch</span> <span class="bs-caret"><span class="caret"></span></span> </div> <ul> <li data-sort="up">Autohausname, alphabetisch</li> <li data-sort="town">Stadtname, alphabetisch</li> </ul> </div> </div> </div> </header> <div class="item_list"> <div class="desktop_items"> <div class="row item_columns"> <div class="col-sm-4" data-column="0"></div> <div class="col-sm-4" data-column="1"></div> <div class="col-sm-4" data-column="2"></div> </div> </div> <div class="mob_items"></div> <div class="tempContainer"></div> <div class="tempMobContainer"></div> <div class="itemContainer"> <?php if( have_rows('acf_haendler_infobox') ): ?> <?php while( have_rows('acf_haendler_infobox') ): the_row(); // vars $name = get_sub_field('acf_haendler_infobox_name'); $ort = get_sub_field('acf_haendler_infobox_ort'); $bild = get_sub_field('acf_haendler_infobox_bild'); /*$icon = get_sub_field('acf_haendler_infobox_icon');*/ $adresse = get_sub_field('acf_haendler_infobox_adresse'); $link = get_sub_field('acf_haendler_infobox_link'); ?> <div class="oneItem" id="<?php echo $ort ?>" data-name="<?php echo $name ?>" data-town="<?php echo $ort ?>" data-id="<?php echo $ort ?>"> <section class="item_wrap"> <header> <h4> <span class="name"><?php echo $name ?></span>, <span class="town"><?php echo $ort ?></span> </h4> <div class="imgBox" style="background-image: url('<?php echo $bild['url']; ?>')"></div> </header> <article class="contacts"> <header class="showContact"> <h5>Kontaktdaten</h5> <i class="item_icon"></i> <!-- <img class="item_icon" 
src="<?php echo $icon['url'] ?>" alt="" /> --> <span> <svg fill="#fff" viewBox="0 0 48 48" xmlns="http://www.w3.org/2000/svg"><path d="m14.83 16.42l9.17 9.17 9.17-9.17 2.83 2.83-12 12-12-12z"/></svg> <svg class="down" fill="#fff" viewBox="0 0 48 48" xmlns="http://www.w3.org/2000/svg"><path d="m14.83 30.83l9.17-9.17 9.17 9.17 2.83-2.83-12-12-12 12z"/></svg> </span> </header> <div class="contact_info"> <div class="contact_info_main"> <?php if( $adresse ): ?> <?PHP echo $adresse ?> <?php endif; ?> </div> <div class="times"> <h6>Öffnungszeiten</h6> <!-- Verkauf --> <!-- --> <?php while( have_rows('acf_haendler_infobox_oeffnungszeiten') ): the_row(); ?> <?php if( have_rows('acf_haendler_infobox_oeffnungszeiten_verkauf') ): ?> <strong>Verkauf:</strong> <dl> <?php while( have_rows('acf_haendler_infobox_oeffnungszeiten_verkauf') ): the_row(); ?> <?PHP $uhrzeit = get_sub_field('acf_haendler_infobox_oeffnungszeiten_verkauf_uhrzeit'); $wochentag = get_sub_field('acf_haendler_infobox_oeffnungszeiten_verkauf_wochentag'); ?> <dt><?PHP echo $wochentag ?></dt> <dd><?PHP echo $uhrzeit ?></dd> <?php endwhile; ?> <!-- Verkauf --> </dl> <?php endif; ?><!-- Verkauf --> <!-- Werkstatt --> <!-- --> <?php if( have_rows('acf_haendler_infobox_oeffnungszeiten_werkstatt') ): ?> <strong>Werkstatt:</strong> <dl> <?php while( have_rows('acf_haendler_infobox_oeffnungszeiten_werkstatt') ): the_row(); ?> <?PHP $uhrzeit = get_sub_field('acf_haendler_infobox_oeffnungszeiten_werkstatt_uhrzeit'); $wochentag = get_sub_field('acf_haendler_infobox_oeffnungszeiten_werkstatt_wochentag'); ?> <dt><?PHP echo $wochentag ?></dt> <dd><?PHP echo $uhrzeit ?></dd> <?php endwhile; ?> </dl> <?php endif; ?><!-- Werkstatt --> <?php endwhile; ?> <!-- acf_haendler_infobox_oeffnungszeiten --> </div> </div> </article> <?php if( $link ): ?> <a onClick="ga('send', 'event', 'CTA-More-Information', 'Click', '<?php echo $name ?>, <?php echo $ort ?>');" class="btn btn-success custom-button" href="<?php echo $link; ?>"><span>Mehr 
Informationen</span><i class="cta_arrow"><svg viewBox="0 0 48 48" xmlns="http://www.w3.org/2000/svg"><path d="m17.17 32.92l9.17-9.17-9.17-9.17 2.83-2.83 12 12-12 12z"/></svg></i></a> <?php endif; ?> </section> </div><!-- oneItem --> <?php endwhile; ?> <!-- acf_haendler_infobox --> <?php endif; ?> <!-- acf_haendler_infobox --> </div> </div> </section> </div> </div> </div> <footer id="footer"> <div class="container"> <div class="footerContent"> <div class="col-sm-6 logo"> <a href="/"><img src="<?php echo get_template_directory_uri(); ?>/images/skoda_team_rhein_main_logo.png" alt="SKODA Team Rhein-Main Logo" class="img-responsive" /></a> </div> <div class="col-sm-6 footer-nav"> <ul class="col-lg-7 col-sm-5 col-xs-6"> <!-- <li><a href="/impressum/">Impressum</a></li>--> <!-- <li><a href="/datenschutz/">Datenschutz</a></li> --> <!-- <li><a href="#">Kontakt</a></li> </a> --> <?php wp_nav_menu( array( 'skoda' => 'footer_menu' ) ); ?> </ul> <div class="pull-right copyright col-lg-5 col-sm-7 col-xs-6"> <p>&copy; ŠKODA TEAM Rhein-Main</p> </div> </div> </div> </div> </footer> </div> <?php wp_footer(); ?> <script type="text/javascript" src="http://code.jquery.com/jquery-1.10.1.min.js"></script> <script src="http://netdna.bootstrapcdn.com/bootstrap/3.3.2/js/bootstrap.min.js"></script> <!--<script src="https://cdnjs.cloudflare.com/ajax/libs/bootstrap-select/1.11.2/js/bootstrap-select.min.js"></script>--> <script type="text/javascript" src="<?php echo get_template_directory_uri(); ?>/js/sorting.js"></script> <!-- Google analytics --> <?php include_once("analyticstracking.php") ?> <!-- <script> (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){ (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o), m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m) })(window,document,'script','https://www.google-analytics.com/analytics.js','ga'); ga('create', 'UA-31278561-1', 'auto'); ga('set', 'anonymizeIp', true); 
ga('send', 'pageview'); </script> --> <script type="text/javascript"> /* $('a[href*="#"]:not([href="#"])').click(function() { if (location.pathname.replace(/^\//, '') == this.pathname.replace(/^\//, '') && location.hostname == this.hostname) { var target = $(this.hash); target = target.length ? target : $('[name=' + this.hash.slice(1) + ']'); if (target.length) { $('html, body').animate({ scrollTop: target.offset().top -10 }, 1000); return false; } } }); */ </script> <script type="text/javascript"> $(".towns .hotspot").on('click', function() { var data_id = $(this).data('id'); $('.oneItem').each(function() { var el = $(this); if (el.attr('id') == data_id) { $('html, body').animate({ scrollTop: el.offset().top -10 }, 1000); return false; } }) }); </script> <script type="text/javascript"> $(document).ready(function() { $('.dropdown-menu').addClass('subnavigation'); }); </script> <script type="text/javascript"> $(document).ready(function() { }); </script> </body> </html>
{ "content_hash": "4bc818102a284c6c20c946c9cca643e3", "timestamp": "", "source": "github", "line_count": 377, "max_line_length": 381, "avg_line_length": 35.6710875331565, "alnum_prop": 0.5315288518738845, "repo_name": "belowKarim/skoda-wp-template", "id": "deb709e79099f42147c69b26b59050e39590515d", "size": "13457", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "online/wp-content/themes/skoda/pages/tmpl_team_rhein_main.php", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "1225" }, { "name": "CSS", "bytes": "1837627" }, { "name": "HTML", "bytes": "557976" }, { "name": "JavaScript", "bytes": "2551016" }, { "name": "PHP", "bytes": "22016110" }, { "name": "PLSQL", "bytes": "10883220" }, { "name": "Perl", "bytes": "408" }, { "name": "Shell", "bytes": "1442" } ], "symlink_target": "" }
package com.bornneet.generativepolygon;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PointF;
import android.util.AttributeSet;
import android.view.View;

import java.util.ArrayList;
import java.util.List;
import java.util.Random;

import static java.util.Collections.*;

/**
 * Created by tnantoka on 7/17/16.
 *
 * Custom view that renders a random collection of colourful polygons into an
 * off-screen bitmap and blits it onto the canvas on each draw pass.
 */
public class PolygonView extends View {

    // One shared RNG instead of the original `new Random()` on every call to
    // getColor()/getPolygons()/getNumber(): per-call instances allocate
    // needlessly and, being time-seeded, can produce correlated values when
    // created within the same millisecond.
    private static final Random RANDOM = new Random();

    // Material-design palette; parsed once instead of on every line segment
    // (the original rebuilt and re-parsed this 19-entry array per getColor() call).
    private static final int[] COLORS = {
            Color.parseColor("#f44336"),
            Color.parseColor("#e91e63"),
            Color.parseColor("#9c27b0"),
            Color.parseColor("#673ab7"),
            Color.parseColor("#3f51b5"),
            Color.parseColor("#2196f3"),
            Color.parseColor("#03a9f4"),
            Color.parseColor("#00bcd4"),
            Color.parseColor("#009688"),
            Color.parseColor("#4caf50"),
            Color.parseColor("#8bc34a"),
            Color.parseColor("#cddc39"),
            Color.parseColor("#ffeb3b"),
            Color.parseColor("#ffc107"),
            Color.parseColor("#ff9800"),
            Color.parseColor("#ff5722"),
            Color.parseColor("#795548"),
            Color.parseColor("#9e9e9e"),
            Color.parseColor("#607d8b"),
    };

    int circuits = 3;   // number of concentric radius "rings" vertices may snap to
    int number;         // vertex count used for the polygons of one draw pass
    Bitmap bitmap;      // off-screen buffer the polygons are rendered into

    private Paint paint = new Paint();

    public PolygonView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);

        paint.setStrokeWidth(3);
        paint.setAntiAlias(true);

        // Reuse the off-screen bitmap while the view size is unchanged; the
        // original allocated a full-size ARGB_8888 bitmap on every draw pass,
        // which is the classic allocation-in-onDraw anti-pattern.
        if (bitmap == null
                || bitmap.getWidth() != getWidth()
                || bitmap.getHeight() != getHeight()) {
            bitmap = Bitmap.createBitmap(getWidth(), getHeight(), Bitmap.Config.ARGB_8888);
        } else {
            // A fresh bitmap starts transparent; clear the reused one to match.
            bitmap.eraseColor(Color.TRANSPARENT);
        }
        Canvas bitmapCanvas = new Canvas(bitmap);
        // bitmapCanvas.drawColor(Color.WHITE);

        number = getNumber();
        for (int i = 0; i < getPolygons(); i++) {
            drawPolygon(bitmapCanvas);
        }

        canvas.drawBitmap(bitmap, 0, 0, null);
    }

    /**
     * Draws one polygon: walks the circle in (360 / number)-degree steps,
     * connecting successive vertices with randomly coloured, semi-transparent
     * lines; each vertex sits on a randomly chosen radius ring.
     */
    private void drawPolygon(Canvas canvas) {
        float radius = getRadius();
        PointF firstPoint = getPoint(0, radius);
        PointF lastPoint = firstPoint;

        int step = 360 / number;
        for (int i = 1; i <= 360; i += step) {
            paint.setColor(getColor());
            paint.setAlpha(200);

            float degree = i;
            PointF point = getPoint(degree, radius);
            canvas.drawLine(lastPoint.x, lastPoint.y, point.x, point.y, paint);

            lastPoint = point;
            radius = getRadius();
        }
        // Close the shape back to the starting vertex (integer step means the
        // loop rarely lands exactly on 360 degrees).
        canvas.drawLine(lastPoint.x, lastPoint.y, firstPoint.x, firstPoint.y, paint);
    }

    /** Converts polar coordinates (degrees, radius) to a point centred in the view. */
    private PointF getPoint(float degree, float radius) {
        float centerX = getWidth() / 2;
        float centerY = getHeight() / 2;

        double radian = degree * Math.PI / 180;
        float x = (float) Math.cos(radian) * radius + centerX;
        float y = (float) Math.sin(radian) * radius + centerY;

        return new PointF(x, y);
    }

    /** Picks one of `circuits` evenly spaced radii at random. */
    private float getRadius() {
        float maxRadius = (getWidth() - getResources().getDimension(R.dimen.activity_horizontal_margin)) / 2;
        float step = maxRadius / circuits;

        List<Float> radiuses = new ArrayList<Float>();
        for (float i = step; i <= maxRadius; i += step) {
            radiuses.add(i);
        }

        // Uniform random pick; the original shuffled the whole list just to
        // read element 0 — same distribution, O(n) extra work per call.
        return radiuses.get(RANDOM.nextInt(radiuses.size()));
    }

    /** Returns a random palette colour. */
    private int getColor() {
        return COLORS[RANDOM.nextInt(COLORS.length)];
    }

    /** Number of polygons per draw pass: 5..19. */
    private int getPolygons() {
        return RANDOM.nextInt(15) + 5;
    }

    /** Vertex count per polygon: 5..39. */
    private int getNumber() {
        return RANDOM.nextInt(35) + 5;
    }
}
{ "content_hash": "e0c7d45a555d1065591d5bf1e17f2ec1", "timestamp": "", "source": "github", "line_count": 134, "max_line_length": 109, "avg_line_length": 28.29850746268657, "alnum_prop": 0.5751582278481012, "repo_name": "tnantoka/itoa", "id": "7000cce4f5881dfbecdc5dc1881bc962790f274b", "size": "3792", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "GenerativePolygon/app/src/main/java/com/bornneet/generativepolygon/PolygonView.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "54930" } ], "symlink_target": "" }
// Opens a small fixed-size popup for streaming playback and focuses it.
function streaming(url) {
    // Bug fix: 'resizable' is the feature name window.open understands; the
    // original 'resizeable' flag was silently ignored by browsers.
    newwindow = window.open(url, 'name', 'scrollbars=0, resizable=0, width=415, height=150');
    if (window.focus) { newwindow.focus(); }
    return false;
}

// Closes the popup opened by streaming(), if one exists.
function closewin() {
    // Bug fix: the original called newWindow.close() — an undefined variable
    // (wrong casing; streaming() assigns the global 'newwindow') — guarded by
    // the unrelated global 'name'. Guard on the actual popup handle instead.
    if (typeof newwindow !== 'undefined' && newwindow) {
        newwindow.close();
    }
}

// Resets the change-password dialog: blanks the three inputs and the alert area.
function clear() {
    $("#passwordlama").val("");
    $("#passwordbaru").val("");
    $("#passwordbaru2").val("");
    $("#alert-msg-ubahpass").empty();
}

// Reset the dialog whenever the Bootstrap modal is hidden.
$('#ubahpassword').on('hidden', function () {
    clear();
});

// Submits the change-password form via AJAX and renders the server's verdict
// (YES = saved, NO1 = old password wrong, NO2 = new passwords differ).
$('#ubahpass').click(function () {
    var form_data = {
        passwordlama: $('#passwordlama').val(),
        passwordbaru: $('#passwordbaru').val(),
        passwordbaru2: $('#passwordbaru2').val()
    };
    $.ajax({
        url: base_url + "dashboard_laboratorium/ubahpass",
        type: 'POST',
        data: form_data,
        success: function (msg) {
            if (msg == 'YES') {
                $('#alert-msg-ubahpass').html('<div class="alert alert-success text-center">Password Berhasil Disimpan!</div>');
                $("#ubahpassword").fadeTo(10000, 5000).slideUp(2000, function () {
                    $("#ubahpassword").modal('hide');
                });
                window.location.href = base_url + "dashboard_laboratorium";
            } else if (msg == 'NO1') {
                $('#alert-msg-ubahpass').html('<div class="alert alert-danger text-center">Password Lama Anda Salah!</div>');
            } else if (msg == 'NO2') {
                $('#alert-msg-ubahpass').html('<div class="alert alert-danger text-center">Password Baru Anda Tidak Sama!</div>');
            } else {
                $('#alert-msg-ubahpass').html('<div class="alert alert-danger">' + msg + '</div>');
            }
        }
    });
    return false;
});

// The three password inputs toggled by the show/hide buttons.
var PASSWORD_FIELD_IDS = ['passwordlama', 'passwordbaru', 'passwordbaru2'];

// Flips the visibility of the show/hide toggle buttons
// (showing === true means the fields are currently plain text).
function setShowHideIcons(showing) {
    document.getElementById("show").style.display = showing ? "none" : "block";
    document.getElementById("hide").style.display = showing ? "block" : "none";
}

// Reveals any non-empty password field as plain text.
// (Deduplicated: the original repeated the same block three times, once per field.)
function ShowPassword() {
    PASSWORD_FIELD_IDS.forEach(function (id) {
        var field = document.getElementById(id);
        if (field.value != "") {
            field.type = "text";
            setShowHideIcons(true);
        }
    });
}

// Masks any currently revealed password field again.
function HidePassword() {
    PASSWORD_FIELD_IDS.forEach(function (id) {
        var field = document.getElementById(id);
        if (field.type == "text") {
            field.type = "password";
            setShowHideIcons(false);
        }
    });
}

// Linked date-range pickers: choosing a start date constrains the end picker's
// minimum, and choosing an end date constrains the start picker's maximum.
$(document).ready(function () {
    $("#tampil_data_awal_laborat").datepicker({
        todayBtn: 1,
        autoclose: true
    }).on('changeDate', function (selected) {
        var minDate = new Date(selected.date.valueOf());
        $('#tampil_data_akhir_laborat').datepicker('setStartDate', minDate);
    });

    $("#tampil_data_akhir_laborat").datepicker({
        todayBtn: 1,
        autoclose: true
    }).on('changeDate', function (selected) {
        var maxDate = new Date(selected.date.valueOf());
        $('#tampil_data_awal_laborat').datepicker('setEndDate', maxDate);
    });
});
{ "content_hash": "4f92e7b5498ac4ac596087901be49f5f", "timestamp": "", "source": "github", "line_count": 130, "max_line_length": 128, "avg_line_length": 30.384615384615383, "alnum_prop": 0.5944303797468354, "repo_name": "ucil93/royalemr", "id": "746a8c597b833d9ac58a101c14f67292ad7f8887", "size": "3950", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "assets/scripts/header_labo.js", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "420" }, { "name": "CSS", "bytes": "3245228" }, { "name": "CoffeeScript", "bytes": "83631" }, { "name": "HTML", "bytes": "392072" }, { "name": "JavaScript", "bytes": "8237042" }, { "name": "PHP", "bytes": "2732677" }, { "name": "Shell", "bytes": "444" } ], "symlink_target": "" }
ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
{ "content_hash": "bba9e79625b9662fac1ddc6b4c42657e", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 31, "avg_line_length": 9.692307692307692, "alnum_prop": 0.7063492063492064, "repo_name": "mdoering/backbone", "id": "b7541d0f4cc0efc43bcb5c34538ef2668c3e2135", "size": "175", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Myrtales/Melastomataceae/Medinilla/Medinilla micrantha/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
// FilenameComponent: a filename/path chooser made of an editable combo box of
// recently used files plus a "browse" button, with drag-and-drop file support.
// (Class declaration and file header are outside this chunk.)

FilenameComponent::FilenameComponent (const String& name,
                                      const File& currentFile,
                                      const bool canEditFilename,
                                      const bool isDirectory,
                                      const bool isForSaving,
                                      const String& fileBrowserWildcard,
                                      const String& suffix,
                                      const String& textWhenNothingSelected)
    : Component (name),
      maxRecentFiles (30),
      isDir (isDirectory),
      isSaving (isForSaving),
      isFileDragOver (false),
      wildcard (fileBrowserWildcard),
      enforcedSuffix (suffix)
{
    addAndMakeVisible (filenameBox);
    filenameBox.setEditableText (canEditFilename);
    filenameBox.addListener (this);
    filenameBox.setTextWhenNothingSelected (textWhenNothingSelected);
    filenameBox.setTextWhenNoChoicesAvailable (TRANS ("(no recently selected files)"));

    // Creates the browse button (via lookAndFeelChanged) before the initial file is set.
    setBrowseButtonText ("...");

    // dontSendNotification: listeners are not told about the initial value.
    setCurrentFile (currentFile, true, dontSendNotification);
}

FilenameComponent::~FilenameComponent()
{
}

//==============================================================================
// Draws a red overlay border while a file is being dragged over the component.
void FilenameComponent::paintOverChildren (Graphics& g)
{
    if (isFileDragOver)
    {
        g.setColour (Colours::red.withAlpha (0.2f));
        g.drawRect (getLocalBounds(), 3);
    }
}

// Delegates child layout to the current LookAndFeel.
void FilenameComponent::resized()
{
    getLookAndFeel().layoutFilenameComponent (*this, &filenameBox, browseButton);
}

KeyboardFocusTraverser* FilenameComponent::createFocusTraverser()
{
    // This prevents the sub-components from grabbing focus if the
    // FilenameComponent has been set to refuse focus.
    return getWantsKeyboardFocus() ? Component::createFocusTraverser() : nullptr;
}

// Stores the button label and rebuilds the button via lookAndFeelChanged().
void FilenameComponent::setBrowseButtonText (const String& newBrowseButtonText)
{
    browseButtonText = newBrowseButtonText;
    lookAndFeelChanged();
}

// Recreates the browse button so the LookAndFeel can supply its own style.
void FilenameComponent::lookAndFeelChanged()
{
    browseButton = nullptr;

    addAndMakeVisible (browseButton = getLookAndFeel().createFilenameComponentBrowseButton (browseButtonText));
    browseButton->setConnectedEdges (Button::ConnectedOnLeft);
    resized();

    browseButton->addListener (this);
}

// Applies the tooltip to both this component and the inner combo box.
void FilenameComponent::setTooltip (const String& newTooltip)
{
    SettableTooltipClient::setTooltip (newTooltip);
    filenameBox.setTooltip (newTooltip);
}

void FilenameComponent::setDefaultBrowseTarget (const File& newDefaultDirectory)
{
    defaultBrowseFile = newDefaultDirectory;
}

// Where the file chooser should start: the current file, or the default
// browse target when nothing has been selected yet.
File FilenameComponent::getLocationToBrowse()
{
    return getCurrentFile() == File::nonexistent ? defaultBrowseFile
                                                 : getCurrentFile();
}

// Browse button pressed: shows a modal file/directory chooser and, if the user
// confirms, adopts the chosen file. Only available when modal loops are permitted.
void FilenameComponent::buttonClicked (Button*)
{
   #if JUCE_MODAL_LOOPS_PERMITTED
    FileChooser fc (isDir ? TRANS ("Choose a new directory")
                          : TRANS ("Choose a new file"),
                    getLocationToBrowse(),
                    wildcard);

    if (isDir ? fc.browseForDirectory()
              : (isSaving ? fc.browseForFileToSave (false)
                          : fc.browseForFileToOpen()))
    {
        setCurrentFile (fc.getResult(), true);
    }
   #else
    jassertfalse; // needs rewriting to deal with non-modal environments
   #endif
}

// Combo box edited or a recent item picked: re-apply the (possibly new) text.
void FilenameComponent::comboBoxChanged (ComboBox*)
{
    setCurrentFile (getCurrentFile(), true);
}

// Accepts any file drag; filesDropped() filters on file-vs-directory.
bool FilenameComponent::isInterestedInFileDrag (const StringArray&)
{
    return true;
}

// Adopts the first dropped item if it exists and matches the expected kind.
void FilenameComponent::filesDropped (const StringArray& filenames, int, int)
{
    isFileDragOver = false;
    repaint();

    const File f (filenames[0]);

    if (f.exists() && (f.isDirectory() == isDir))
        setCurrentFile (f, true);
}

void FilenameComponent::fileDragEnter (const StringArray&, int, int)
{
    isFileDragOver = true;
    repaint();
}

void FilenameComponent::fileDragExit (const StringArray&)
{
    isFileDragOver = false;
    repaint();
}

//==============================================================================
// Raw text currently shown in the combo box (may be a relative path).
String FilenameComponent::getCurrentFileText() const
{
    return filenameBox.getText();
}

// The box text resolved against the current working directory, with the
// enforced suffix (if any) applied.
File FilenameComponent::getCurrentFile() const
{
    File f (File::getCurrentWorkingDirectory().getChildFile (getCurrentFileText()));

    if (enforcedSuffix.isNotEmpty())
        f = f.withFileExtension (enforcedSuffix);

    return f;
}

// Central state change: normalises the suffix, updates the box text, optionally
// records the file in the recent list, and notifies listeners (async by default,
// synchronously when sendNotificationSync is requested).
void FilenameComponent::setCurrentFile (File newFile,
                                        const bool addToRecentlyUsedList,
                                        NotificationType notification)
{
    if (enforcedSuffix.isNotEmpty())
        newFile = newFile.withFileExtension (enforcedSuffix);

    if (newFile.getFullPathName() != lastFilename)
    {
        lastFilename = newFile.getFullPathName();

        if (addToRecentlyUsedList)
            addRecentlyUsedFile (newFile);

        // dontSendNotification here: the listener callback is driven by the
        // async update below, not by the combo box change event.
        filenameBox.setText (lastFilename, dontSendNotification);

        if (notification != dontSendNotification)
        {
            triggerAsyncUpdate();

            if (notification == sendNotificationSync)
                handleUpdateNowIfNeeded();
        }
    }
}

void FilenameComponent::setFilenameIsEditable (const bool shouldBeEditable)
{
    filenameBox.setEditableText (shouldBeEditable);
}

// Snapshot of the recent-files list as shown in the combo box.
StringArray FilenameComponent::getRecentlyUsedFilenames() const
{
    StringArray names;

    for (int i = 0; i < filenameBox.getNumItems(); ++i)
        names.add (filenameBox.getItemText (i));

    return names;
}

// Replaces the recent-files list (truncated to maxRecentFiles). No-op when the
// incoming list is identical, to avoid disturbing the combo box needlessly.
void FilenameComponent::setRecentlyUsedFilenames (const StringArray& filenames)
{
    if (filenames != getRecentlyUsedFilenames())
    {
        filenameBox.clear();

        for (int i = 0; i < jmin (filenames.size(), maxRecentFiles); ++i)
            filenameBox.addItem (filenames[i], i + 1);
    }
}

// Clamps the limit to at least 1 and re-applies the list to enforce it.
void FilenameComponent::setMaxNumberOfRecentFiles (const int newMaximum)
{
    maxRecentFiles = jmax (1, newMaximum);

    setRecentlyUsedFilenames (getRecentlyUsedFilenames());
}

// Moves (or inserts) the file at the top of the recent-files list.
void FilenameComponent::addRecentlyUsedFile (const File& file)
{
    StringArray files (getRecentlyUsedFilenames());

    if (file.getFullPathName().isNotEmpty())
    {
        files.removeString (file.getFullPathName(), true);
        files.insert (0, file.getFullPathName());

        setRecentlyUsedFilenames (files);
    }
}

//==============================================================================
void FilenameComponent::addListener (FilenameComponentListener* const listener)
{
    listeners.add (listener);
}

void FilenameComponent::removeListener (FilenameComponentListener* const listener)
{
    listeners.remove (listener);
}

// Async callback triggered by setCurrentFile: tells listeners the filename
// changed, bailing out safely if this component is deleted mid-callback.
void FilenameComponent::handleAsyncUpdate()
{
    Component::BailOutChecker checker (this);
    listeners.callChecked (checker, &FilenameComponentListener::filenameComponentChanged, this);
}
{ "content_hash": "c9134a7c45d3cd64732b50f5379dd2af", "timestamp": "", "source": "github", "line_count": 245, "max_line_length": 111, "avg_line_length": 29.02857142857143, "alnum_prop": 0.6212035995500562, "repo_name": "jbat100/sonosthesia-relay", "id": "2a69425e4bb535e57e67612057614c1cc5f64e40", "size": "8039", "binary": false, "copies": "16", "ref": "refs/heads/master", "path": "JuceLibraryCode/modules/juce_gui_basics/filebrowser/juce_FilenameComponent.cpp", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "5781781" }, { "name": "C++", "bytes": "11755636" }, { "name": "Java", "bytes": "73692" }, { "name": "Objective-C", "bytes": "114002" }, { "name": "Objective-C++", "bytes": "690324" }, { "name": "R", "bytes": "7401" } ], "symlink_target": "" }
SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
{ "content_hash": "da7e09169b25e199b2780d4ae602091d", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 39, "avg_line_length": 10.23076923076923, "alnum_prop": 0.6917293233082706, "repo_name": "mdoering/backbone", "id": "114a6e66333f42748472751899287f77d0919c5a", "size": "185", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Campanulaceae/Cyanea/Cyanea pilosa/ Syn. Cyanea megacarpa/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
from typing import Any, TYPE_CHECKING

from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy

from ._version import VERSION

if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from azure.core.credentials import TokenCredential


class IotHubClientConfiguration(Configuration):  # pylint: disable=too-many-instance-attributes
    """Configuration for IotHubClient.

    Note that all parameters used to create this instance are saved as instance attributes.

    :param credential: Credential needed for the client to connect to Azure. Required.
    :type credential: ~azure.core.credentials.TokenCredential
    :param subscription_id: The subscription identifier. Required.
    :type subscription_id: str
    :keyword api_version: Api Version. Default value is "2016-02-03". Note that overriding
     this default value may result in unsupported behavior.
    :paramtype api_version: str
    """

    def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None:
        super().__init__(**kwargs)
        api_version: str = kwargs.pop("api_version", "2016-02-03")

        # Fail fast on the two required parameters.
        if credential is None:
            raise ValueError("Parameter 'credential' must not be None.")
        if subscription_id is None:
            raise ValueError("Parameter 'subscription_id' must not be None.")

        self.credential = credential
        self.subscription_id = subscription_id
        self.api_version = api_version
        self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"])
        kwargs.setdefault("sdk_moniker", f"mgmt-iothub/{VERSION}")
        self._configure(**kwargs)

    def _configure(self, **kwargs: Any) -> None:
        # Build the pipeline policies; each one can be overridden through kwargs,
        # otherwise the default azure-core implementation is constructed.
        self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs)
        self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs)
        self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
        self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
        self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs)
        self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs)
        self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
        self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs)
        self.authentication_policy = kwargs.get("authentication_policy")
        # Only synthesize an authentication policy when a credential was supplied
        # and the caller did not provide an explicit policy.
        if self.credential and not self.authentication_policy:
            self.authentication_policy = ARMChallengeAuthenticationPolicy(
                self.credential, *self.credential_scopes, **kwargs
            )
{ "content_hash": "62184013414eac388f89585b2ad8a72c", "timestamp": "", "source": "github", "line_count": 61, "max_line_length": 107, "avg_line_length": 49.721311475409834, "alnum_prop": 0.7055720408836136, "repo_name": "Azure/azure-sdk-for-python", "id": "fe393233cee18d6faf328ed139297c8a643ded84", "size": "3501", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "sdk/iothub/azure-mgmt-iothub/azure/mgmt/iothub/v2016_02_03/_configuration.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "1224" }, { "name": "Bicep", "bytes": "24196" }, { "name": "CSS", "bytes": "6089" }, { "name": "Dockerfile", "bytes": "4892" }, { "name": "HTML", "bytes": "12058" }, { "name": "JavaScript", "bytes": "8137" }, { "name": "Jinja", "bytes": "10377" }, { "name": "Jupyter Notebook", "bytes": "272022" }, { "name": "PowerShell", "bytes": "518535" }, { "name": "Python", "bytes": "715484989" }, { "name": "Shell", "bytes": "3631" } ], "symlink_target": "" }
package io.fabric8.gateway.handlers.detecting.protocol;

import io.netty.buffer.ByteBuf;
import org.vertx.java.core.buffer.Buffer;

import java.lang.reflect.Field;
import java.nio.ByteBuffer;
import java.util.ArrayList;

/**
 * BufferSupport contains static methods that assist
 * with working with the vertx Buffer class.
 */
public class BufferSupport {

    /**
     * Returns a copy of the buffer with its last byte removed.
     */
    static public Buffer chomp(Buffer self) {
        return self.getBuffer(0, self.length() - 1);
    }

    /**
     * Strips leading and trailing bytes whose value is &lt;= ' '
     * (ASCII whitespace and control bytes).
     */
    static public Buffer trim(Buffer self) {
        return trimEnd(trimFront(self));
    }

    /**
     * Strips leading bytes &lt;= ' '. Returns the same instance when
     * nothing needs stripping.
     */
    static public Buffer trimFront(Buffer self) {
        int length = self.length();
        int pos = 0;
        while ((pos < length) && (self.getByte(pos) <= ' ')) {
            pos++;
        }
        return (pos == 0) ? self : self.getBuffer(pos, length);
    }

    /**
     * Strips trailing bytes &lt;= ' '. Returns the same instance when
     * nothing needs stripping.
     */
    static public Buffer trimEnd(Buffer self) {
        int length = self.length();
        int pos = length;
        while (pos > 0 && (self.getByte(pos - 1) <= ' ')) {
            pos--;
        }
        // BUGFIX: was (pos == length-1), which returned the buffer *untrimmed*
        // when it ended with exactly one whitespace byte, and made a needless
        // full copy when there was no trailing whitespace at all.
        return (pos == length) ? self : self.getBuffer(0, pos);
    }

    /**
     * Returns the index of the first occurrence of {@code value}, or -1.
     */
    static public int indexOf(Buffer self, byte value) {
        return indexOf(self, 0, self.length(), value);
    }

    /**
     * Returns the index of the first occurrence of {@code value} at or after
     * {@code start}, or -1.
     */
    static public int indexOf(Buffer self, int start, byte value) {
        return indexOf(self, start, self.length(), value);
    }

    /**
     * Returns the index of the first occurrence of {@code value} in the range
     * [start, end), or -1 if not found.
     */
    static public int indexOf(Buffer self, int start, int end, byte value) {
        // Clamp the scan to the buffer length so an over-large `end` cannot
        // read past the end of the buffer.
        int max = Math.min(end, self.length());
        // BUGFIX: the loop previously tested `start < end`, ignoring the
        // clamped bound computed above.
        for (; start < max; start++) {
            if (self.getByte(start) == value) {
                return start;
            }
        }
        return -1;
    }

    /**
     * Returns true if the buffer begins with {@code needle}.
     */
    static public boolean startsWith(Buffer self, Buffer needle) {
        return indexOf(self, 0, needle.length(), needle) == 0;
    }

    /**
     * Returns true if the buffer contains {@code needle} starting at offset
     * {@code start}.
     */
    static public boolean startsWith(Buffer self, int start, Buffer needle) {
        // BUGFIX: indexOf returns an *absolute* index, so a match located at
        // `start` must be compared against `start`; the old `== 0` comparison
        // only worked when start == 0.
        return indexOf(self, start, start + needle.length(), needle) == start;
    }

    /**
     * Returns the first index at or after {@code start} where {@code needle}
     * begins, or -1.
     */
    static public int indexOf(Buffer self, int start, Buffer needle) {
        return indexOf(self, start, self.length(), needle);
    }

    /**
     * Returns the first index where {@code needle} begins, or -1.
     */
    static public int indexOf(Buffer self, Buffer needle) {
        return indexOf(self, 0, self.length(), needle);
    }

    /**
     * Returns the first index in [start, end) where {@code needle} begins,
     * or -1 if not found.
     */
    static public int indexOf(Buffer self, int start, int end, Buffer needle) {
        // Last index at which the needle could still fit inside the buffer.
        int max = Math.min(end, self.length() - needle.length());
        for (int i = start; i <= max; i++) {
            if (matches(self, i, needle)) {
                return i;
            }
        }
        return -1;
    }

    /**
     * Returns true when the bytes at {@code pos} equal {@code needle}.
     * Callers must ensure pos + needle.length() &lt;= self.length().
     */
    static public boolean matches(Buffer self, int pos, Buffer needle) {
        int needleLength = needle.length();
        for (int i = 0; i < needleLength; i++) {
            if (self.getByte(pos + i) != needle.getByte(i)) {
                return false;
            }
        }
        return true;
    }

    // Reflective access to the private netty ByteBuf backing a vertx Buffer;
    // needed because the vertx Buffer API does not expose capacity changes.
    static private final Field bufferField;
    static {
        try {
            bufferField = Buffer.class.getDeclaredField("buffer");
            bufferField.setAccessible(true);
        } catch (NoSuchFieldException e) {
            throw new RuntimeException(e);
        }
    }

    static private ByteBuf getNettyByteBuf(Buffer self) {
        try {
            return (ByteBuf) bufferField.get(self);
        } catch (IllegalAccessException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Forces the capacity of the underlying netty buffer to {@code length}.
     */
    static public void setLength(Buffer self, int length) {
        getNettyByteBuf(self).capacity(length);
    }

    /**
     * Splits the buffer on {@code separator}, omitting empty segments.
     */
    static final public Buffer[] split(Buffer self, byte separator) {
        ArrayList<Buffer> rc = new ArrayList<Buffer>();
        int pos = 0;
        int nextStart = pos;
        int end = self.length();
        while (pos < end) {
            if (self.getByte(pos) == separator) {
                if (nextStart < pos) {
                    rc.add(self.getBuffer(nextStart, pos));
                }
                nextStart = pos + 1;
            }
            pos++;
        }
        if (nextStart < pos) {
            rc.add(self.getBuffer(nextStart, pos));
        }
        return rc.toArray(new Buffer[rc.size()]);
    }

    /**
     * Copies the remaining bytes of {@code buff} into a new vertx Buffer.
     * Note: this consumes the ByteBuffer's remaining bytes.
     */
    public static Buffer toBuffer(ByteBuffer buff) {
        Buffer self = new Buffer(buff.remaining());
        while (buff.hasRemaining()) {
            self.appendByte(buff.get());
        }
        return self;
    }
}
{ "content_hash": "27ccd7ce76990f33ff84e3324bbf558e", "timestamp": "", "source": "github", "line_count": 148, "max_line_length": 79, "avg_line_length": 29.06756756756757, "alnum_prop": 0.5620641562064156, "repo_name": "avano/fabric8", "id": "559350d3289e3ad387ac1462dd33b77a0cbc6c8d", "size": "4940", "binary": false, "copies": "4", "ref": "refs/heads/1.2.0.redhat-6-3-x", "path": "gateway/gateway-core/src/main/java/io/fabric8/gateway/handlers/detecting/protocol/BufferSupport.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ApacheConf", "bytes": "52" }, { "name": "Batchfile", "bytes": "12242" }, { "name": "CSS", "bytes": "10262" }, { "name": "HTML", "bytes": "23749" }, { "name": "Java", "bytes": "9512945" }, { "name": "JavaScript", "bytes": "605777" }, { "name": "Protocol Buffer", "bytes": "899" }, { "name": "Scala", "bytes": "5260" }, { "name": "Shell", "bytes": "62820" } ], "symlink_target": "" }
[![Build Status](https://api.travis-ci.org/hmlingesh/dynatrace-service-broker.svg?branch=master)](https://api.travis-ci.org/hmlingesh/dynatrace-service-broker.svg)

A Java Spring Boot application has been developed as a service broker which provides the profile and server details for the Dynatrace server and collector; these are used by the Dynatrace agent to send trace data to them. This service broker will be registered in Cloud Foundry, so that in future we can create a service and, from that service, bind VCAP_SERVICES values to any application.

## Steps to deploy and register the service broker

## Maven and Java

The application is a Java Spring Boot application and is packaged as an executable JAR file. To build the application, the user needs the Maven build tool.

## Deploy the service broker application in Cloud Foundry

Here our assumption is that you have the CF CLI preinstalled. To deploy the application, either the user needs a manifest file containing all the required entries, or the user can directly push the application by passing arguments via the CF CLI. If the user does not pass a buildpack URL, the default Cloud Foundry buildpack URL will be used.

To deploy, run the following commands:

If the user has a manifest file

```bash
$ mvn package
$ cf push
```

If the user is passing arguments from the CF CLI

```bash
$ mvn package
$ cf push <app_name> -p <jar_name>
```

## Register the service broker in Cloud Foundry

To register the service broker in Cloud Foundry, use the following steps:

```bash
$ cf create-service-broker <service_broker> <username> <password> <url_for_service_broker>
```

## Marketplace

List all services, plans and descriptions

```bash
$ cf marketplace
```

## Create a service from the service broker

```bash
$ cf create-service <service_broker> <plans> <service_name>
```

Now the user can bind any application to this created service.

## Environment Variables

Since the application is designed to work in a PaaS environment, all configuration is done with environment variables.
The `server` and `profile` values are the only ones provided by Dynatrace. All others are unique to a deployment.

| Key | Description
| --- | -----------
| `server` | The Dynatrace server IP with port number to provide to all applications.
| `profile` | The profile that can be used as the agent name when setting the Java agent path. This can be any value.

## Model Notes

The model is for the REST/Controller level. It can be extended as needed.
{ "content_hash": "01caebf54cb66b953bed3aa0daa0cd4a", "timestamp": "", "source": "github", "line_count": 53, "max_line_length": 375, "avg_line_length": 45.509433962264154, "alnum_prop": 0.771558872305141, "repo_name": "alokhm/dynatrace-service-broker", "id": "93da30ef954f82061214ddac44a8882fcb52d801", "size": "2470", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "55749" } ], "symlink_target": "" }
package org.eigenbase.rex;

import java.lang.reflect.Modifier;
import java.lang.reflect.Type;
import java.util.*;

import org.eigenbase.relopt.RelOptPlanner;
import org.eigenbase.reltype.RelDataType;
import org.eigenbase.reltype.RelDataTypeFactory;

import net.hydromatic.linq4j.expressions.*;
import net.hydromatic.optiq.BuiltinMethod;
import net.hydromatic.optiq.DataContext;
import net.hydromatic.optiq.impl.java.JavaTypeFactory;
import net.hydromatic.optiq.jdbc.JavaTypeFactoryImpl;
import net.hydromatic.optiq.prepare.OptiqPrepareImpl;
import net.hydromatic.optiq.rules.java.RexToLixTranslator;
import net.hydromatic.optiq.rules.java.RexToLixTranslator.InputGetter;

import com.google.common.collect.ImmutableList;

/**
 * Evaluates a {@link RexNode} expression.
 *
 * <p>It does so by translating the expressions into Java source via linq4j,
 * compiling it (see {@link RexExecutable}), and executing the generated
 * function.</p>
 */
public class RexExecutorImpl implements RelOptPlanner.Executor {
  private final DataContext dataContext;

  public RexExecutorImpl(DataContext dataContext) {
    this.dataContext = dataContext;
  }

  /**
   * Compiles the given expressions against an empty input row type.
   * Used by {@link #reduce}, where the expressions are constant and therefore
   * reference no input fields.
   */
  private String compile(RexBuilder rexBuilder, List<RexNode> constExps,
      RexToLixTranslator.InputGetter getter) {
    final RelDataTypeFactory typeFactory = rexBuilder.getTypeFactory();
    final RelDataType emptyRowType = typeFactory.builder().build();
    return compile(rexBuilder, constExps, getter, emptyRowType);
  }

  /**
   * Translates the expressions into the Java source of a function
   * {@code Object[] apply(Object root0)} and returns it as a string.
   *
   * <p>The generated body declares a {@code DataContext} local (cast from the
   * untyped {@code root0} parameter), evaluates each projection, and returns
   * the results as an {@code Object[]}.</p>
   */
  private String compile(RexBuilder rexBuilder, List<RexNode> constExps,
      RexToLixTranslator.InputGetter getter, RelDataType rowType) {
    // Wrap each expression as a projection "c0", "c1", ... of a RexProgram.
    final RexProgramBuilder programBuilder =
        new RexProgramBuilder(rowType, rexBuilder);
    for (RexNode node : constExps) {
      programBuilder.addProject(
          node, "c" + programBuilder.getProjectList().size());
    }
    final JavaTypeFactoryImpl javaTypeFactory =
        new JavaTypeFactoryImpl(rexBuilder.getTypeFactory().getTypeSystem());
    final BlockBuilder blockBuilder = new BlockBuilder();
    // The generated method takes an untyped parameter and immediately narrows
    // it: "final DataContext root = (DataContext) root0;".
    final ParameterExpression root0_ =
        Expressions.parameter(Object.class, "root0");
    final ParameterExpression root_ = DataContext.ROOT;
    blockBuilder.add(
        Expressions.declare(
            Modifier.FINAL, root_,
            Expressions.convert_(root0_, DataContext.class)));
    final List<Expression> expressions =
        RexToLixTranslator.translateProjects(programBuilder.getProgram(),
            javaTypeFactory, blockBuilder, null, getter);
    blockBuilder.add(
        Expressions.return_(null,
            Expressions.newArrayInit(Object[].class, expressions)));
    // Name the method after Function1.apply so RexExecutable can invoke it.
    final MethodDeclaration methodDecl =
        Expressions.methodDecl(Modifier.PUBLIC, Object[].class,
            BuiltinMethod.FUNCTION1_APPLY.method.getName(),
            ImmutableList.of(root0_), blockBuilder.toBlock());
    String code = Expressions.toString(methodDecl);
    if (OptiqPrepareImpl.DEBUG) {
      System.out.println(code);
    }
    return code;
  }

  /**
   * Creates an {@link RexExecutable} that allows to apply the
   * generated code during query processing (filter, projection).
   *
   * @param rexBuilder Rex builder
   * @param exps Expressions
   * @param rowType describes the structure of the input row.
   */
  public RexExecutable getExecutable(RexBuilder rexBuilder, List<RexNode> exps,
      RelDataType rowType) {
    final InputGetter getter =
        new DataContextInputGetter(rowType, rexBuilder.getTypeFactory());
    final String code = compile(rexBuilder, exps, getter, rowType);
    return new RexExecutable(code, "generated Rex code");
  }

  /**
   * Do constant reduction using generated code.
   */
  public void reduce(RexBuilder rexBuilder, List<RexNode> constExps,
      List<RexNode> reducedValues) {
    final String code = compile(rexBuilder, constExps,
        new RexToLixTranslator.InputGetter() {
          public Expression field(BlockBuilder list, int index,
              Type storageType) {
            // Constant expressions must not reference input fields.
            throw new UnsupportedOperationException();
          }
        });
    final RexExecutable executable = new RexExecutable(code, constExps);
    executable.setDataContext(dataContext);
    executable.reduce(rexBuilder, constExps, reducedValues);
  }

  /**
   * Implementation of
   * {@link net.hydromatic.optiq.rules.java.RexToLixTranslator.InputGetter}
   * that reads the values of input fields by calling
   * <code>{@link net.hydromatic.optiq.DataContext#get}("inputRecord")</code>.
   */
  private static class DataContextInputGetter implements InputGetter {
    private final RelDataTypeFactory typeFactory;
    private final RelDataType rowType;

    public DataContextInputGetter(RelDataType rowType,
        RelDataTypeFactory typeFactory) {
      this.rowType = rowType;
      this.typeFactory = typeFactory;
    }

    public Expression field(BlockBuilder list, int index, Type storageType) {
      // Generates: ((Object[]) root.get("inputRecord"))[index], converted to
      // the field's Java storage type.
      MethodCallExpression recFromCtx = Expressions.call(
          DataContext.ROOT,
          BuiltinMethod.DATA_CONTEXT_GET.method,
          Expressions.constant("inputRecord"));
      Expression recFromCtxCasted =
          RexToLixTranslator.convert(recFromCtx, Object[].class);
      IndexExpression recordAccess = Expressions.arrayIndex(recFromCtxCasted,
          Expressions.constant(index));
      if (storageType == null) {
        final RelDataType fieldType =
            rowType.getFieldList().get(index).getType();
        storageType = ((JavaTypeFactory) typeFactory).getJavaClass(fieldType);
      }
      return RexToLixTranslator.convert(recordAccess, storageType);
    }
  }
}

// End RexExecutorImpl.java
{ "content_hash": "b22ca629ff286f55a739d51924f3ba6e", "timestamp": "", "source": "github", "line_count": 145, "max_line_length": 79, "avg_line_length": 37.641379310344824, "alnum_prop": 0.7259069256137779, "repo_name": "sungsoo/optiq-project", "id": "da34a558a3be788e13e9335e4324b02980982b7f", "size": "6255", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "core/src/main/java/org/eigenbase/rex/RexExecutorImpl.java", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "Erlang", "bytes": "4234" }, { "name": "Java", "bytes": "9312202" }, { "name": "Perl", "bytes": "539" }, { "name": "Shell", "bytes": "3400" } ], "symlink_target": "" }
#ifndef __CONFIG_H
#define __CONFIG_H

/* SoC type is defined in boards.cfg */
#include <asm/hardware.h>
#include <linux/sizes.h>

#define CONFIG_SYS_TEXT_BASE 0x73f00000

/* ARM asynchronous clock */
#define CONFIG_SYS_AT91_MAIN_CLOCK 12000000 /* from 12 MHz crystal */
#define CONFIG_SYS_AT91_SLOW_CLOCK 32768

/* CPU */
#define CONFIG_CMDLINE_TAG /* enable passing of ATAGs */
#define CONFIG_SETUP_MEMORY_TAGS
#define CONFIG_INITRD_TAG
#define CONFIG_SKIP_LOWLEVEL_INIT_ONLY

/* SDRAM */
#define CONFIG_NR_DRAM_BANKS 1
#define CONFIG_SYS_SDRAM_BASE ATMEL_BASE_CS6
#define CONFIG_SYS_SDRAM_SIZE (128 * 1024 * 1024) /* 128 MiB; previous comment incorrectly said 64MB */
#define CONFIG_SYS_INIT_SP_ADDR (ATMEL_BASE_SRAM + 0x1000 - \
				GENERATED_GBL_DATA_SIZE)

/* Mem test settings */
#define CONFIG_SYS_MEMTEST_START CONFIG_SYS_SDRAM_BASE
#define CONFIG_SYS_MEMTEST_END (CONFIG_SYS_SDRAM_BASE + (1024 * 1024)) /* test first 1 MiB */

/* NAND Flash */
#define CONFIG_NAND_ATMEL
#define CONFIG_ATMEL_NAND_HWECC
#define CONFIG_SYS_NAND_ECC_BASE ATMEL_BASE_ECC
#define CONFIG_SYS_MAX_NAND_DEVICE 1
#define CONFIG_SYS_NAND_BASE ATMEL_BASE_CS3
#define CONFIG_SYS_NAND_DBW_8
#define CONFIG_SYS_NAND_MASK_ALE (1 << 21) /* AD21 */
#define CONFIG_SYS_NAND_MASK_CLE (1 << 22) /* AD22 */
#define CONFIG_SYS_NAND_ENABLE_PIN AT91_PIN_PC14
#define CONFIG_SYS_NAND_READY_PIN AT91_PIN_PC8

/* Ethernet */
#define CONFIG_MACB
#define CONFIG_RMII
#define CONFIG_NET_RETRY_COUNT 20
#define CONFIG_RESET_PHY_R
#define CONFIG_AT91_WANTS_COMMON_PHY
#define CONFIG_TFTP_PORT
#define CONFIG_TFTP_TSIZE

/* MMC */
#define CONFIG_GENERIC_ATMEL_MCI

/* LCD */
#define CONFIG_ATMEL_LCD
#define CONFIG_GURNARD_SPLASH

#define CONFIG_ATMEL_SPI

/* GPIOs and IO expander */
#define CONFIG_ATMEL_LEGACY
#define CONFIG_AT91_GPIO
#define CONFIG_AT91_GPIO_PULLUP 1

/* UARTs/Serial console */
#define CONFIG_ATMEL_USART

/* Boot options */
#define CONFIG_SYS_LOAD_ADDR 0x23000000

#define CONFIG_BOOTP_BOOTFILESIZE
#define CONFIG_BOOTP_BOOTPATH
#define CONFIG_BOOTP_GATEWAY
#define CONFIG_BOOTP_HOSTNAME

/* Environment settings */
#define CONFIG_ENV_OFFSET (512 << 10) /* 512 KiB */
#define CONFIG_ENV_SIZE (256 << 10) /* 256 KiB */
#define CONFIG_ENV_OVERWRITE

#define CONFIG_EXTRA_ENV_SETTINGS \
	"ethaddr=00:00:00:00:00:00\0" \
	"serial=0\0" \
	"stdout=serial_atmel\0" \
	"stderr=serial_atmel\0" \
	"stdin=serial_atmel\0" \
	"bootlimit=3\0" \
	"loadaddr=0x71000000\0" \
	"board_rev=2\0" \
	"bootfile=/tftpboot/uImage\0" \
	"bootargs_def=console=ttyS0,115200 panic=5 quiet lpj=997376\0" \
	"nfsroot=/export/root\0" \
	"boot_working=setenv bootargs $bootargs_def; nboot $loadaddr 0 0x20c0000 && bootm\0" \
	"boot_safe=setenv bootargs $bootargs_def; nboot $loadaddr 0 0xc0000 && bootm\0" \
	"boot_tftp=setenv bootargs $bootargs_def ip=any nfsroot=$nfsroot; setenv autoload y && bootp && bootm\0" \
	"boot_usb=setenv bootargs $bootargs_def; usb start && usb storage && fatload usb 0:1 $loadaddr dds-xm200.bin && bootm\0" \
	"boot_mmc=setenv bootargs $bootargs_def; mmc rescan && fatload mmc 0:1 $loadaddr dds-xm200.bin && bootm\0" \
	"bootcmd=run boot_mmc ; run boot_usb ; run boot_working ; run boot_safe\0" \
	"altbootcmd=run boot_mmc ; run boot_usb ; run boot_safe ; run boot_working\0"

/* Console settings */
#define CONFIG_SYS_LONGHELP
#define CONFIG_CMDLINE_EDITING
#define CONFIG_AUTO_COMPLETE

/* U-Boot memory settings */
#define CONFIG_SYS_MALLOC_LEN (1 << 20) /* 1 MiB malloc arena */

/* Command line configuration */
#define CONFIG_CMD_PING
#define CONFIG_CMD_DHCP
#define CONFIG_CMD_MII
#define CONFIG_CMD_MMC
#define CONFIG_CMD_CACHE

#endif /* __CONFIG_H */
{ "content_hash": "f2fbd8f9fca8dc99e3301b54448b94d1", "timestamp": "", "source": "github", "line_count": 119, "max_line_length": 123, "avg_line_length": 29.81512605042017, "alnum_prop": 0.721815107102593, "repo_name": "guileschool/beagleboard", "id": "f0e1a1d39861314e7511530405fcb051107ff72c", "size": "3778", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "u-boot/include/configs/snapper9g45.h", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "960094" }, { "name": "Awk", "bytes": "269" }, { "name": "Batchfile", "bytes": "3451" }, { "name": "C", "bytes": "62720528" }, { "name": "C++", "bytes": "5261365" }, { "name": "CSS", "bytes": "8362" }, { "name": "GDB", "bytes": "3642" }, { "name": "HTML", "bytes": "237884" }, { "name": "Lex", "bytes": "13917" }, { "name": "Makefile", "bytes": "429363" }, { "name": "Objective-C", "bytes": "370078" }, { "name": "Perl", "bytes": "358570" }, { "name": "Python", "bytes": "884691" }, { "name": "Roff", "bytes": "9384" }, { "name": "Shell", "bytes": "96042" }, { "name": "Tcl", "bytes": "967" }, { "name": "XSLT", "bytes": "445" }, { "name": "Yacc", "bytes": "26163" } ], "symlink_target": "" }
General-purpose OAuth 2.0 authentication strategy for [Passport](http://passportjs.org/).

This module lets you authenticate using OAuth 2.0 in your Node.js applications. By plugging into Passport, OAuth 2.0 authentication can be easily and unobtrusively integrated into any application or framework that supports [Connect](http://www.senchalabs.org/connect/)-style middleware, including [Express](http://expressjs.com/).

Note that this strategy provides generic OAuth 2.0 support. In many cases, a provider-specific strategy can be used instead, which cuts down on unnecessary configuration, and accommodates any provider-specific quirks. See the [list](https://github.com/jaredhanson/passport/wiki/Strategies) for supported providers.

Developers who need to implement authentication against an OAuth 2.0 provider that is not already supported are encouraged to sub-class this strategy. If you choose to open source the new provider-specific strategy, please add it to the list so other people can find it.

## Install

    $ npm install passport-oauth2

## Usage

#### Configure Strategy

The OAuth 2.0 authentication strategy authenticates users using a third-party account and OAuth 2.0 tokens. The provider's OAuth 2.0 endpoints, as well as the client identifier and secret, are specified as options. The strategy requires a `verify` callback, which receives an access token and profile, and calls `cb` providing a user.

```js
passport.use(new OAuth2Strategy({
    authorizationURL: 'https://www.example.com/oauth2/authorize',
    tokenURL: 'https://www.example.com/oauth2/token',
    clientID: EXAMPLE_CLIENT_ID,
    clientSecret: EXAMPLE_CLIENT_SECRET,
    callbackURL: "http://localhost:3000/auth/example/callback"
  },
  function(accessToken, refreshToken, profile, cb) {
    User.findOrCreate({ exampleId: profile.id }, function (err, user) {
      return cb(err, user);
    });
  }
));
```

#### Authenticate Requests

Use `passport.authenticate()`, specifying the `'oauth2'` strategy, to authenticate requests.
For example, as route middleware in an [Express](http://expressjs.com/) application:

```js
app.get('/auth/example',
  passport.authenticate('oauth2'));

app.get('/auth/example/callback',
  passport.authenticate('oauth2', { failureRedirect: '/login' }),
  function(req, res) {
    // Successful authentication, redirect home.
    res.redirect('/');
  });
```

## Related Modules

- [node-oauth-libre](https://github.com/omouse/node-oauth-libre) — node-oauth-libre

## Contributing

#### Tests

The test suite is located in the `test/` directory. All new features are expected to have corresponding test cases. Ensure that the complete test suite passes by executing:

```bash
$ make test
```

#### Coverage

All new feature development is expected to have test coverage. Patches that increase test coverage are happily accepted. Coverage reports can be viewed by executing:

```bash
$ make test-cov
$ make view-cov
```

## License

[The MIT License](http://opensource.org/licenses/MIT)

Copyright (c) 2016 Carlos Castillo <[http://www.carloscastillo.me/](http://www.carloscastillo.me/)>
{ "content_hash": "3dda365663344102ca213c47e33c5753", "timestamp": "", "source": "github", "line_count": 102, "max_line_length": 99, "avg_line_length": 30.607843137254903, "alnum_prop": 0.7459961563100577, "repo_name": "caco0516/passport-oauth2-libre", "id": "1b6977b687c4c75da472af9fd426617d78e86171", "size": "3150", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "149840" }, { "name": "Makefile", "bytes": "408" } ], "symlink_target": "" }
ACCEPTED #### According to NUB Generator [autonym] #### Published in null #### Original name null ### Remarks null
{ "content_hash": "51d4f172388e37098de3187f6df325c0", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 23, "avg_line_length": 9.076923076923077, "alnum_prop": 0.6779661016949152, "repo_name": "mdoering/backbone", "id": "8288fb18074960c6986b57a9759911c36f8ea3a2", "size": "163", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Sapindales/Rutaceae/Fagara/Fagara coco/Fagara coco coco/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
<?php
/**
 * FullCalendar Activation
 *
 * Activation class for the FullCalendar plugin.
 * This is optional, and is required only if you want to perform tasks when your plugin is activated/deactivated.
 *
 * @package Croogo
 * @author Fahad Ibnay Heylaal <contact@fahad19.com>
 * @license http://www.opensource.org/licenses/mit-license.php The MIT License
 * @link http://www.croogo.org
 */
class FullCalendarActivation {
/**
 * onActivate will be called if this returns true
 *
 * @param object $controller Controller
 * @return boolean
 */
	public function beforeActivation(&$controller) {
		return true;
	}

/**
 * Called after activating the plugin in ExtensionsPluginsController::admin_toggle()
 *
 * @param object $controller Controller
 * @return void
 */
	public function onActivation(&$controller) {
		// ACL: set ACOs with permissions
		$controller->Croogo->addAco('FullCalendar'); // FullCalendarController
		$controller->Croogo->addAco('FullCalendar/admin_index'); // FullCalendarController::admin_index()
		$controller->Croogo->addAco('FullCalendar/index', array('registered', 'public')); // FullCalendarController::index()

		// Main menu: add a FullCalendar link
		$mainMenu = $controller->Link->Menu->findByAlias('main');
		// Scope the Tree behavior to the main menu before saving the new link
		$controller->Link->Behaviors->attach('Tree', array(
			'scope' => array(
				'Link.menu_id' => $mainMenu['Menu']['id'],
			),
		));
		$controller->Link->save(array(
			'menu_id' => $mainMenu['Menu']['id'],
			'title' => 'FullCalendar',
			'link' => 'plugin:full_calendar/controller:full_calendar/action:index',
			'status' => 1,
		));
	}

/**
 * onDeactivate will be called if this returns true
 *
 * @param object $controller Controller
 * @return boolean
 */
	public function beforeDeactivation(&$controller) {
		return true;
	}

/**
 * Called after deactivating the plugin in ExtensionsPluginsController::admin_toggle()
 *
 * @param object $controller Controller
 * @return void
 */
	public function onDeactivation(&$controller) {
		// ACL: remove ACOs with permissions
		$controller->Croogo->removeAco('FullCalendar'); // FullCalendarController ACO and its actions will be removed

		// Main menu: delete the FullCalendar link added by onActivation()
		$link = $controller->Link->find('first', array(
			'conditions' => array(
				'Menu.alias' => 'main',
				'Link.link' => 'plugin:full_calendar/controller:full_calendar/action:index',
			),
		));
		$controller->Link->Behaviors->attach('Tree', array(
			'scope' => array(
				'Link.menu_id' => $link['Link']['menu_id'],
			),
		));
		// Guard against the link having been removed manually already
		if (isset($link['Link']['id'])) {
			$controller->Link->delete($link['Link']['id']);
		}
	}
}
?>
{ "content_hash": "a48279c5f3d5aab409a7f10f07627564", "timestamp": "", "source": "github", "line_count": 85, "max_line_length": 119, "avg_line_length": 33.64705882352941, "alnum_prop": 0.6055944055944056, "repo_name": "djstearns/blabfeed-beta2", "id": "fb1728a9064482edb18d7502697666521847c456", "size": "2860", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "plugins/full_calendar/config/full_calendar_activation.php", "mode": "33188", "license": "mit", "language": [ { "name": "CoffeeScript", "bytes": "47686" }, { "name": "JavaScript", "bytes": "6401213" }, { "name": "Logos", "bytes": "7766" }, { "name": "PHP", "bytes": "3756608" }, { "name": "Ruby", "bytes": "4505" } ], "symlink_target": "" }
import { Controller } from 'cx/ui';
import {getPlayer} from '../../api';

// Returns the ranking entry for player `key`, creating and registering a
// zeroed entry in `map` the first time the player is seen.
function getRanking(map, key) {
    if (!map[key])
        map[key] = {
            // this does not return the updated player info, consider creating a player hash map as a computable
            // or rely on the firebase to propagate the change
            name: getPlayer(key).name,
            wins: 0,
            losses: 0,
            points: 0,
            sets: {
                won: 0,
                lost: 0
            }
        };

    return map[key];
}

export default class extends Controller {
    onInit() {
        // Recompute the leaderboard whenever the schedule changes.
        this.addTrigger('rankings', ['schedule'], schedule => {
            // Accumulate wins/losses/points and set counts per player from
            // every game that has a recorded result.
            let rankings = (schedule || []).reduce((acc, game) => {
                let {result} = game;

                // Skip games that have not been played yet.
                if (result.teamA === '' || result.teamB === '')
                    return acc;

                let playerA = getRanking(acc, game.teamA);
                let playerB = getRanking(acc, game.teamB);

                if (result.teamA > result.teamB) {
                    playerA.wins++;
                    playerB.losses++;
                    playerA.points += 2;
                } else {
                    playerB.wins++;
                    playerA.losses++;
                    playerB.points += 2;
                }

                playerA.sets.won += result.teamA;
                playerA.sets.lost += result.teamB;
                playerB.sets.won += result.teamB;
                playerB.sets.lost += result.teamA;

                return acc;
            }, {});

            rankings = Object.keys(rankings).map(key => {
                let {sets} = rankings[key];

                return {
                    id: key,
                    ...rankings[key],
                    setRatio: `${sets.won}:${sets.lost}`
                }
            });

            // Sort ascending by wins, ties broken by set difference, then
            // reverse for a descending leaderboard.
            // BUGFIX: the comparator now returns 0 for complete ties; it used
            // to return -1 for equal entries, which violates the comparator
            // contract and made the order engine-dependent. Also removed an
            // unused `players` local that was read from the store.
            rankings.sort((a, b) => {
                let d = a.wins - b.wins;
                if (d !== 0)
                    return d;
                return (a.sets.won - a.sets.lost) - (b.sets.won - b.sets.lost);
            }).reverse();

            this.store.set('rankings', rankings);
        }, true);
    }
}
{ "content_hash": "50002a6b19341dc3e8ff3546eb7d70d1", "timestamp": "", "source": "github", "line_count": 73, "max_line_length": 112, "avg_line_length": 31.602739726027398, "alnum_prop": 0.4139575205895102, "repo_name": "sasatatar/spin-liga", "id": "8011a4dbae85d6890b3b2eca90f9c4957cfe1749", "size": "2307", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/routes/ranking/Controller.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "6566" }, { "name": "HTML", "bytes": "341" }, { "name": "JavaScript", "bytes": "114938" } ], "symlink_target": "" }
package net.fauxpark.oled;

/**
 * This class defines some useful constants, such as memory addressing modes, scrolling speeds and dummy bytes.
 *
 * @author fauxpark
 */
public class Constant {
	// ---- Dummy bytes -------------------------------------------------------

	/**
	 * A dummy byte consisting of all zeroes.
	 */
	public static final int DUMMY_BYTE_00 = 0x00;

	/**
	 * A dummy byte consisting of all ones.
	 */
	public static final int DUMMY_BYTE_FF = 0xFF;

	// ---- Memory addressing modes -------------------------------------------

	/**
	 * Horizontal memory addressing mode.
	 * In this mode, after reading/writing the display RAM, the column address pointer is incremented.
	 * When the pointer reaches the end, it is reset to the start address on the next page.
	 */
	public static final int MEMORY_MODE_HORIZONTAL = 0x00;

	/**
	 * Vertical memory addressing mode.
	 * In this mode, after reading/writing the display RAM, the page address pointer is incremented.
	 * When the pointer reaches the end, it is reset to the start address on the next column.
	 */
	public static final int MEMORY_MODE_VERTICAL = 0x01;

	/**
	 * Page memory addressing mode.
	 * In this mode, after reading/writing the display RAM, the column address pointer is incremented.
	 * When the pointer reaches the end, it is reset to the start address on the same page.
	 */
	public static final int MEMORY_MODE_PAGE = 0x02;

	// ---- Charge pump -------------------------------------------------------

	/**
	 * Disable the charge pump regulator.
	 */
	public static final int CHARGE_PUMP_DISABLE = 0x10;

	/**
	 * Enable the charge pump regulator.
	 */
	public static final int CHARGE_PUMP_ENABLE = 0x14;

	// ---- COM pin hardware configurations -----------------------------------

	/**
	 * Sequential COM pin hardware configuration.
	 * With {@link Command#SET_COM_SCAN_INC} issued, rows 0 - 63 on the display correspond to COM0 - COM63.
	 */
	public static final int COM_PINS_SEQUENTIAL = 0x02;

	/**
	 * Sequential COM pin hardware configuration with left/right remap.
	 * With {@link Command#SET_COM_SCAN_INC} issued, rows 0 - 31 on the display correspond to COM32 - COM63, and rows 32 - 63 correspond to COM0 - COM31.
	 */
	public static final int COM_PINS_SEQUENTIAL_LR = 0x22;

	/**
	 * Alternating COM pin hardware configuration.
	 * With {@link Command#SET_COM_SCAN_INC} issued, row 0 on the display corresponds to COM0, row 1 to COM32, row 2 to COM2, row 3 to COM33, etc.
	 */
	public static final int COM_PINS_ALTERNATING = 0x12;

	/**
	 * Alternating COM pin hardware configuration with left/right remap.
	 * With {@link Command#SET_COM_SCAN_INC} issued, row 0 on the display corresponds to COM32, row 1 to COM0, row 2 to COM33, row 3 to COM1, etc.
	 */
	public static final int COM_PINS_ALTERNATING_LR = 0x32;

	// ---- VCOMH deselect levels ---------------------------------------------

	/**
	 * A VCOMH deselect level of ~0.65 &times; <code>V<sub>CC</sub></code>.
	 */
	public static final int VCOMH_DESELECT_LEVEL_00 = 0x00;

	/**
	 * A VCOMH deselect level of ~0.77 &times; <code>V<sub>CC</sub></code>.
	 */
	public static final int VCOMH_DESELECT_LEVEL_20 = 0x20;

	/**
	 * A VCOMH deselect level of ~0.83 &times; <code>V<sub>CC</sub></code>.
	 */
	public static final int VCOMH_DESELECT_LEVEL_30 = 0x30;

	// ---- Scrolling speeds (frames per pixel step) --------------------------
	// Note: values are not ordered by speed; they are the raw register values.

	/**
	 * Scroll by one pixel every 5 frames.
	 */
	public static final int SCROLL_STEP_5 = 0x00;

	/**
	 * Scroll by one pixel every 64 frames.
	 */
	public static final int SCROLL_STEP_64 = 0x01;

	/**
	 * Scroll by one pixel every 128 frames.
	 */
	public static final int SCROLL_STEP_128 = 0x02;

	/**
	 * Scroll by one pixel every 256 frames.
	 */
	public static final int SCROLL_STEP_256 = 0x03;

	/**
	 * Scroll by one pixel every 3 frames.
	 */
	public static final int SCROLL_STEP_3 = 0x04;

	/**
	 * Scroll by one pixel every 4 frames.
	 */
	public static final int SCROLL_STEP_4 = 0x05;

	/**
	 * Scroll by one pixel every 25 frames.
	 */
	public static final int SCROLL_STEP_25 = 0x06;

	/**
	 * Scroll by one pixel every 2 frames.
	 */
	public static final int SCROLL_STEP_2 = 0x07;
}
{ "content_hash": "657fdf1d50a8ad1822762a90146b4b39", "timestamp": "", "source": "github", "line_count": 128, "max_line_length": 150, "avg_line_length": 30.09375, "alnum_prop": 0.6692627206645898, "repo_name": "fauxpark/oled-core", "id": "189a21af86728fe212b609cb0da9690f0f4efa9f", "size": "3852", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "src/main/java/net/fauxpark/oled/Constant.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "64138" } ], "symlink_target": "" }
package org.waveprotocol.wave.util.escapers.jvm;

import org.waveprotocol.wave.model.waveref.InvalidWaveRefException;
import org.waveprotocol.wave.model.waveref.WaveRef;
import org.waveprotocol.wave.model.waveref.WaverefEncoder;
import org.waveprotocol.wave.model.waveref.WaverefEncoder.PercentEncoderDecoder;
import org.waveprotocol.wave.util.escapers.PercentEscaper;

import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;

/**
 * Non-GWT-enabled instance of WaverefEncoder for use in the server.
 */
public class JavaWaverefEncoder {
  // '+' is added to the safe character set so it passes through encoding
  // untouched; see decode() below for the matching round-trip handling.
  private static final PercentEscaper pathEscaper =
      new PercentEscaper(PercentEscaper.SAFEPATHCHARS_URLENCODER + "+", false);

  private static final PercentEscaper queryEscaper =
      new PercentEscaper(PercentEscaper.SAFEQUERYSTRINGCHARS_URLENCODER + "+", false);

  public static final WaverefEncoder INSTANCE = new WaverefEncoder(new PercentEncoderDecoder() {
    /**
     * Percent-decodes {@code encodedValue} as UTF-8.
     *
     * {@link URLDecoder} turns '+' into a space (form-encoding semantics),
     * so spaces are mapped back to '+' afterwards to undo that — the
     * escapers above deliberately leave '+' unescaped.
     * NOTE(review): this also converts any space that was encoded as "%20"
     * into '+'; presumably acceptable for waverefs — confirm.
     *
     * @return the decoded string, or null if the UTF-8 charset is
     *         unavailable (effectively unreachable on a conforming JVM).
     */
    @Override
    public String decode(String encodedValue) {
      try {
        return URLDecoder.decode(encodedValue, "UTF-8").replaceAll(" ", "+");
      } catch (UnsupportedEncodingException e) {
        return null;
      }
    }

    /** Percent-escapes {@code decodedValue} for use as a URI path segment. */
    @Override
    public String pathEncode(String decodedValue) {
      return pathEscaper.escape(decodedValue);
    }

    /** Percent-escapes {@code decodedValue} for use in a URI query string. */
    @Override
    public String queryEncode(String decodedValue) {
      return queryEscaper.escape(decodedValue);
    }
  });

  // Disallow construction: this class only exposes static helpers.
  private JavaWaverefEncoder() {
  }

  /** {@link WaverefEncoder#encodeToUriQueryString(String)} */
  public static String encodeToUriQueryString(String str) {
    return INSTANCE.encodeToUriQueryString(str);
  }

  /** {@link WaverefEncoder#encodeToUriPathSegment(String)} */
  public static String encodeToUriPathSegment(String str) {
    return INSTANCE.encodeToUriPathSegment(str);
  }

  /** {@link WaverefEncoder#encodeToUriQueryString(WaveRef)} */
  public static String encodeToUriQueryString(WaveRef ref) {
    return INSTANCE.encodeToUriQueryString(ref);
  }

  /** {@link WaverefEncoder#encodeToUriPathSegment(WaveRef)} */
  public static String encodeToUriPathSegment(WaveRef ref) {
    return INSTANCE.encodeToUriPathSegment(ref);
  }

  /** {@link WaverefEncoder#decodeWaveRefFromPath(String)} */
  public static WaveRef decodeWaveRefFromPath(String path) throws InvalidWaveRefException {
    return INSTANCE.decodeWaveRefFromPath(path);
  }
}
{ "content_hash": "063d9e1bb37bbe9862c7116dd7b84cbc", "timestamp": "", "source": "github", "line_count": 73, "max_line_length": 96, "avg_line_length": 32.863013698630134, "alnum_prop": 0.7553147144643602, "repo_name": "JaredMiller/Wave", "id": "bfec0746cbe16db4b185267c0a02cbe89aa54e73", "size": "2998", "binary": false, "copies": "5", "ref": "refs/heads/trunk", "path": "src/org/waveprotocol/wave/util/escapers/jvm/JavaWaverefEncoder.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "12864902" }, { "name": "JavaScript", "bytes": "2691" }, { "name": "Python", "bytes": "356911" }, { "name": "Shell", "bytes": "6405" }, { "name": "Smalltalk", "bytes": "44615" } ], "symlink_target": "" }
import * as SearchActions from '../actions/search.js';
import { loadFilter } from './load.js';

// Sub-reducer that tracks load progress for search actions.
const loadReducer = loadFilter(SearchActions);

// Search state reducer: keeps the committed query, the in-progress
// (temporary) query, the current result and the load bookkeeping in sync.
export default function search(state = {
  load: { completed: 0, total: 0, loading: false },
  result: null,
  query: '',
  tempQuery: ''
}, action) {
  const load = loadReducer(state.load, action);

  // Shallow-merge helper: copies state, refreshes `load`, applies extras.
  const merge = extra => Object.assign({}, state, { load }, extra);

  // Actions without a payload can only affect the load slice.
  if (!action.payload) return merge();

  const { query } = action.payload;

  if (action.type === SearchActions.SET_QUERY) {
    // Committing a new query invalidates any previous result.
    return merge({ query, tempQuery: query, result: undefined });
  }

  if (action.type === SearchActions.SET_TEMP_QUERY) {
    return merge({ tempQuery: query });
  }

  // SearchActions.FETCH and any unrecognised action: only `load` changes.
  return merge();
}
{ "content_hash": "c766ef2bc50c900853df1f17ba607630", "timestamp": "", "source": "github", "line_count": 35, "max_line_length": 65, "avg_line_length": 25.571428571428573, "alnum_prop": 0.6078212290502794, "repo_name": "GrooshBene/shellscripts", "id": "698120ffe0b7901f2b5dae4c078db0fb1427b142", "size": "895", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/reducers/search.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "6142" }, { "name": "JavaScript", "bytes": "42626" } ], "symlink_target": "" }
<?xml version="1.0" encoding="utf-8"?> <RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android" android:layout_width="match_parent" android:layout_height="match_parent" android:layout_gravity="center_horizontal" android:gravity="center_horizontal"> <TextView android:id="@+id/textView10Stage6Toast" android:layout_width="match_parent" android:layout_height="wrap_content" android:layout_alignParentLeft="true" android:layout_alignParentStart="true" android:layout_alignParentTop="true" android:layout_marginLeft="8dp" android:layout_marginTop="8dp" android:layout_marginBottom="8dp" android:gravity="center_horizontal" android:text="@string/tvStage6ToastQuestionText" android:textSize="16sp" /> <!-- New segmented control buttons --> <org.coderswithoutborders.deglancer.utils.AwesomeRadioButton.SegmentedGroup xmlns:segmentedgroup="http://schemas.android.com/apk/res-auto" android:id="@+id/groupStage6ToastNew" android:layout_width="wrap_content" android:layout_height="wrap_content" android:orientation="horizontal" segmentedgroup:sc_border_width="2dp" segmentedgroup:sc_corner_radius="10dp" android:layout_below="@+id/textView10Stage6Toast" android:layout_centerHorizontal="true"> <RadioButton android:id="@+id/rdbNoToastNew" android:layout_width="wrap_content" android:layout_height="wrap_content" android:text="@string/rdbStage6NoInformationText" style="@style/RadioButton" android:paddingLeft="15dp" android:paddingRight="15dp" /> <RadioButton android:id="@+id/rdbInformationNew" android:layout_width="wrap_content" android:layout_height="wrap_content" android:text="@string/rdbStage6InformationText" style="@style/RadioButton" android:paddingLeft="15dp" android:paddingRight="15dp" /> <RadioButton android:id="@+id/rdbThumbsUpNew" android:layout_width="wrap_content" android:layout_height="wrap_content" android:text="@string/rdbStage6InformationandThumbsUpText" style="@style/RadioButton" android:paddingLeft="15dp" android:paddingRight="15dp" /> 
</org.coderswithoutborders.deglancer.utils.AwesomeRadioButton.SegmentedGroup> </RelativeLayout>
{ "content_hash": "23ab258a61fb1760d6a76b1e628b9fe0", "timestamp": "", "source": "github", "line_count": 63, "max_line_length": 81, "avg_line_length": 40.492063492063494, "alnum_prop": 0.6569972559780478, "repo_name": "coderswithoutborders/deglancer", "id": "ee14807cc115e845e7c6000d78c5fd22dad976a5", "size": "2551", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Deglancer/app/src/main/res/layout/stage6_toast_new_view.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "430985" } ], "symlink_target": "" }
layout: kanji-remain v4: 2363 kanji: 蘭 keyword: orchid strokes: 19 on-yomi: ラン permalink: /rtk/蘭/ --- ## Koohii stories: 1) [<a href="http://kanji.koohii.com/profile/sboegema">sboegema</a>] 23-9-2008(24): The<strong> orchid</strong> thief tries to steal exotic flowers from the east, only to be stopped customs at the departure gate <strong>*</strong> 蘭 【らん】orchid. 2) [<a href="http://kanji.koohii.com/profile/chibimizuno">chibimizuno</a>] 15-7-2009(11): Note: I changed the keyword to Dutch because this is also the kanji used in Japanese for the Netherlands. I needed a break from all the flower types. Story: Nagasaki was opened up to <strong>Dutch</strong> traders in Japan, making it the only <em>gate</em> to the <em>East</em> at that time. As a result of the additional trade, both the Dutch and Japanese economies <em>flower</em>ed. 3) [<a href="http://kanji.koohii.com/profile/mantixen">mantixen</a>] 24-7-2009(8): The <em>Asian lesbians</em> are partial to this <em>flower</em> because it reminds them of their favorite classic video game character: B.<strong> Orchid</strong>. 4) [<a href="http://kanji.koohii.com/profile/Django">Django</a>] 26-2-2010(7): When the people of <strong>the Netherlands</strong> brought <strong>orchids</strong> <em>east</em> to <em>Tokyo</em>, the citizens of Tokyo opened up their <em>gates</em> to let the Dutch in. Apparently they&#039;ve always had a weakness for<strong> orchid</strong>s in the east. 5) [<a href="http://kanji.koohii.com/profile/nadiatims">nadiatims</a>] 25-6-2009(3): The queen of holland( 蘭国 ) grows<strong> orchid</strong> flowers by the west gate of her palace.
{ "content_hash": "b442dfcc0374cb80ecf50b4528775214", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 475, "avg_line_length": 78, "alnum_prop": 0.7301587301587301, "repo_name": "hochanh/hochanh.github.io", "id": "64f04dc96bb7db794d454b2fcae10cc571da7080", "size": "1664", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "rtk/rtk3-remain/2363.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "6969" }, { "name": "JavaScript", "bytes": "5041" } ], "symlink_target": "" }
[![Travis](https://img.shields.io/travis/kiliankoe/ParkenDD.svg?style=flat-square)](https://travis-ci.org/kiliankoe/ParkenDD) [![App Store](https://img.shields.io/itunes/v/957165041.svg?style=flat-square)](https://itunes.apple.com/us/app/parkendd/id957165041) [![GitHub Issues](https://img.shields.io/github/issues/kiliankoe/ParkenDD.svg?style=flat-square)](https://github.com/kiliankoe/ParkenDD/issues) ![](Resources/screenshot.png) [![](http://parkendd.kilian.io/images/badge_small.svg)](https://itunes.apple.com/de/app/parkendd/id957165041) ParkenDD shows you the current parking situation for various European cities, including Dresden, Ingolstadt, Hamburg, Zürich and Aarhus to name a select few. Depending on the available data you can see the current number of available public parking spots as well as their location. For a select number of lots you can also check out forecast information to see if there will be spots available when you arrive. Features of the app: - See the current parking situation for your city at a glance - Multiple cities are supported, for a full list look [here](https://github.com/offenesdresden/ParkAPI/tree/master/park_api/cities) - Display parking lots on a map to see where they are exactly The backend to ParkenDD is also open source. You're very welcome to help us by integrating further open data into our project so that we can support more cities. See the guide on [ParkAPI](https://github.com/offenesdresden/ParkAPI#adding-support-for-a-new-city)'s project page to get started. ParkenDD also exists for **Android** and **Windows 10**, check out the [project page](https://parkendd.de) if you're interested. Pull requests and issue reports for this project are heavily encouraged! Feel free to open an issue should you have any question whatsoever. If you'd like to have this app translated to your language, you can also help out by having a look at the project on [POEditor](https://poeditor.com/join/project/ppxkC7Hgvf).
We're very interested in having as many languages as possible supported. ✌️
{ "content_hash": "16803218910948c7bf2b36b61ffc4736", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 410, "avg_line_length": 82.28, "alnum_prop": 0.7822070977151191, "repo_name": "kiliankoe/ParkenDD", "id": "4ce0f7b87d7983ad35409905e95603420e97074d", "size": "2080", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "Objective-C", "bytes": "290" }, { "name": "Ruby", "bytes": "3647" }, { "name": "Swift", "bytes": "66718" } ], "symlink_target": "" }
// Barrel module: re-exports the chat store's public API.
export * from './chat.store';
{ "content_hash": "d880189b8951217ef804e4ead3161927", "timestamp": "", "source": "github", "line_count": 1, "max_line_length": 29, "avg_line_length": 30, "alnum_prop": 0.6333333333333333, "repo_name": "jpush/jchat-web", "id": "000db9cec4760f9d363517c3659ef84d14b8d0e7", "size": "30", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/app/pages/chat/stores/index.ts", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "209413" }, { "name": "HTML", "bytes": "179268" }, { "name": "JavaScript", "bytes": "38604" }, { "name": "TypeScript", "bytes": "801936" } ], "symlink_target": "" }
package cli

import (
	"os"
	"path"
	"testing"

	"github.com/bitrise-io/envman/envman"
	"github.com/stretchr/testify/require"
)

// TestPrint exercises convertToEnsJSONModel in both modes:
//   - second arg false: values are returned verbatim ($HOME, $TEST_HOME1/test)
//   - second arg true: references are resolved against the current process
//     environment (assumes $HOME is set — TODO confirm on all CI platforms)
func TestPrint(t *testing.T) {
	envsStr := `
envs:
- TEST_HOME1: $HOME
- TEST_HOME2: $TEST_HOME1/test
`
	environments, err := envman.ParseEnvsYML([]byte(envsStr))
	require.Equal(t, nil, err)

	// Without expansion the raw $-references are preserved as-is.
	envsJSONList, err := convertToEnsJSONModel(environments, false)
	require.Equal(t, nil, err)
	require.Equal(t, "$HOME", envsJSONList["TEST_HOME1"])
	require.Equal(t, "$TEST_HOME1/test", envsJSONList["TEST_HOME2"])

	testHome1 := os.Getenv("HOME")
	testHome2 := path.Join(testHome1, "test")

	// With expansion both values resolve to concrete paths, including the
	// chained reference TEST_HOME2 -> TEST_HOME1 -> HOME.
	envsJSONList, err = convertToEnsJSONModel(environments, true)
	require.Equal(t, nil, err)
	require.Equal(t, testHome1, envsJSONList["TEST_HOME1"])
	require.Equal(t, testHome2, envsJSONList["TEST_HOME2"])
}
{ "content_hash": "6cb65697e2e92d967b1db43201bbbb56", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 65, "avg_line_length": 25.5625, "alnum_prop": 0.7163814180929096, "repo_name": "viktorbenei/envman", "id": "dfd3f80195dfdb479811fb3c091f1c38cb13e60b", "size": "818", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "cli/print_test.go", "mode": "33188", "license": "mit", "language": [ { "name": "Go", "bytes": "61168" }, { "name": "Shell", "bytes": "3779" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?> <article id="1276"> <title>Serial Literature Used by American Geologists </title> <author>Gross, P.L.K.Woodford, A.O. </author> <text> The present investigation deals with the serial literature of geology, including mineralogy. Six American journals for 1929 were chosen, and the references tabulated. In Table I are listed these source journals, together with the total number of pages of the actual articles studied, the total number of citations in each journal, the number of references to books and to personal communications, and the net total, which represents the citations to serial literature. It is these last mentioned references which will be considered in further detail. The totals are probably slightly high, due to unintentional counting in single articles of repetitions of the same citation. </text> </article>
{ "content_hash": "e926a1257c67ff55702cb12db85915e8", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 62, "avg_line_length": 44.19047619047619, "alnum_prop": 0.7446120689655172, "repo_name": "bhlshrf/IR", "id": "3fc7728834efc3bc89adad0e63f482d56e6f327a", "size": "928", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "testCases/XML/1276.xml", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "200275" }, { "name": "Roff", "bytes": "2511" } ], "symlink_target": "" }
package org.jadira.usertype.dateandtime.threeten;

import java.time.Month;

import org.jadira.usertype.dateandtime.threeten.columnmapper.IntegerColumnMonthMapper;
import org.jadira.usertype.spi.shared.AbstractSingleColumnUserType;

/**
 * Persist {@link Month} via Hibernate using integer value.
 * The conversion between the enum and its integer column representation is
 * delegated to {@link IntegerColumnMonthMapper} (third type argument).
 */
public class PersistentMonthAsInteger extends AbstractSingleColumnUserType<Month, Integer, IntegerColumnMonthMapper> {

    // Explicit serial version UID for the serializable user-type hierarchy.
    private static final long serialVersionUID = 4694981953643179773L;
}
{ "content_hash": "dc6f06636a7e3ecf5190c6ee08dbcb25", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 118, "avg_line_length": 32.93333333333333, "alnum_prop": 0.8279352226720648, "repo_name": "olliefreeman/jadira", "id": "0f3e795089848fa0b3c23d57882c4321bd2f6715", "size": "1110", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "usertype.extended/src/main/java/org/jadira/usertype/dateandtime/threeten/PersistentMonthAsInteger.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "3195187" } ], "symlink_target": "" }
<?xml version="1.0" encoding="utf-8"?> <shape xmlns:android="http://schemas.android.com/apk/res/android" android:shape="rectangle"> <solid android:color="@android:color/transparent" /> <size android:width="12dp" /> </shape>
{ "content_hash": "bf5e3a80d6cf9546c9b121a1091e11c7", "timestamp": "", "source": "github", "line_count": 6, "max_line_length": 65, "avg_line_length": 39.166666666666664, "alnum_prop": 0.6851063829787234, "repo_name": "avenwu/jk-address-book", "id": "76f7908e4fa59b900e1a7b89f23176861c66865f", "size": "235", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "android/app/src/main/res/drawable/transparent_divider_space.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "273545" }, { "name": "C++", "bytes": "1844" }, { "name": "HTML", "bytes": "40352" }, { "name": "Java", "bytes": "20001" }, { "name": "Kotlin", "bytes": "40733" }, { "name": "Objective-C", "bytes": "2897" }, { "name": "Ruby", "bytes": "345" }, { "name": "Shell", "bytes": "16334" }, { "name": "Swift", "bytes": "52863" } ], "symlink_target": "" }
/** * Utilities to support code generation and compilation in-memory. */ package org.agrona.generation;
{ "content_hash": "d47ce520aca10912fba713507dfb5f0e", "timestamp": "", "source": "github", "line_count": 6, "max_line_length": 66, "avg_line_length": 17.833333333333332, "alnum_prop": 0.7476635514018691, "repo_name": "real-logic/Agrona", "id": "4058eadbdd4dc83f0289363964323dcc8fdd3986", "size": "711", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "agrona/src/main/java/org/agrona/generation/package-info.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "935539" } ], "symlink_target": "" }
WAL Authoring Tool - Node.js 6.11.x - npm@3 - node-gyp - @angular/cli - electron-forge (install separately) - gulp ## Run Run `npm i && npm start` for a dev server. Navigate to `http://localhost:9090/` (port may be randomized). The app artifacts will be stored in the `dist/` directory. ## Build Run `npm i && electron-forge package` to build the project. The standalone app (for your native platform) will be stored in the `out/` directory.
{ "content_hash": "590e7882018ba95e7ceb438d3c39b39a", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 164, "avg_line_length": 28.4375, "alnum_prop": 0.6989010989010989, "repo_name": "moodspace/walat", "id": "35b4427ea3f80c0c8ec25d0b304737b6121d4259", "size": "464", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "2914671" }, { "name": "HTML", "bytes": "36154" }, { "name": "JavaScript", "bytes": "3131506" }, { "name": "TypeScript", "bytes": "77346" } ], "symlink_target": "" }
<?php
/**
 * @link https://github.com/chrmorandi/yii2-jasper for the canonical source repository
 * @package yii2-jasper
 * @author Christopher Mota <chrmorandi@gmail.com>
 * @license MIT License - view the LICENSE file that was distributed with this source code.
 */

namespace chrmorandi\jasper;

use yii\base\Component;
use yii\base\Exception;
use yii\db\Connection;
use yii\helpers\ArrayHelper;

/**
 * Jasper implements a JasperReports application component for creating reports.
 *
 * By default, Jasper creates reports without a database.
 *
 * ```php
 * 'jasper' => [
 *     'class' => 'chrmorandi\jasper\Jasper',
 *     'redirect_output' => false,    // optional
 *     'resource_directory' => false, // optional
 *     'locale' => 'pt_BR',           // optional
 *     'db' => [
 *         'dsn' => 'psql:host=localhost;port=5432;dbname=myDatabase',
 *         'username' => 'username',
 *         'password' => 'password',
 *         //'jdbc_dir' => './jdbc',  // defaults to ./jdbc
 *         //'jdbc_url' => 'jdbc:postgresql://host:port/dbname',
 *     ]
 * ]
 * ```
 *
 * @author Christopher M. Mota <chrmorandi@gmail.com>
 * @since 1.0.0
 */
class Jasper extends Component
{
    /**
     * @var Connection|array|string the DB connection object or the application component ID of the DB connection.
     * After the Jasper object is created, if you want to change this property, you should
     * only assign it with a DB connection object.
     */
    public $db;

    /**
     * @var bool|string path to the report resource dir. If false is given the input_file directory is used.
     */
    public $resource_directory = false;

    /**
     * @var bool redirect output and errors to /dev/null (Unix only).
     */
    public $redirect_output = true;

    /**
     * @var bool if true the report runs in the background (Unix only); the return status is then 0. Default is false.
     */
    public $background = false;

    /**
     * @var bool|string user to run the command as via "su" (Unix only).
     * Password-less switching with the "su" command must be enabled.
     */
    public $run_as_user = false;

    /**
     * @var string|null locale passed to the report as the REPORT_LOCALE parameter.
     */
    public $locale = null;

    public $output_file = false;

    /**
     * @var string path to the JasperStarter executable, relative to this file.
     */
    protected $executable = '/../JasperStarter/bin/jasperstarter';

    /**
     * @var string the last command line built by compile()/process()/listParameters().
     */
    protected $the_command;

    /**
     * @var bool whether we are running on Windows (detected in init()).
     */
    protected $windows = false;

    /**
     * @var string[] output formats accepted by JasperStarter.
     */
    protected $formats = [
        'pdf', 'rtf', 'xls', 'xlsx', 'docx', 'odt', 'ods', 'pptx', 'csv',
        'html', 'xhtml', 'xml', 'jrprint'
    ];

    /**
     * @var array map pdo driver to jdbc driver name
     */
    protected static $pdoDriverCompatibility = [
        'pgsql' => 'postgres',
        'mysql' => 'mysql',
        'sqlite' => 'sqlite',
        'firebird' => 'firebirdsql',
        'oci' => 'oracle',
    ];

    /**
     * Initializes the Jasper component.
     *
     * @throws Exception if [[resource_directory]] does not exist.
     */
    public function init()
    {
        parent::init();

        if (strtoupper(substr(PHP_OS, 0, 3)) === 'WIN') {
            $this->windows = true;
        }

        if ($this->resource_directory) {
            if (!file_exists($this->resource_directory)) {
                throw new Exception('Invalid resource directory', 1);
            }
        }
    }

    /**
     * Builds the command to compile a JasperReports template (JRXML) to the
     * native binary format, called a Jasper file.
     *
     * @param string $input_file path to the .jrxml template
     * @param bool|string $output_file target path; if false JasperStarter derives it from the input file
     * @return Jasper this instance, for chaining with execute()/output()
     * @throws Exception if no input file is given
     */
    public function compile($input_file, $output_file = false)
    {
        if (is_null($input_file) || empty($input_file)) {
            throw new Exception('No input file', 1);
        }

        $command = __DIR__.$this->executable;
        $command .= ' compile ';
        $command .= $input_file;

        if ($output_file !== false) {
            $command .= ' -o '.$output_file;
        }

        $this->the_command = escapeshellcmd($command);

        return $this;
    }

    /**
     * Builds the command to process a report. Accepts files in the format
     * ".jrxml" or ".jasper".
     *
     * ```php
     * $jasper->process(
     *     __DIR__ . '/vendor/chrmorandi/yii2-jasper/examples/hello_world.jasper',
     *     ['php_version' => 'xxx'],
     *     ['pdf', 'ods']
     * )->execute();
     * ```
     *
     * @param string $input_file
     * @param array $parameters report parameters as name => value pairs
     * @param array|string $format available formats: pdf, rtf, xls, xlsx, docx, odt, ods, pptx, csv, html, xhtml, xml, jrprint
     * @param bool|string $output_file if false the input_file directory is used. Default is false.
     * @return Jasper
     * @throws Exception on a missing input file or an invalid format
     */
    public function process($input_file, $parameters = [], $format = ['pdf'], $output_file = false)
    {
        if (is_null($input_file) || empty($input_file)) {
            throw new Exception('No input file', 1);
        }

        // Validate the requested output format(s) against the supported list.
        if (is_array($format)) {
            foreach ($format as $key) {
                if (!in_array($key, $this->formats)) {
                    throw new Exception('Invalid format!', 1);
                }
            }
        } else {
            if (!in_array($format, $this->formats)) {
                throw new Exception('Invalid format!', 1);
            }
        }

        $command = __DIR__.$this->executable;
        $command .= ' process ';
        $command .= $input_file;

        if ($output_file !== false) {
            $command .= ' -o '.$output_file;
        }

        if (is_array($format)) {
            $command .= ' -f '.implode(' ', $format);
        } else {
            $command .= ' -f '.$format;
        }

        if ($this->resource_directory) {
            $command .= ' -r '.$this->resource_directory;
        }

        // Pass the configured locale to the report as REPORT_LOCALE.
        // (Simplified: !empty() already rejects null, so the extra
        // "!= null" check from the original was redundant.)
        if (!empty($this->locale)) {
            $parameters = ArrayHelper::merge(['REPORT_LOCALE' => $this->locale], $parameters);
        }

        if (count($parameters) > 0) {
            $command .= ' -P';
            foreach ($parameters as $key => $value) {
                $command .= ' '.$key.'='.$value;
            }
        }

        if (!empty($this->db)) {
            $command .= $this->databaseParams();
        }

        // NOTE(review): escapeshellcmd() escapes shell metacharacters in the
        // whole command but does not quote individual arguments; paths or
        // parameter values containing spaces will still break the command.
        // Do not feed untrusted input into parameters or file names.
        $this->the_command = escapeshellcmd($command);

        return $this;
    }

    /**
     * Builds the command to list the parameters of a report.
     *
     * @param string $input_file path to the report file
     * @return Jasper this instance, for chaining with execute()/output()
     * @throws Exception if no input file is given
     */
    public function listParameters($input_file)
    {
        if (is_null($input_file) || empty($input_file)) {
            throw new Exception('No input file', 1);
        }

        $command = __DIR__.$this->executable;
        $command .= ' list_parameters ';
        $command .= $input_file;

        $this->the_command = escapeshellcmd($command);

        return $this;
    }

    /**
     * Returns the built command line for inspection/debugging.
     *
     * NOTE(review): the_command has already been passed through
     * escapeshellcmd() when it was built, so escaping again here can double
     * escape characters in the returned string. Kept as-is for backward
     * compatibility.
     *
     * @return string
     */
    public function output()
    {
        return escapeshellcmd($this->the_command);
    }

    /**
     * Runs the previously built command and returns its output lines.
     *
     * @return array the lines printed by JasperStarter
     * @throws Exception if the command exits with a non-zero status
     */
    public function execute()
    {
        $this->unixParams();

        $output = [];
        $return_var = 0;

        exec($this->the_command, $output, $return_var);

        if ($return_var !== 0) {
            throw new Exception(
                'Your report has an error and couldn\'t be processed! Try to output the command: '.
                escapeshellcmd($this->the_command),
                1
            );
        }

        return $output;
    }

    /**
     * Appends the optional Unix-only parts (output redirection, background
     * execution, user switching) to the command. No-op on Windows.
     */
    protected function unixParams()
    {
        if ($this->windows) {
            return;
        }

        $this->the_command .= $this->redirect_output ? ' > /dev/null 2>&1' : '';
        $this->the_command .= $this->background ? ' &' : '';
        // Bug fix: "su" has no "-u" option (that flag belongs to sudo); the
        // target user is given as a positional argument: su <user> -c "<cmd>".
        $this->the_command = $this->run_as_user
            ? 'su '.$this->run_as_user.' -c "'.$this->the_command.'"'
            : $this->the_command;
    }

    /**
     * Builds the database-related command line options from [[db]].
     *
     * @return string the options, or an empty string when [[db]] is not set
     * @throws Exception if the PDO driver has no known JDBC counterpart
     */
    protected function databaseParams()
    {
        if (!isset($this->db)) {
            return '';
        }

        if (empty($this->db['jdbc_url'])) {
            // Derive the JDBC driver and connection details from the PDO DSN.
            $driver = strtolower(substr($this->db['dsn'], 0, strpos($this->db['dsn'], ':')));

            if (!isset(self::$pdoDriverCompatibility[$driver])) {
                throw new Exception('Unsupported PDO driver for JDBC mapping: '.$driver, 1);
            }

            $command = ' -t '.self::$pdoDriverCompatibility[$driver];
            $command .= ' -H '.$this->getDsnValue('host');
            $command .= ' -n '.$this->getDsnValue('dbname');

            if (!empty($port = $this->getDsnValue('port'))) {
                $command .= ' --db-port '.$port;
            }
        } else {
            $command = ' --db-url '.$this->db['jdbc_url'];
        }

        $command .= ' -u '.$this->db['username'];

        if (!empty($this->db['password'])) {
            $command .= ' -p '.$this->db['password'];
        }

        if (!empty($this->db['jdbc_dir'])) {
            $command .= ' --jdbc-dir '.$this->db['jdbc_dir'];
        }

        return $command;
    }

    /**
     * Extracts a single "key=value" entry from the PDO DSN in [[db]].
     *
     * @param string $dsnParameter DSN key to look up (e.g. 'host')
     * @param string|null $default value returned when the key is absent
     * @return string|null
     * @throws Exception if the regular expression engine fails unexpectedly
     */
    protected function getDsnValue($dsnParameter, $default = NULL)
    {
        $pattern = sprintf('~%s=([^;]*)(?:;|$)~', preg_quote($dsnParameter, '~'));
        $result = preg_match($pattern, $this->db['dsn'], $matches);

        if ($result === FALSE) {
            throw new Exception('Regular expression matching failed unexpectedly.');
        }

        return $result ? $matches[1] : $default;
    }
}
{ "content_hash": "1cea1c5fbc0da528bf76c03fa7a7588a", "timestamp": "", "source": "github", "line_count": 339, "max_line_length": 129, "avg_line_length": 28.144542772861357, "alnum_prop": 0.506026621947385, "repo_name": "chrmorandi/yii2-jasper", "id": "2eef5c905961a9f32ffc4e99d4dab0a66bd9bc26", "size": "9541", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Jasper/Jasper.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "3408" }, { "name": "HTML", "bytes": "723253" }, { "name": "PHP", "bytes": "102208" }, { "name": "Shell", "bytes": "838" } ], "symlink_target": "" }
#ifndef IDA_INTEGRATOR_H
#define IDA_INTEGRATOR_H

// Wrapper around the SUNDIALS IDAS implicit DAE integrator (BDF method with
// an SPGMR iterative linear solver), driving a finite-volume Physics model
// distributed over MPI ranks.  Solution storage lives in Physics::TVecDevice
// vectors; IDA's N_Vectors are created "empty" and pointed at that storage
// via N_VSetArrayPointer_Parallel, so IDA and the application share buffers.
#include <fvm/fvm.h>
#include <fvm/mesh.h>
#include <mpi/mpicomm.h>
#include <idas/idas.h>
#include <idas/idas_spgmr.h>
#include <nvector/nvector_parallel.h>

#include <algorithm>
#include <cassert>
#include <vector>

namespace fvm {

// Do-nothing preconditioner used as the default Preconditioner template
// argument.  This class is uninstantiatable (private default constructor):
// it only provides the setup()/apply() interface so that the template
// machinery compiles when no real preconditioner is supplied.
template<class Physics>
class NoPreconditioner {
    NoPreconditioner();
    typedef typename Physics::TVec TVec;
    typedef typename Physics::TVecDevice TVecDevice;
public:
    typedef typename Physics::value_type value_type;
    typedef typename fvm::Callback<Physics> Callback;
    // Both hooks are no-ops that report success (return 0).
    int setup(const mesh::Mesh&, double, double, double, const TVecDevice &, const TVecDevice &, TVecDevice &, TVecDevice &, TVecDevice &, TVecDevice &, TVecDevice &, Callback) { return 0; }
    int apply(const mesh::Mesh&, double, double, double, double, const TVecDevice &, const TVecDevice &, const TVecDevice &, TVecDevice &, TVecDevice &, TVecDevice &, TVecDevice &, Callback) { return 0; }
};

// Owns the IDA solver instance (ida_mem) and all N_Vector views over the
// solution/derivative/weight/tolerance storage.  Non-copyable.
template<class Physics, class Preconditioner = NoPreconditioner<Physics> >
class IDAIntegrator {
public:
    typedef mesh::Mesh Mesh;
    typedef typename Physics::value_type value_type;
    typedef typename fvm::Callback<Physics> Callback;
    typedef typename Physics::TVecDevice TVecDevice;
    typedef typename Physics::TVec TVec;

    IDAIntegrator(const Mesh& mesh, Physics& ph, double rtol, double atol);
    IDAIntegrator(const Mesh& mesh, Physics& ph, Preconditioner& pc, double rtol, double atol);
    ~IDAIntegrator();

    // Binds the integrator to the caller's solution (u) and derivative (up)
    // storage and creates/configures the IDA solver.  Must be called before
    // advance()/compute_initial_conditions().
    void initialise(double& t, TVecDevice &u, TVecDevice &up, Callback compute_residual);

    const Mesh& mesh() const;
    Preconditioner& preconditioner();
    void advance();                  // by one internal timestep
    void advance(double next_time);  // to specified time

    // Returns the absolute tolerance
    value_type& abstol(int);
    value_type abstol(int) const;
    // Returns the relative tolerance
    double& reltol();
    double reltol() const;
    // Sets integration tolerances (pushes the per-variable abstol vector to IDA)
    void set_tolerances();
    // set the maximum timestep taken by IDA
    void set_max_timestep(double);
    // set the maximum BDF order (1..5)
    void set_max_order(int);
    // NOTE(review): max_timestep()/max_order()/copy_vector() are declared but
    // no definition is visible in this file — confirm they are defined elsewhere.
    double max_timestep() const;
    int max_order() const;
    void set_algebraic_variables(const TVec &vals);
    void compute_initial_conditions(TVecDevice &u0, TVecDevice &up0);

    // return a reference to the per-step BDF orders recorded by advance()
    const std::vector<int>& step_orders() const{ return step_orders_; }
    // return a reference to the per-step sizes recorded by advance()
    const std::vector<double>& step_sizes() const{ return step_sizes_; }

    // Exposes the IDA data structure
    void* ida();

private:
    // non-copyable: declared, never defined
    IDAIntegrator(const IDAIntegrator&);
    IDAIntegrator& operator=(const IDAIntegrator&);

    const Mesh& m;
    Physics& physics;
    Preconditioner* pc;              // NULL when the default NoPreconditioner is used
    mpi::MPICommPtr procinfo;        // duplicated MPI communicator ("IDA")
    Callback compute_residual;       // user residual callback F(t,y,yp) -> r
    double* t;                       // points at the caller's time variable
    void* ida_mem;                   // opaque IDA solver handle
    double rtol;
    double atol;
    double max_timestep_;
    int max_order_;
    bool variableids_set_;           // true once set_algebraic_variables() has run

    // N_Vectors are "empty" parallel vectors whose data pointers alias the
    // TVecDevice *_store members below (or u/up for the interp vectors).
    N_Vector atolv;
    N_Vector weights;
    N_Vector ulocal;
    N_Vector uplocal;
    N_Vector uinterp;
    N_Vector upinterp;
    N_Vector variableids;            // specify algebraic/differential variables

    TVecDevice u;                    // aliases caller storage (includes ghost nodes)
    TVecDevice up;
    TVecDevice ulocal_store;         // local-node-only copies handed to IDA
    TVecDevice uplocal_store;
    TVecDevice weights_store;
    TVecDevice variableids_store;
    TVecDevice atolv_store;
    TVecDevice upinterp_store;
    TVecDevice uinterp_store;

    std::vector<int> step_orders_;
    std::vector<double> step_sizes_;

    static const int variables_per_node = VariableTraits<value_type>::number;

    // DEVICE
    //void copy_vector(N_Vector y, iterator w);
    void copy_vector(N_Vector y, TVecDevice &w);

    // IDA residual function
    static int f(double t, N_Vector y, N_Vector yp, N_Vector r, void*);
    // IDA preconditioner setup function
    static int psetup(double t, N_Vector y, N_Vector yp, N_Vector r, double c_j, void*, N_Vector t1, N_Vector t2, N_Vector t3);
    // IDA preconditioner solve function
    static int psolve(double t, N_Vector y, N_Vector yp, N_Vector fy, N_Vector r, N_Vector z, double c_j, double delta, void*, N_Vector tmp);
};

// Constructor without a preconditioner: pc is left NULL so initialise()
// skips IDASpilsSetPreconditioner.
template<class Physics, class Preconditioner>
IDAIntegrator<Physics, Preconditioner>::
IDAIntegrator(const Mesh& mesh, Physics& physics, double rtol, double atol)
    : m(mesh), physics(physics), pc(), t(), ida_mem(), rtol(rtol), atol(atol), max_timestep_(0.), max_order_(5), variableids_set_(false)
{
    procinfo = m.mpicomm()->duplicate("IDA");
}

// Constructor with a user-supplied preconditioner.
template<class Physics, class Preconditioner>
IDAIntegrator<Physics, Preconditioner>::
IDAIntegrator(const Mesh& mesh, Physics& physics, Preconditioner& pc, double rtol, double atol)
    : m(mesh), physics(physics), pc(&pc), t(), ida_mem(), rtol(rtol), atol(atol), max_timestep_(0.), max_order_(5), variableids_set_(false)
{
    procinfo = m.mpicomm()->duplicate("IDA");
}

// Allocates all N_Vector views, copies the caller's initial values into the
// IDA-owned local stores, and creates/configures the IDA solver instance.
template<class Physics, class Preconditioner>
void IDAIntegrator<Physics, Preconditioner>::
initialise(double& tt, TVecDevice &y, TVecDevice &yp, Callback callback)
{
    *procinfo << "\tIDAIntegrator<Physics, Preconditioner>::initialise()" << std::endl;

    t = &tt;
    int localSize = mesh().local_nodes()*variables_per_node;
    int globalSize = mesh().global_nodes()*variables_per_node;

    // u/up alias the caller's storage (all nodes, including ghosts).
    u = TVecDevice(mesh().nodes()*variables_per_node, y.data());
    up = TVecDevice(mesh().nodes()*variables_per_node, yp.data());
    compute_residual = callback;

    // Initialise solution vectors
    // initialise with the passed values (local nodes only)
    ulocal_store = TVecDevice(localSize);
    uplocal_store = TVecDevice(localSize);
    ulocal_store.at(lin::all) = u.at(0,localSize-1);
    uplocal_store.at(lin::all) = up.at(0,localSize-1);
    ulocal = N_VNewEmpty_Parallel( procinfo->communicator(), localSize, globalSize);
    assert(ulocal);
    uplocal = N_VNewEmpty_Parallel( procinfo->communicator(), localSize, globalSize);
    assert(uplocal);
    N_VSetArrayPointer_Parallel(ulocal_store.data(), ulocal);
    N_VSetArrayPointer_Parallel(uplocal_store.data(), uplocal);

    // Initialise interpolation vectors used when interpolation is performed
    // on output from IDA, so as not to overwrite previously calculated solutions
    uinterp = N_VNewEmpty_Parallel( procinfo->communicator(), localSize, globalSize );
    assert(uinterp);
    upinterp = N_VNewEmpty_Parallel( procinfo->communicator(), localSize, globalSize );
    assert(upinterp);
    // point the output of the interpolation to go directly
    // into u and up
    N_VSetArrayPointer_Parallel(u.data(), uinterp);
    N_VSetArrayPointer_Parallel(up.data(), upinterp);

    // Initialise weights vector
    weights = N_VNewEmpty_Parallel( procinfo->communicator(), localSize, globalSize );
    assert(weights);
    weights_store = TVecDevice(localSize);
    N_VSetArrayPointer_Parallel(weights_store.data(), weights);

    // Initialise absolute tolerances vector (uniform atol per variable)
    atolv = N_VNewEmpty_Parallel( procinfo->communicator(), localSize, globalSize );
    assert(atolv);
    atolv_store = TVecDevice(localSize, atol);
    N_VSetArrayPointer_Parallel(atolv_store.data(), atolv);

    // vector for tagging algebraic and differential variables
    variableids = N_VNewEmpty_Parallel( procinfo->communicator(), localSize, globalSize );
    assert(variableids);
    variableids_store = TVecDevice(localSize);
    N_VSetArrayPointer_Parallel(variableids_store.data(), variableids);

    // Create IDA data structure
    ida_mem = IDACreate();
    assert(ida_mem);

    // Initialise IDA internal memory
    int flag = IDAInit( ida_mem, reinterpret_cast<IDAResFn>(f), tt, ulocal, uplocal);
    assert(flag == IDA_SUCCESS);

    // Set default integration tolerances (scalar; set_tolerances() switches
    // to the per-variable atolv vector later if the caller customises it)
    flag = IDASStolerances(ida_mem, rtol, atol);
    assert(flag == IDA_SUCCESS);

    // Set f_data parameter to be "this" so the static callbacks can recover us
    flag = IDASetUserData(ida_mem, this);
    assert(flag == IDA_SUCCESS);

    // Initialise linear solver (SPGMR with default Krylov dimension)
    flag = IDASpgmr(ida_mem, 0);
    assert(flag == IDASPILS_SUCCESS);
    flag = IDASpilsSetGSType(ida_mem, MODIFIED_GS);
    assert(flag == IDASPILS_SUCCESS);

    // Initialise preconditioner (only when one was supplied)
    if (pc) {
        flag = IDASpilsSetPreconditioner( ida_mem, reinterpret_cast<IDASpilsPrecSetupFn>(psetup), reinterpret_cast<IDASpilsPrecSolveFn>(psolve) );
        assert(flag == IDA_SUCCESS);
    }
}

// Destroys the N_Vector views and the IDA solver.  The underlying data
// buffers are owned by the TVecDevice members, not by the N_Vectors.
template<class Physics, class Preconditioner>
IDAIntegrator<Physics, Preconditioner>::~IDAIntegrator() {
    if (ida_mem) {
        N_VDestroy_Parallel(ulocal);
        N_VDestroy_Parallel(uplocal);
        N_VDestroy_Parallel(uinterp);
        N_VDestroy_Parallel(upinterp);
        N_VDestroy_Parallel(weights);
        N_VDestroy_Parallel(atolv);
        N_VDestroy_Parallel(variableids);
        IDAFree(&ida_mem);
    }
}

template<class Physics, class Preconditioner>
const mesh::Mesh& IDAIntegrator<Physics, Preconditioner>::mesh() const {
    return m;
}

// Precondition: a preconditioner was supplied at construction (pc != NULL).
template<class Physics, class Preconditioner>
Preconditioner& IDAIntegrator<Physics, Preconditioner>::preconditioner() {
    assert(pc);
    return *pc;
}

// Advances solution by one internal timestep
template<class Physics, class Preconditioner>
void IDAIntegrator<Physics, Preconditioner>::advance() {
    // we copy ulocal into u because the ulocal contains the current
    // solution value inside IDA, whereas u and up may contain a version
    // of the solution that was interpolated backwards
    // make this copy to ensure that up to date values are used for calculating
    // preprocess_timestep()
    // NOTE(review): the ranges below use local_nodes()-1 while ulocal_store
    // has local_nodes()*variables_per_node entries — looks like it should be
    // local_nodes()*variables_per_node-1 (cf. f() which uses N-1); confirm
    // against TVecDevice::at() semantics before relying on this.
    u.at(0,mesh().local_nodes()-1) = ulocal_store;
    up.at(0,mesh().local_nodes()-1) = uplocal_store;
    physics.preprocess_timestep( *t, m, u, up );

    // tout=1.0 is ignored for IDA_ONE_STEP; *t is updated to the step end.
    int flag = IDASolve( ida_mem, 1.0, t, ulocal, uplocal, IDA_ONE_STEP);
    assert(flag == IDA_SUCCESS);
    if( procinfo->rank()==0 )
        std::cerr << ".";

    // save the order and size of last step just completed
    int order_last;
    flag = IDAGetLastOrder(ida_mem, &order_last);
    step_orders_.push_back(order_last);
    double step_last;
    flag = IDAGetLastStep(ida_mem, &step_last);
    step_sizes_.push_back(step_last);
}

// Advances solution to the specified time
template<class Physics, class Preconditioner>
void IDAIntegrator<Physics, Preconditioner>::advance(double next_time) {
    // advance the solution to next_time (IDA may overshoot)
    while( (*t)<next_time )
        advance();

    // Get IDA to interpolate the solution backwards from t to next_time
    // this doesn't change the internal state of IDA, it is simply
    // to ensure that the solution returned to the user is that at the
    // requested time.
    // The vectors uinterp and upinterp are pointed directly into u and up,
    // so the interpolated values land in the caller's storage.
    int flag = IDAGetDky( ida_mem, next_time, 0, uinterp );
    assert(flag == IDA_SUCCESS);
    flag = IDAGetDky( ida_mem, next_time, 1, upinterp );
    assert(flag == IDA_SUCCESS);
}

// Returns the absolute tolerance (writable; call set_tolerances() afterwards
// to push changes into IDA)
template<class Physics, class Preconditioner>
typename IDAIntegrator<Physics, Preconditioner>::value_type&
IDAIntegrator<Physics, Preconditioner>::abstol(int i) {
    assert(ida_mem);
    assert(i >= 0 && i < mesh().local_nodes());
    return atolv_store[i];
}

// Const overload.  NOTE(review): unlike the non-const overload this does not
// bounds-check i — presumably an oversight; confirm.
template<class Physics, class Preconditioner>
typename IDAIntegrator<Physics, Preconditioner>::value_type
IDAIntegrator<Physics, Preconditioner>::abstol(int i) const {
    assert(ida_mem);
    return atolv_store[i];
}

// Returns the relative tolerance
template<class Physics, class Preconditioner>
double& IDAIntegrator<Physics, Preconditioner>::reltol() {
    return rtol;
}

template<class Physics, class Preconditioner>
double IDAIntegrator<Physics, Preconditioner>::reltol() const {
    return rtol;
}

// Sets integration tolerances: switches IDA to the per-variable abstol vector.
template<class Physics, class Preconditioner>
void IDAIntegrator<Physics, Preconditioner>::set_tolerances() {
    int flag = IDASVtolerances(ida_mem, rtol, atolv);
    assert(flag == IDA_SUCCESS);
}

// set the variable ids
// this allows the user to specify which variables are algebraic and which
// are differential.
// vals[i]=0. -> variable i is algebraic
// vals[i]=1. -> variable i is differential
template<class Physics, class Preconditioner>
// DEVICE
//void IDAIntegrator<Physics, Preconditioner>::set_algebraic_variables(const std::vector<double> &vals){
void IDAIntegrator<Physics, Preconditioner>::set_algebraic_variables(const TVec &vals){
    // sanity check the input: one id per local variable, each exactly 0 or 1
    assert(vals.size()==variables_per_node*m.local_nodes());
    for(int i=0; i<vals.size(); i++)
        assert(vals[i]==0. || vals[i]==1.);

    // DEVICE
    // copy user specified variable ids into NV_Vector
    //double* dest = reinterpret_cast<double*>(NV_DATA_P(variableids));
    //std::copy(vals.begin(), vals.end(), dest);
    variableids_store = vals;

    // call IDA to set the ids
    int flag = IDASetId(ida_mem, variableids);
    assert(flag == IDA_SUCCESS);
    variableids_set_=true;
}

// compute consistent initial conditions
// if IDAIntegrator::set_algebraic_variables() has been called the method
// will attempt to find derivatives for the differential variables
// and values for the algebraic variables
template<class Physics, class Preconditioner>
void IDAIntegrator<Physics, Preconditioner>::compute_initial_conditions(TVecDevice &u0, TVecDevice &up0){
    // IDA_YA_YDP_INIT needs the algebraic/differential tags from IDASetId;
    // fall back to IDA_Y_INIT when they were never supplied.
    int icopt = IDA_Y_INIT;
    if(variableids_set_){
        icopt = IDA_YA_YDP_INIT;
    }
    // (*t)+1. is only a hint for the direction/scale of the first step.
    int flag = IDACalcIC(ida_mem, icopt, (*t)+1.);
    assert(flag==IDA_SUCCESS);

    // DEVICE
    // get the initial conditions: wrap the caller's u0/up0 storage in
    // temporary empty N_Vectors so IDAGetConsistentIC writes straight into it.
    // NOTE(review): yy0_mod/yp0_mod are never N_VDestroy'ed here — confirm
    // whether this is a deliberate (small) leak or an oversight.
    N_Vector yy0_mod, yp0_mod;
    //yy0_mod = N_VNew_Parallel(
    yy0_mod = N_VNewEmpty_Parallel( procinfo->communicator(), mesh().local_nodes() * variables_per_node, mesh().global_nodes() * variables_per_node);
    assert(yy0_mod);
    N_VSetArrayPointer_Parallel(u0.data(), yy0_mod);
    //yp0_mod = N_VNew_Parallel(
    yp0_mod = N_VNewEmpty_Parallel( procinfo->communicator(), mesh().local_nodes() * variables_per_node, mesh().global_nodes() * variables_per_node);
    assert(yp0_mod);
    N_VSetArrayPointer_Parallel(up0.data(), yp0_mod);
    flag = IDAGetConsistentIC(ida_mem, yy0_mod, yp0_mod);
    // DEVICE
    // no need for copy
    /* std::copy( reinterpret_cast<value_type*>(NV_DATA_P(yy0_mod)), reinterpret_cast<value_type*>(NV_DATA_P(yy0_mod)) + variables_per_node*mesh().local_nodes(), u0 ); std::copy( reinterpret_cast<value_type*>(NV_DATA_P(yp0_mod)), reinterpret_cast<value_type*>(NV_DATA_P(yp0_mod)) + variables_per_node*mesh().local_nodes(), up0 ); */
}

// set the maximum timestep taken by IDA
template<class Physics, class Preconditioner>
void IDAIntegrator<Physics, Preconditioner>::set_max_timestep(double max_ts) {
    assert(max_ts>0.);
    max_timestep_ = max_ts;
    int flag = IDASetMaxStep(ida_mem, max_ts);
    assert(flag == IDA_SUCCESS);
}

// set the maximum order of BDF used by IDA (1..5)
template<class Physics, class Preconditioner>
void IDAIntegrator<Physics, Preconditioner>::set_max_order(int max_order) {
    assert(max_order>0 && max_order<=5);
    max_order_ = max_order;
    int flag = IDASetMaxOrd(ida_mem, max_order);
    assert(flag == IDA_SUCCESS);
}

template<class Physics, class Preconditioner>
void* IDAIntegrator<Physics, Preconditioner>::ida() {
    return ida_mem;
}

// (IDA)
// Computes the residual function.  Called by IDA; ip is the "user data"
// pointer registered in initialise(), i.e. the IDAIntegrator instance.
// Copies IDA's current y/yp into the shared u/up buffers (local entries only)
// before invoking the user residual callback.
template<class Physics, class Preconditioner>
int IDAIntegrator<Physics, Preconditioner>::
f(double t, N_Vector y, N_Vector yp, N_Vector res, void* ip)
{
    IDAIntegrator* integrator = static_cast<IDAIntegrator*>(ip);
    *integrator->t = t;
    int N = NV_LOCLENGTH_P(y);
    TVecDevice tmpu = TVecDevice(N, NV_DATA_P(y));
    TVecDevice tmpup = TVecDevice(N, NV_DATA_P(yp));
    integrator->u.at(0,N-1) = tmpu;
    integrator->up.at(0,N-1) = tmpup;
    TVecDevice r(N, NV_DATA_P(res));
    bool communicate = true;
    int success = integrator->compute_residual(r, communicate);
    return success;
}

// (IDA)
// Performs any processing that might be required to set up the preconditioner.
// An example would be forming an approximation to the Jacobian matrix
// J = dF/dy + c_j*dF/dyp and performing an LU factorisation on it.
// This function is called only as often as the underlying solver deems it
// necessary to achieve convergence.
template<class Physics, class Preconditioner>
int IDAIntegrator<Physics, Preconditioner>::
psetup(double tt, N_Vector y, N_Vector yp, N_Vector r, double c, void* ip, N_Vector t1, N_Vector t2, N_Vector t3)
{
    IDAIntegrator* integrator = static_cast<IDAIntegrator*>(ip);
    const mesh::Mesh& m = integrator->mesh();

    // the three scratch vectors must match the problem partitioning
    assert(NV_GLOBLENGTH_P(t1) == m.global_nodes() * variables_per_node);
    assert(NV_LOCLENGTH_P(t1) == m.local_nodes() * variables_per_node);
    assert(NV_GLOBLENGTH_P(t2) == m.global_nodes() * variables_per_node);
    assert(NV_LOCLENGTH_P(t2) == m.local_nodes() * variables_per_node);
    assert(NV_GLOBLENGTH_P(t3) == m.global_nodes() * variables_per_node);
    assert(NV_LOCLENGTH_P(t3) == m.local_nodes() * variables_per_node);

    // current step size and error weights feed the preconditioner build
    double h;
    int flag = IDAGetCurrentStep(integrator->ida(), &h);
    assert(flag == 0);
    flag = IDAGetErrWeights(integrator->ida(), integrator->weights);
    assert(flag == 0);

    int N = NV_LOCLENGTH_P(r);
    TVecDevice res(N, NV_DATA_P(r));
    TVecDevice w(N, NV_DATA_P(integrator->weights));
    TVecDevice temp1(N, NV_DATA_P(t1));
    TVecDevice temp2(N, NV_DATA_P(t2));
    TVecDevice temp3(N, NV_DATA_P(t3));

    int result = integrator->preconditioner().setup( m, tt, c, h, res, w, integrator->u, integrator->up, temp1, temp2, temp3, integrator->compute_residual );
    return result;
}

// (IDA)
// Solves the linear system Pz = r, where P is the preconditioner matrix.
// rr is the right-hand side, zz receives the solution (seeded with rr first).
template<class Physics, class Preconditioner>
int IDAIntegrator<Physics, Preconditioner>::
psolve(double tt, N_Vector y, N_Vector yp, N_Vector r, N_Vector rr, N_Vector zz, double c, double delta, void* ip, N_Vector tmp)
{
    IDAIntegrator* integrator = static_cast<IDAIntegrator*>(ip);
    const mesh::Mesh& m = integrator->mesh();
    assert(NV_GLOBLENGTH_P(tmp) == m.global_nodes() * variables_per_node);
    assert(NV_LOCLENGTH_P(tmp) == m.local_nodes() * variables_per_node);

    double h;
    int flag = IDAGetCurrentStep(integrator->ida(), &h);
    assert(flag == 0);
    flag = IDAGetErrWeights(integrator->ida(), integrator->weights);
    assert(flag == 0);

    int N = NV_LOCLENGTH_P(r);
    TVecDevice res(N, NV_DATA_P(r));
    TVecDevice w(N, NV_DATA_P(integrator->weights));
    TVecDevice rhs(N, NV_DATA_P(rr));
    TVecDevice z(N, NV_DATA_P(zz));
    TVecDevice temp(N, NV_DATA_P(tmp));

    // seed z with the rhs; apply() may refine it in place
    z.at(lin::all) = rhs;
    return integrator->preconditioner().apply( m, tt, c, h, delta, res, w, rhs, integrator->u, integrator->up, z, temp, integrator->compute_residual );
}

// Definition of static member
template<class Physics, class Preconditioner>
const int IDAIntegrator<Physics, Preconditioner>::variables_per_node;

} // end namespace fvm

#endif
{ "content_hash": "7e1b8ff9bd243e45fe68abf1b10eef7a", "timestamp": "", "source": "github", "line_count": 595, "max_line_length": 141, "avg_line_length": 33.36302521008403, "alnum_prop": 0.6615787617752255, "repo_name": "jackd/FVMPor", "id": "516f50bccd28232cfc9371245ca0ace81f52c409", "size": "19851", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "include/fvm/integrators/ida_integrator.h", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "1150656" }, { "name": "C++", "bytes": "427707" }, { "name": "Python", "bytes": "16708" }, { "name": "Shell", "bytes": "2592" } ], "symlink_target": "" }
// Top-level application object: builds the scene graph (a cube rendered to a
// texture shown on a "television" quad), wires input listeners to the camera
// and model, and forwards GTK/GDK events to the InputManager.
// NOTE(review): `logger` is used here but not initialised in this translation
// unit's visible code — presumably a member/static set elsewhere; confirm.
OpenGLApplication::OpenGLApplication(int screenWidth, int screenHeight) :screenWidth(screenWidth), screenHeight(screenHeight){
    logger->info("Starting OpenGLApplication...");

    // Construct all owned objects (released in the destructor, in reverse
    // dependency order is NOT guaranteed — see destructor note).
    manager = new OpenGLManager();
    program = new OpenGLProgram(string("cube.vs.glsl"),string("cube.fs.glsl"));
    tvProgram = new OpenGLProgram(string("tvvs.glsl"),string("tvfs.glsl"));
    inputManager = new InputManager();
    camera = new Camera(program, screenWidth, screenHeight);
    tvCamera = new CameraBasic(screenWidth, screenHeight);
    model = new SquareModel(camera,screenWidth, screenHeight);
    square = new Square(program, model, true);
    television = new Television(tvProgram,tvCamera,square);
    crossHairModel = new CrossHairModel();
    crossHair = new CrossHair(program,crossHairModel);
    crossHairLocal = new CrossHair(program,model);

    // television pass renders on top of the main pass, so it must not clear
    tvProgram->SetIsClear(false);
    tvProgram->SetCamera(tvCamera);
    tvProgram->AddObject(television);

    program->SetIsClear(true);
    program->SetCamera(camera);
    program->AddObject(square);
    //program->AddObject(crossHair);
    //program->AddObject(crossHairLocal);
    program->AddObject(tvProgram);

    // model listens for key releases; camera for keys, scroll, drag, buttons
    inputManager->RegisterListener((IKeyReleasedListener*)model);
    inputManager->RegisterListener((IKeyReleasedListener*)camera);
    inputManager->RegisterListener((IScrollListener*)camera);
    inputManager->RegisterListener((IDragListener*)camera);
    inputManager->RegisterListener((IButtonPressedListener*)camera);
    inputManager->RegisterListener((IButtonReleasedListener*)camera);
}

// Releases every object allocated in the constructor, then nulls the logger.
OpenGLApplication::~OpenGLApplication() {
    delete square;
    delete crossHair;
    delete crossHairLocal;
    delete crossHairModel;
    delete model;
    delete camera;
    delete program;
    delete manager;
    delete inputManager;
    delete television;
    delete tvProgram;
    delete tvCamera;
    logger->info("Stopped OpenGLApplication.");
    logger = 0;
}

// One-time GL state initialisation; must run after a GL context exists.
void OpenGLApplication::Init() {
    logger->info("Init OpenGLApplication...");
    manager->Init(manager);
    program->Init();
}

// Restores model and camera to their initial state.
void OpenGLApplication::Reset() {
    model->Reset();
    camera->Reset();
}

// Per-frame draw: delegates to the main program (which also renders the
// nested tvProgram added as one of its objects).
void OpenGLApplication::Render() {
    //logger->info("Render OpenGLApplication...");
    program->Render();
}

void OpenGLApplication::Shutdown() {
    logger->info("Shutdown OpenGLApplication...");
}

// Overwrites the camera's eye/look-at/up vectors.
void OpenGLApplication::UpdateCamera(glm::vec3 &eye, glm::vec3 &lookAt, glm::vec3 &up) {
    camera->Update(eye,lookAt,up);
}

// Copies the camera's current eye/look-at/up vectors into the out-params
// component by component.
void OpenGLApplication::GetCameraVectors(glm::vec3 &eye,glm::vec3 &lookAt,glm::vec3 &up) {
    glm::vec3 cEye = camera->GetEye();
    glm::vec3 cLookAt = camera->GetLookAt();
    glm::vec3 cUp = camera->GetUp();

    eye.x = cEye.x;
    eye.y = cEye.y;
    eye.z = cEye.z;

    lookAt.x = cLookAt.x;
    lookAt.y = cLookAt.y;
    lookAt.z = cLookAt.z;

    up.x = cUp.x;
    up.y = cUp.y;
    up.z = cUp.z;
}

// The On* handlers below simply forward GDK input events to the InputManager,
// which fans them out to the registered listeners.
void OpenGLApplication::OnKeyReleased(int key) {
    inputManager->OnKeyReleased(key);
}

void OpenGLApplication::OnScroll(GdkScrollDirection dir) {
    inputManager->OnScroll(dir);
}

void OpenGLApplication::OnDrag(double x, double y) {
    inputManager->OnDrag(x,y);
}

void OpenGLApplication::OnButtonPressed(int button, double x, double y) {
    inputManager->OnButtonPressed(button,x,y);
}

void OpenGLApplication::OnButtonReleased(int button, double x, double y) {
    inputManager->OnButtonReleased(button,x,y);
}
{ "content_hash": "be86856da2ecfaf26ea6832a358fae98", "timestamp": "", "source": "github", "line_count": 118, "max_line_length": 90, "avg_line_length": 27.779661016949152, "alnum_prop": 0.7336790726052471, "repo_name": "relyah/cubecamera", "id": "3cccaceb4e9bd2df1a0d2538d43f68bd670f692d", "size": "3310", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "OpenGLApplication.cc", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "212" }, { "name": "C++", "bytes": "84612" }, { "name": "GLSL", "bytes": "3574" }, { "name": "Makefile", "bytes": "2254" } ], "symlink_target": "" }
////////////////////////////////////////////////////////////////// #include <sg.h> #include "sg_wc__public_typedefs.h" #include "sg_wc__public_prototypes.h" #include "sg_wc__private.h" ////////////////////////////////////////////////////////////////// struct _amf_data { SG_mrg * pMrg; SG_mrg_cset * pMrgCSet; }; typedef struct _amf_data _amf_data; ////////////////////////////////////////////////////////////////// /** * When doing a MERGE we run the "merge" version of the * tool (as opposed to the "resolve" version of the tool). * Internal merge tools are fully automatic. External * merge tools may (gnu diff3) or may not (diffmerge) be. * We recommend that they only install manual tools in the * resolve phase, but there is nothing to enforce that. * * If the merge-phase tool is fully automatic, then the * merge-result file is fully generated and is DISPOSABLE * if they later revert the merge. * * If the merge-phase tool asked them for help (or we can't * tell if it did), then we say that the merge-result file * is not disposable. * */ static void _sg_mrg__is_automatic_filetool(SG_context * pCtx, SG_filetool * pMergeTool, SG_bool * pbIsResultFileDisposable) { // TODO 2011/03/03 Consider adding a flag to the actual filetool // TODO indicate if it is interactive/automatic. // TODO // TODO For now we just assume that internal tools // TODO are automatic and external ones are not. const char * pszExe = NULL; SG_bool bInternal; SG_ERR_CHECK_RETURN( SG_filetool__get_executable(pCtx, pMergeTool, &pszExe) ); bInternal = ((pszExe == NULL) || (*pszExe == 0)); *pbIsResultFileDisposable = (bInternal); } /** * If auto-merge was successful or generated a conflict file * and the auto-merge tool is fully automatic, I want to declare * that the merge-result file is "disposable". * * If for example, the user does a REVERT we don't really need * to make a backup of the content. 
* */ static void _sg_mrg__compute_automerge_hid(SG_context * pCtx, SG_mrg * pMrg, SG_mrg_cset_entry_conflict * pMrgCSetEntryConflict) { SG_file * pFile = NULL; SG_bool bIsFullyAutomatic = SG_FALSE; SG_bool bCreatedResultFile; SG_ERR_CHECK( SG_fsobj__exists__pathname(pCtx, pMrgCSetEntryConflict->pPathTempFile_Result, &bCreatedResultFile, NULL, NULL) ); if (bCreatedResultFile) { if (pMrgCSetEntryConflict->pMergeTool) { // We get called when __AUTO_OK or __AUTO_CONFLICT. // // When we generated a result file (whether good or with // conflict markers) remember the HID so that we can // write it to the issue.automerge_generated_hid so that // STATUS and/or MSTATUS can give better answers). SG_ERR_CHECK( SG_file__open__pathname(pCtx, pMrgCSetEntryConflict->pPathTempFile_Result, SG_FILE_RDONLY | SG_FILE_OPEN_EXISTING, SG_FSOBJ_PERMS__UNUSED, &pFile) ); SG_ERR_CHECK( SG_repo__alloc_compute_hash__from_file(pCtx, pMrg->pWcTx->pDb->pRepo, pFile, &pMrgCSetEntryConflict->pszHidGenerated) ); #if TRACE_WC_MERGE SG_ERR_IGNORE( SG_console(pCtx, SG_CS_STDERR, "_sg_mrg__compute_automerge_hid: %s %s\n", pMrgCSetEntryConflict->pszHidGenerated, SG_pathname__sz(pMrgCSetEntryConflict->pPathTempFile_Result)) ); #endif // If the merge tool is fully automatic (and we could delete the // file on a REVERT without backing it upt), set the "disposable" HID. SG_ERR_CHECK( _sg_mrg__is_automatic_filetool(pCtx, pMrgCSetEntryConflict->pMergeTool, &bIsFullyAutomatic) ); if (bIsFullyAutomatic) { SG_ERR_CHECK( SG_strdup(pCtx, pMrgCSetEntryConflict->pszHidGenerated, &pMrgCSetEntryConflict->pszHidDisposable) ); } } } fail: SG_FILE_NULLCLOSE(pCtx, pFile); } ////////////////////////////////////////////////////////////////// /** * Fetch a copy of each version of the file (ancestor and each * branch/leaf) into our private temp-dir. * * These are written to disk to allow an external mergetool to * use them and write the merge-result to the same directory. 
* * Even if we select the internal the builtin core diff3 engine for * the automerge, we still want them written to temp files in case * of conflicts and/or so that the user can re-merge them using * a GUI tool during a RESOLVE. * * We do all of this (even the merge-result file) in a private * temp directory so that 'vv merge --test' can completely run * without trashing/modifying their working directory. * */ static void _sg_mrg__fetch_file_versions_into_temp_files(SG_context * pCtx, SG_mrg * pMrg, SG_mrg_cset_entry_conflict * pMrgCSetEntryConflict) { SG_rbtree_iterator * pIter = NULL; const char * pszKey_k; const SG_vector * pVec_k; SG_uint32 nrUnique; SG_bool bFound; SG_ERR_CHECK( SG_rbtree__count(pCtx, pMrgCSetEntryConflict->prbUnique_File_HidBlob, &nrUnique) ); if (nrUnique != 2) SG_ERR_THROW2_RETURN( SG_ERR_NOTIMPLEMENTED, (pCtx, "TODO figure out how to layout temp files when more than 2 versions of the file.") ); // create and record pathnames for the 3 inputs and the 1 result file. SG_ERR_CHECK( _sg_mrg__create_pathname_for_conflict_file(pCtx, pMrg, pMrgCSetEntryConflict, pMrg->pMrgCSet_LCA->pszMnemonicName, &pMrgCSetEntryConflict->pPathTempFile_Ancestor) ); SG_ERR_CHECK( _sg_mrg__create_pathname_for_conflict_file(pCtx, pMrg, pMrgCSetEntryConflict, pMrg->pMrgCSet_Baseline->pszMnemonicName, &pMrgCSetEntryConflict->pPathTempFile_Baseline) ); SG_ERR_CHECK( _sg_mrg__create_pathname_for_conflict_file(pCtx, pMrg, pMrgCSetEntryConflict, pMrg->pMrgCSet_Other->pszMnemonicName, &pMrgCSetEntryConflict->pPathTempFile_Other) ); SG_ERR_CHECK( _sg_mrg__create_pathname_for_conflict_file(pCtx, pMrg, pMrgCSetEntryConflict, pMrg->pMrgCSet_FinalResult->pszMnemonicName, &pMrgCSetEntryConflict->pPathTempFile_Result) ); // fetch the 3 input versions of the file into temp. 
// // for the "baseline" version, we copy the file as it // is in the WD rather than doing a fresh fetch; in the // event that the file is dirty (and they allowed a dirty // merge), we get the current content of the file. and in // the case that it is clean, we get the same result as if // we had done our own fetch. SG_ERR_CHECK( _sg_mrg__export_to_temp_file(pCtx, pMrg, pMrgCSetEntryConflict->pMrgCSetEntry_Ancestor->bufHid_Blob, pMrgCSetEntryConflict->pPathTempFile_Ancestor) ); SG_ERR_CHECK( SG_rbtree__iterator__first(pCtx, &pIter, pMrgCSetEntryConflict->prbUnique_File_HidBlob, &bFound, &pszKey_k, (void **)&pVec_k) ); while (bFound) { if (strcmp(pszKey_k, pMrgCSetEntryConflict->pMrgCSetEntry_Baseline->bufHid_Blob) == 0) { SG_ERR_CHECK( _sg_mrg__copy_wc_to_temp_file(pCtx, pMrg, pMrgCSetEntryConflict->pMrgCSetEntry_Baseline, pMrgCSetEntryConflict->pPathTempFile_Baseline) ); } else { SG_ERR_CHECK( _sg_mrg__export_to_temp_file(pCtx, pMrg, pszKey_k, pMrgCSetEntryConflict->pPathTempFile_Other) ); } SG_ERR_CHECK( SG_rbtree__iterator__next(pCtx, pIter, &bFound, &pszKey_k, (void **)&pVec_k) ); } fail: SG_RBTREE_ITERATOR_NULLFREE(pCtx, pIter); } ////////////////////////////////////////////////////////////////// /** * Look at the entryname and/or the file content of the ancestor * version of the file and select the best mergetool for it. * * Update the mergetool name field in the conflict. */ static void _sg_mrg__select_mergetool(SG_context * pCtx, SG_mrg * pMrg, SG_mrg_cset_entry_conflict * pMrgCSetEntryConflict) { SG_ASSERT( ((pMrgCSetEntryConflict->flags & SG_MRG_CSET_ENTRY_CONFLICT_FLAGS__DIVERGENT_FILE_EDIT__MASK) == SG_MRG_CSET_ENTRY_CONFLICT_FLAGS__DIVERGENT_FILE_EDIT__TBD) ); SG_ASSERT( (pMrgCSetEntryConflict->pMrgCSetEntry_Composite->tneType == SG_TREENODEENTRY_TYPE_REGULAR_FILE) ); // use the pathname and contents of the ANCESTOR version of the file // to choose the mergetool. // // this is an arbitrary choice, but it simplifies things. 
// // Most of time, it won't matter, but if there was a rename in one or more // of the leaves/branches, then we have to decide which of the new names // should be used. and if there is an unresolved-divergent-rename, then // we also have to worry about that. so just use the ancestor name to // guide the lookup. // // likewise we use the ancestor version of the file content (for "#!" // (shebang) style matches), if appropriate. // // Also by using the ancestor entryname, we don't have to worry so much // about whether the user has resolved the rename conflict before or // after they resolve the file-content conflict. // // this may return null if there is no match. SG_ERR_CHECK( SG_mergetool__select(pCtx, SG_MERGETOOL__CONTEXT__MERGE, NULL, pMrgCSetEntryConflict->pPathTempFile_Ancestor, ((pMrg->pMergeArgs && pMrg->pMergeArgs->bNoAutoMergeFiles) ? SG_MERGETOOL__INTERNAL__SKIP : NULL), pMrg->pWcTx->pDb->pRepo, &pMrgCSetEntryConflict->pMergeTool) ); fail: return; } static void _sg_mrg__invoke_mergetool(SG_context * pCtx, SG_mrg * pMrg, SG_mrg_cset_entry_conflict * pMrgCSetEntryConflict) { SG_varray * pvaExternal = NULL; SG_int32 statusMergeTool = SG_FILETOOL__RESULT__COUNT; SG_bool resultExists = SG_FALSE; if (pMrgCSetEntryConflict->pMergeTool == NULL) { // no mergetool defined/appropriate ==> "No Rule" pMrgCSetEntryConflict->flags &= ~SG_MRG_CSET_ENTRY_CONFLICT_FLAGS__DIVERGENT_FILE_EDIT__TBD; pMrgCSetEntryConflict->flags |= SG_MRG_CSET_ENTRY_CONFLICT_FLAGS__DIVERGENT_FILE_EDIT__NO_RULE; goto done; } SG_ERR_CHECK( SG_mergetool__invoke(pCtx, pMrgCSetEntryConflict->pMergeTool, pMrgCSetEntryConflict->pPathTempFile_Ancestor, pMrg->pMrgCSet_LCA->pStringAcceptLabel, pMrgCSetEntryConflict->pPathTempFile_Baseline, pMrg->pMrgCSet_Baseline->pStringAcceptLabel, pMrgCSetEntryConflict->pPathTempFile_Other, pMrg->pMrgCSet_Other->pStringAcceptLabel, pMrgCSetEntryConflict->pPathTempFile_Result, pMrg->pMrgCSet_FinalResult->pStringAcceptLabel, &statusMergeTool, &resultExists) ); if 
(statusMergeTool == SG_FILETOOL__RESULT__SUCCESS && resultExists == SG_TRUE) { pMrgCSetEntryConflict->flags &= ~SG_MRG_CSET_ENTRY_CONFLICT_FLAGS__DIVERGENT_FILE_EDIT__TBD; pMrgCSetEntryConflict->flags |= SG_MRG_CSET_ENTRY_CONFLICT_FLAGS__DIVERGENT_FILE_EDIT__AUTO_OK; SG_ERR_CHECK( _sg_mrg__compute_automerge_hid(pCtx, pMrg, pMrgCSetEntryConflict) ); } else if (statusMergeTool == SG_MERGETOOL__RESULT__CONFLICT && resultExists == SG_TRUE) { pMrgCSetEntryConflict->flags &= ~SG_MRG_CSET_ENTRY_CONFLICT_FLAGS__DIVERGENT_FILE_EDIT__TBD; pMrgCSetEntryConflict->flags |= SG_MRG_CSET_ENTRY_CONFLICT_FLAGS__DIVERGENT_FILE_EDIT__AUTO_CONFLICT; // If the merge tool managed to spit out a conflict file (with <<< ||| >>> markers), // we'll install it in the WD (for historical reasons like other products). If so, // we still want to compute the disposable-HID of the conflict file (so that we can // tell if it is disposable should they decide to revert). SG_ERR_CHECK( _sg_mrg__compute_automerge_hid(pCtx, pMrg, pMrgCSetEntryConflict) ); } else if (statusMergeTool != SG_MERGETOOL__RESULT__CANCEL) { pMrgCSetEntryConflict->flags &= ~SG_MRG_CSET_ENTRY_CONFLICT_FLAGS__DIVERGENT_FILE_EDIT__TBD; pMrgCSetEntryConflict->flags |= SG_MRG_CSET_ENTRY_CONFLICT_FLAGS__DIVERGENT_FILE_EDIT__AUTO_ERROR; // I'm going to assume that in these cases no merge result file is generated, so // we don't need to compute the disposable-HID. // // TODO 2012/03/01 Should we assert(!resultExists) and/or delete it if it does // TODO (assuming that we always use a temp file for the result) ? } done: ; fail: SG_VARRAY_NULLFREE(pCtx, pvaExternal); } ////////////////////////////////////////////////////////////////// static void _throw_if_sparse(SG_context * pCtx, SG_mrg * pMrg, SG_mrg_cset_entry * pMrgCSetEntry) { SG_string * pStringRepoPath = NULL; sg_wc_liveview_item * pLVI; SG_bool bKnown; SG_bool bIsSparse; // If the file needs merging but is sparse, we need to complain. 
// We know the HID of the file (sparse-hid in tbl_PC), so it isn't // a problem to populate it, but at some point we probably need to // make it un-sparse so that the result can be placed in the WD // (not counting that rare case when we already have a blob with // the same HID as the merge-result). But this is not the right // place to make it un-sparse -- suppose the parent directory is // also sparse. SG_ERR_CHECK( sg_wc_tx__liveview__fetch_random_item(pCtx, pMrg->pWcTx, pMrgCSetEntry->uiAliasGid, &bKnown, &pLVI) ); bIsSparse = SG_WC_PRESCAN_FLAGS__IS_CONTROLLED_SPARSE(pLVI->scan_flags_Live); if (bIsSparse) { SG_ERR_CHECK( sg_wc_tx__liveview__compute_live_repo_path(pCtx, pMrg->pWcTx, pLVI, &pStringRepoPath) ); SG_ERR_THROW2( SG_ERR_WC_IS_SPARSE, (pCtx, "The file '%s' needs to be merged, but is sparse.", SG_string__sz(pStringRepoPath)) ); } fail: SG_STRING_NULLFREE(pCtx, pStringRepoPath); } ////////////////////////////////////////////////////////////////// /** * We get called once for each conflict in SG_mrg_cset.prbConflicts. * We may be a structural conflict or a TBD file-content conflict. * Ignore the former and try to automerge the contents of the latter. 
 */
static SG_rbtree_foreach_callback _try_automerge_files;

/**
 * rbtree-foreach callback: invoked once per entry in SG_mrg_cset.prbConflicts.
 * Structural conflicts (no TBD file-edit flag) are ignored; for TBD file-content
 * conflicts we fetch the ancestor/baseline/other versions into temp files,
 * select a mergetool, and invoke it.
 */
static void _try_automerge_files(SG_context * pCtx,
								 const char * pszKey_Gid_Entry,
								 void * pVoidAssocData_MrgCSetEntryConflict,
								 void * pVoid_Data)
{
	SG_mrg_cset_entry_conflict * pMrgCSetEntryConflict = (SG_mrg_cset_entry_conflict *)pVoidAssocData_MrgCSetEntryConflict;
	_amf_data * pAmfData = (_amf_data *)pVoid_Data;

	SG_UNUSED(pszKey_Gid_Entry);

	// Only TBD (to-be-determined) divergent file edits need automerging;
	// anything else (e.g. a purely structural conflict) is skipped here.
	if ((pMrgCSetEntryConflict->flags & SG_MRG_CSET_ENTRY_CONFLICT_FLAGS__DIVERGENT_FILE_EDIT__TBD) == 0)
		return;

	// A sparse file cannot be merged in place; this throws if so.
	SG_ERR_CHECK(  _throw_if_sparse(pCtx, pAmfData->pMrg, pMrgCSetEntryConflict->pMrgCSetEntry_Baseline)  );
	SG_ERR_CHECK(  _sg_mrg__fetch_file_versions_into_temp_files(pCtx, pAmfData->pMrg, pMrgCSetEntryConflict)  );
	SG_ERR_CHECK(  _sg_mrg__select_mergetool(pCtx, pAmfData->pMrg, pMrgCSetEntryConflict)  );
	SG_ERR_CHECK(  _sg_mrg__invoke_mergetool(pCtx, pAmfData->pMrg, pMrgCSetEntryConflict)  );

	// leave the final merge-result and the temp-files in the temp-dir.

fail:
	// NOTE(review): SG_ERR_CHECK appears to jump here on error (consistent with
	// the other fail: labels in this file); nothing extra to clean up.
	return;
}

//////////////////////////////////////////////////////////////////

/**
 * Entry point: walk every conflict recorded in the result-cset and attempt an
 * automatic content merge for each one via _try_automerge_files. No-op when the
 * cset recorded no conflicts at all.
 */
void SG_mrg__automerge_files(SG_context * pCtx, SG_mrg * pMrg, SG_mrg_cset * pMrgCSet)
{
	_amf_data amfData;

	SG_NULLARGCHECK_RETURN(pMrg);
	SG_NULLARGCHECK_RETURN(pMrgCSet);

	// if there were no conflicts *anywhere* in the result-cset, we don't have to do anything.

	if (!pMrgCSet->prbConflicts)
		return;

	// visit each entry in the result-cset and do any auto-merges that we can.

	amfData.pMrg = pMrg;
	amfData.pMrgCSet = pMrgCSet;

	SG_ERR_CHECK_RETURN(  SG_rbtree__foreach(pCtx,pMrgCSet->prbConflicts,_try_automerge_files,&amfData)  );
}
{ "content_hash": "83a283b0d52137844581b7e219c11fc6", "timestamp": "", "source": "github", "line_count": 410, "max_line_length": 145, "avg_line_length": 38.06829268292683, "alnum_prop": 0.6630574064582265, "repo_name": "glycerine/vj", "id": "c30007fa426de5b27c9fbebf90d240cad0d4c956", "size": "16170", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/veracity/src/libraries/wc/wc6merge/sg_wc_tx__merge__automerge_files.c", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "305113" }, { "name": "C", "bytes": "16794809" }, { "name": "C++", "bytes": "23785718" }, { "name": "CMake", "bytes": "95243" }, { "name": "CSS", "bytes": "149452" }, { "name": "Gnuplot", "bytes": "691" }, { "name": "Groff", "bytes": "19029" }, { "name": "HTML", "bytes": "199269" }, { "name": "Java", "bytes": "849244" }, { "name": "JavaScript", "bytes": "8559068" }, { "name": "Makefile", "bytes": "274053" }, { "name": "Objective-C", "bytes": "91611" }, { "name": "Perl", "bytes": "170709" }, { "name": "Python", "bytes": "128973" }, { "name": "QMake", "bytes": "274" }, { "name": "Shell", "bytes": "424637" }, { "name": "TeX", "bytes": "230259" } ], "symlink_target": "" }
using System;
using System.Threading.Tasks;
using NServiceBus;
using SFA.DAS.EmployerFinance.Data;
using SFA.DAS.EmployerFinance.Messages.Commands;
using SFA.DAS.EmployerFinance.Models.Paye;
using SFA.DAS.NLog.Logger;

namespace SFA.DAS.EmployerFinance.MessageHandlers.CommandHandlers
{
    /// <summary>
    /// Handles <see cref="CreateAccountPayeCommand"/>: persists the new PAYE scheme via
    /// <see cref="IPayeRepository"/> and, when the scheme has no AORN reference, sends a
    /// local <see cref="ImportAccountLevyDeclarationsCommand"/> to request levy data.
    /// </summary>
    public class CreateAccountPayeCommandHandler : IHandleMessages<CreateAccountPayeCommand>
    {
        private readonly IPayeRepository _payeRepository;
        private readonly ILog _logger;

        public CreateAccountPayeCommandHandler(IPayeRepository payeRepository, ILog logger)
        {
            _payeRepository = payeRepository;
            _logger = logger;
        }

        /// <summary>
        /// Creates the PAYE scheme, triggers the levy import for non-AORN schemes,
        /// and logs progress. Any failure is logged and rethrown so the message
        /// infrastructure can retry.
        /// </summary>
        public async Task Handle(CreateAccountPayeCommand message, IMessageHandlerContext context)
        {
            try
            {
                _logger.Info($"Account Paye scheme created via {(string.IsNullOrEmpty(message.Aorn) ? "Gov gateway" : "Aorn")} - Account Id: {message.AccountId}; Emp Ref: {message.EmpRef};");

                var scheme = new Paye(message.EmpRef, message.AccountId, message.Name, message.Aorn);
                await _payeRepository.CreatePayeScheme(scheme);

                await RequestLevyForNonAornScheme(scheme, context);

                _logger.Info($"Account Paye scheme created - Account Id: {scheme.AccountId}; Emp Ref: {scheme.EmpRef}");
            }
            catch (Exception ex)
            {
                _logger.Error(ex, "Could not create account paye scheme");
                throw;
            }
        }

        // Levy declarations are only requested for schemes without an AORN reference.
        private async Task RequestLevyForNonAornScheme(Paye payeScheme, IMessageHandlerContext context)
        {
            if (!string.IsNullOrEmpty(payeScheme.Aorn))
            {
                return;
            }

            await context.SendLocal(new ImportAccountLevyDeclarationsCommand(payeScheme.AccountId, payeScheme.EmpRef));
            _logger.Info($"Requested levy for - Account Id: {payeScheme.AccountId}; Emp Ref: {payeScheme.EmpRef}");
        }
    }
}
{ "content_hash": "90498a7760aab1ff1d057399fbb1015b", "timestamp": "", "source": "github", "line_count": 52, "max_line_length": 191, "avg_line_length": 39.05769230769231, "alnum_prop": 0.6528803545051699, "repo_name": "SkillsFundingAgency/das-employerapprenticeshipsservice", "id": "fc7000c3010be67f5a882ed759c283c6e0447980", "size": "2033", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/SFA.DAS.EmployerFinance.MessageHandlers/CommandHandlers/CreateAccountPayeCommandHandler.cs", "mode": "33188", "license": "mit", "language": [ { "name": "ASP.NET", "bytes": "808" }, { "name": "Batchfile", "bytes": "92" }, { "name": "C#", "bytes": "5033158" }, { "name": "CSS", "bytes": "282148" }, { "name": "Dockerfile", "bytes": "1137" }, { "name": "Gherkin", "bytes": "54605" }, { "name": "HTML", "bytes": "1299641" }, { "name": "JavaScript", "bytes": "219328" }, { "name": "PLpgSQL", "bytes": "4731" }, { "name": "PowerShell", "bytes": "2478" }, { "name": "SCSS", "bytes": "476673" }, { "name": "TSQL", "bytes": "138159" } ], "symlink_target": "" }
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.trino.sql.planner.iterative.rule;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableMap;
import io.trino.cost.PlanNodeStatsEstimate;
import io.trino.cost.StatsProvider;
import io.trino.sql.planner.PlanNodeIdAllocator;
import io.trino.sql.planner.Symbol;
import io.trino.sql.planner.iterative.rule.test.PlanBuilder;
import io.trino.sql.planner.plan.PlanNode;
import io.trino.sql.planner.plan.PlanNodeId;
import io.trino.sql.planner.plan.TableScanNode;
import io.trino.sql.planner.plan.ValuesNode;
import io.trino.testing.TestingMetadata.TestingColumnHandle;
import org.testng.annotations.Test;

import java.util.Optional;

import static io.trino.cost.PlanNodeStatsEstimate.unknown;
import static io.trino.metadata.AbstractMockMetadata.dummyMetadata;
import static io.trino.sql.planner.iterative.Lookup.noLookup;
import static io.trino.sql.planner.plan.JoinNode.Type.INNER;
import static io.trino.testing.TestingSession.testSessionBuilder;
import static java.lang.Double.NaN;
import static org.testng.Assert.assertEquals;

/**
 * Tests for {@code UseNonPartitionedJoinLookupSource.getSourceTablesRowCount}:
 * verifies how the estimated row count of a plan's source tables is derived
 * from per-node statistics.
 */
public class TestGetSourceTablesRowCount
{
    /** A single table scan whose statistics are unknown yields {@code NaN}. */
    @Test
    public void testMissingSourceStats()
    {
        PlanBuilder planBuilder = planBuilder();
        Symbol symbol = planBuilder.symbol("col");

        assertEquals(
                getSourceTablesRowCount(
                        planBuilder.tableScan(
                                tableScan -> tableScan
                                        .setSymbols(ImmutableList.of(symbol))
                                        .setAssignments(ImmutableMap.of(symbol, new TestingColumnHandle("col")))
                                        .setStatistics(Optional.of(unknown())))),
                NaN);
    }

    /**
     * Two leaf nodes under a union: a table scan estimated at 10 rows plus a
     * values node of 20 rows sum to 30.
     */
    @Test
    public void testTwoSourcePlanNodes()
    {
        PlanBuilder planBuilder = planBuilder();
        Symbol symbol = planBuilder.symbol("col");
        Symbol sourceSymbol1 = planBuilder.symbol("source1");
        // (sic) "soruce2" is a typo, but it is only a symbol name local to the test.
        Symbol sourceSymbol2 = planBuilder.symbol("soruce2");

        assertEquals(
                getSourceTablesRowCount(
                        planBuilder.union(
                                ImmutableListMultimap.<Symbol, Symbol>builder()
                                        .put(symbol, sourceSymbol1)
                                        .put(symbol, sourceSymbol2)
                                        .build(),
                                ImmutableList.of(
                                        planBuilder.tableScan(
                                                tableScan -> tableScan
                                                        .setSymbols(ImmutableList.of(sourceSymbol1))
                                                        .setAssignments(ImmutableMap.of(sourceSymbol1, new TestingColumnHandle("col")))
                                                        .setStatistics(Optional.of(stats(10)))),
                                        planBuilder.values(new PlanNodeId("valuesNode"), 20, sourceSymbol2)))),
                30.0);
    }

    /** A plan containing a join produces {@code NaN} (no usable source-table estimate). */
    @Test
    public void testJoinNode()
    {
        PlanBuilder planBuilder = planBuilder();
        Symbol sourceSymbol1 = planBuilder.symbol("source1");
        Symbol sourceSymbol2 = planBuilder.symbol("soruce2");

        assertEquals(
                getSourceTablesRowCount(
                        planBuilder.join(
                                INNER,
                                planBuilder.values(sourceSymbol1),
                                planBuilder.values(sourceSymbol2))),
                NaN);
    }

    // Delegates to the production helper with a trivial lookup and the
    // test stats provider defined below.
    private double getSourceTablesRowCount(PlanNode planNode)
    {
        return UseNonPartitionedJoinLookupSource.getSourceTablesRowCount(
                planNode,
                noLookup(),
                testStatsProvider());
    }

    private PlanBuilder planBuilder()
    {
        return new PlanBuilder(new PlanNodeIdAllocator(), dummyMetadata(), testSessionBuilder().build());
    }

    // Stats provider stub: table scans report the statistics embedded in the node,
    // values nodes report their row count, anything else reports unknown().
    private static StatsProvider testStatsProvider()
    {
        return node -> {
            if (node instanceof TableScanNode) {
                return ((TableScanNode) node).getStatistics().orElse(unknown());
            }
            if (node instanceof ValuesNode) {
                return stats(((ValuesNode) node).getRowCount());
            }
            return unknown();
        };
    }

    // Builds a plan-node estimate with the given output row count.
    private static PlanNodeStatsEstimate stats(int rowCount)
    {
        return PlanNodeStatsEstimate.builder()
                .setOutputRowCount(rowCount)
                .build();
    }
}
{ "content_hash": "5cdcea77218e083d4941bd90c457c02d", "timestamp": "", "source": "github", "line_count": 134, "max_line_length": 135, "avg_line_length": 38.78358208955224, "alnum_prop": 0.5986145853376948, "repo_name": "smartnews/presto", "id": "6bb4eb40040bb34bfb53304e3cf939a8e0e0e211", "size": "5197", "binary": false, "copies": "1", "ref": "refs/heads/smartnews", "path": "core/trino-main/src/test/java/io/trino/sql/planner/iterative/rule/TestGetSourceTablesRowCount.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "50268" }, { "name": "CSS", "bytes": "13515" }, { "name": "Dockerfile", "bytes": "1967" }, { "name": "Groovy", "bytes": "1702" }, { "name": "HTML", "bytes": "30842" }, { "name": "Java", "bytes": "61596519" }, { "name": "JavaScript", "bytes": "232261" }, { "name": "PLSQL", "bytes": "85" }, { "name": "Python", "bytes": "5266" }, { "name": "Scala", "bytes": "10145" }, { "name": "Shell", "bytes": "51516" }, { "name": "Smarty", "bytes": "1938" } ], "symlink_target": "" }
<?php

namespace LFP\StructuralAnks\MainBundle\Entity;

use Doctrine\Common\Collections\ArrayCollection;
use Doctrine\Common\Collections\Collection;
use Doctrine\ORM\Mapping as ORM;

/**
 * Publication entity: a bibliographic record (authors, title, journal, PubMed id,
 * URL) that can be linked to one or more Structure entities.
 *
 * @ORM\Table()
 * @ORM\Entity
 */
class Publication
{
    /**
     * @var integer
     *
     * @ORM\Column(name="id", type="integer")
     * @ORM\Id
     * @ORM\GeneratedValue(strategy="AUTO")
     */
    private $id;

    /**
     * @var string
     *
     * @ORM\Column(name="authors", type="string", length=255, nullable=true)
     */
    private $authors;

    /**
     * @var string
     *
     * @ORM\Column(name="pubmedId", type="string", length=255, nullable=true)
     */
    private $pubmedId;

    /**
     * @var string
     *
     * @ORM\Column(name="title", type="string", length=255, nullable=true)
     */
    private $title;

    /**
     * @var string
     *
     * @ORM\Column(name="journal", type="string", length=255, nullable=true)
     */
    private $journal;

    /**
     * @var string
     *
     * @ORM\Column(name="url", type="string", length=255, nullable=true)
     */
    private $url;

    /**
     * @ORM\OneToMany(targetEntity="Structure", mappedBy="publication")
     */
    protected $structures;

    /**
     * Single-structure reference used by the legacy setStructure()/getStructure()
     * accessors below. Previously those accessors wrote to an undeclared
     * (dynamic) property; the explicit declaration keeps the behaviour while
     * avoiding the PHP 8.2+ dynamic-property deprecation. Note this field is
     * NOT an ORM-mapped association — the mapped collection is $structures.
     *
     * @var Structure|null
     */
    private $structure;

    public function __construct()
    {
        $this->structures = new ArrayCollection();
    }

    /**
     * Get id
     *
     * @return integer
     */
    public function getId()
    {
        return $this->id;
    }

    /**
     * Set authors
     *
     * @param string $authors
     * @return Publication
     */
    public function setAuthors($authors)
    {
        $this->authors = $authors;

        return $this;
    }

    /**
     * Get authors
     *
     * @return string
     */
    public function getAuthors()
    {
        return $this->authors;
    }

    /**
     * Set pubmedId
     *
     * @param string $pubmedId
     * @return Publication
     */
    public function setPubmedId($pubmedId)
    {
        $this->pubmedId = $pubmedId;

        return $this;
    }

    /**
     * Get pubmedId
     *
     * @return string
     */
    public function getPubmedId()
    {
        return $this->pubmedId;
    }

    /**
     * Set title
     *
     * @param string $title
     * @return Publication
     */
    public function setTitle($title)
    {
        $this->title = $title;

        return $this;
    }

    /**
     * Get title
     *
     * @return string
     */
    public function getTitle()
    {
        return $this->title;
    }

    /**
     * Set journal
     *
     * @param string $journal
     * @return Publication
     */
    public function setJournal($journal)
    {
        $this->journal = $journal;

        return $this;
    }

    /**
     * Get journal
     *
     * @return string
     */
    public function getJournal()
    {
        return $this->journal;
    }

    /**
     * Set url
     *
     * @param string $url
     * @return Publication
     */
    public function setUrl($url)
    {
        $this->url = $url;

        return $this;
    }

    /**
     * Get url
     *
     * @return string
     */
    public function getUrl()
    {
        return $this->url;
    }

    /**
     * Set structure (legacy single-structure accessor; not ORM-mapped —
     * prefer addStructure()/getStructures()).
     *
     * @param Structure $structure
     * @return Publication
     */
    public function setStructure(Structure $structure = null)
    {
        $this->structure = $structure;

        return $this;
    }

    /**
     * Get structure (legacy single-structure accessor; not ORM-mapped).
     *
     * @return Structure|null
     */
    public function getStructure()
    {
        return $this->structure;
    }

    /**
     * Add structures
     *
     * @param Structure $structures
     * @return Publication
     */
    public function addStructure(Structure $structures)
    {
        $this->structures[] = $structures;

        return $this;
    }

    /**
     * Remove structures
     *
     * @param Structure $structures
     */
    public function removeStructure(Structure $structures)
    {
        $this->structures->removeElement($structures);
    }

    /**
     * Get structures
     *
     * @return Collection
     */
    public function getStructures()
    {
        return $this->structures;
    }
}
{ "content_hash": "52bd2c8e76582c70c6a5630757edda8f", "timestamp": "", "source": "github", "line_count": 250, "max_line_length": 77, "avg_line_length": 16.928, "alnum_prop": 0.5051984877126654, "repo_name": "gonzaparra/webankyrins", "id": "502ab6111fd2ae4739549b111c49f169f9ec6899", "size": "4232", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/LFP/StructuralAnks/MainBundle/Entity/Publication.php", "mode": "33261", "license": "mit", "language": [ { "name": "CSS", "bytes": "41152" }, { "name": "Java", "bytes": "1185" }, { "name": "JavaScript", "bytes": "1559145" }, { "name": "PHP", "bytes": "19522555" }, { "name": "Perl", "bytes": "12003" }, { "name": "Python", "bytes": "51655" }, { "name": "Shell", "bytes": "1781" }, { "name": "XSLT", "bytes": "1660" } ], "symlink_target": "" }
package org.keyboardplaying.mapper.engine;

import static org.junit.Assert.fail;

import java.math.BigInteger;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;

import org.junit.Test;
import org.keyboardplaying.mapper.annotation.Metadata;
import org.keyboardplaying.mapper.annotation.Nested;
import org.keyboardplaying.mapper.annotation.Temporal.TemporalType;
import org.keyboardplaying.mapper.exception.MapperException;
import org.keyboardplaying.mapper.exception.MappingException;
import org.keyboardplaying.mapper.mock.bean.TestBean;
import org.keyboardplaying.mapper.mock.bean.TestDefaultedBean;
import org.keyboardplaying.mapper.mock.bean.TestInnerBean;
import org.keyboardplaying.mapper.mock.bean.TestInnerImpl;
import org.keyboardplaying.mapper.mock.bean.TestSubBean;

/**
 * Tests for the {@link MappingEngine} (bean-to-map direction): null handling,
 * nested beans, default values, mandatory fields, map overwriting and
 * annotation misuse.
 *
 * @author Cyrille Chopelet (https://keyboardplaying.org)
 */
@SuppressWarnings("javadoc")
public class MappingEngineTest {

    // Engine under test; stateless across tests, so a single shared instance is fine.
    private final MappingEngine mappingEngine = new MappingEngine();

    /**
     * Tests mapping a {@code null} bean.
     */
    @Test(expected = NullPointerException.class)
    public void testMapWithNullBean() throws MapperException {
        mappingEngine.map(null);
    }

    /**
     * Tests the mapping of a {@code null} bean while specifying the map.
     */
    @Test(expected = NullPointerException.class)
    public void testMapWithNullBean2() throws MapperException {
        mappingEngine.map(null, new HashMap<>());
    }

    /**
     * Tests the mapping of a bean to a {@code null} map.
     */
    @Test(expected = NullPointerException.class)
    public void testMapWithNullMap() throws MapperException {
        mappingEngine.map(makeMinimalBean(), null);
    }

    /**
     * Tests the mapping of a bean with a {@link Nested} field.
     */
    @Test
    public void testMapNested() throws MapperException {
        /* Prepare */
        // bean
        TestInnerImpl innerBean = new TestInnerImpl();
        innerBean.setHello("hello");
        TestBean bean = makeMinimalBean();
        bean.setInnerImpl(innerBean);
        // expected map: the nested bean's field is flattened into the same map
        Map<String, String> expected = makeEmptyExpectedMap();
        expected.put("hello_world_inner", "hello");

        /* Execute */
        Map<String, String> map = mappingEngine.map(bean);

        /* Assert */
        assertContentEquals(expected, map);
    }

    /**
     * Tests the default value when no value is set.
     */
    @Test
    public void testDefaultValueNotSet() throws MapperException {
        /* Prepare */
        // beans
        TestBean bean1 = makeMinimalBean();
        TestDefaultedBean bean2 = new TestDefaultedBean();
        bean2.setNotNullString("I'm not null!");
        // expected map
        Map<String, String> expected1 = makeEmptyExpectedMap();
        Map<String, String> expected2 = new HashMap<>();
        expected2.put("the_answer", null);
        expected2.put("not_null_string", "I'm not null!");
        expected2.put("the_doctor", "Doctor Who?");
        expected2.put("the_companion", "");

        /* Execute */
        Map<String, String> map1 = mappingEngine.map(bean1);
        Map<String, String> map2 = mappingEngine.map(bean2);

        /* Assert */
        assertContentEquals(expected1, map1);
        assertContentEquals(expected2, map2);
    }

    /**
     * Tests the default value when a value is set.
     */
    @Test
    public void testDefaultValueSet() throws MapperException {
        /* Prepare */
        // bean: the set value must win over the declared default
        TestBean bean = makeMinimalBean();
        bean.setHello("Hello, gorgeous!");
        // expected map
        Map<String, String> expected = makeEmptyExpectedMap();
        expected.put("hello_world", "Hello, gorgeous!");

        /* Execute */
        Map<String, String> map = mappingEngine.map(bean);

        /* Assert */
        assertContentEquals(expected, map);
    }

    /**
     * Tests the mapping of a bean with a mandatory field left blank.
     */
    @Test(expected = MappingException.class)
    public void testMapWithMandatoryFieldNotSet() throws MapperException {
        /* Prepare */
        // the inner bean's mandatory field is never set
        TestBean bean = makeMinimalBean();
        bean.setInnerImpl(new TestInnerImpl());

        /* Execute */
        mappingEngine.map(bean);
    }

    /**
     * Tests the mapping of a bean with a mandatory field with a default value but no default metadata left blank.
     */
    @Test(expected = MappingException.class)
    public void testMapWithMandatoryFieldWithDefaultValueNotSet() throws MapperException {
        /* Prepare */
        TestDefaultedBean bean = new TestDefaultedBean();

        /* Execute */
        mappingEngine.map(bean);
    }

    /**
     * Tests overwriting a map with the default value for a field.
     * <p/>
     * Also ensures that existing metadata is overwritten only for fields present in class.
     */
    @Test
    public void testOverwriteDefaultValueNotSet() throws MapperException {
        /* Prepare */
        // bean
        TestBean bean = makeMinimalBean();
        // expected map
        Map<String, String> expected = makeEmptyExpectedMap();
        expected.put("some_alien_meta", "This should not be erased.");
        // overwritten map: pre-existing keys unknown to the bean must survive
        Map<String, String> map = new HashMap<>();
        map.put("hello_world", "That's me!");
        map.put("some_alien_meta", "This should not be erased.");

        /* Execute */
        map = mappingEngine.map(bean, map);

        /* Assert */
        assertContentEquals(expected, map);
    }

    /**
     * Tests overwriting a map with the value for a field.
     * <p/>
     * Also ensures that existing metadata is overwritten only for fields present in class.
     */
    @Test
    public void testOverwriteDefaultValueSet() throws MapperException {
        /* Prepare */
        // bean
        TestBean bean = makeMinimalBean();
        bean.setHello("That's my cue.");
        // expected map
        Map<String, String> expected = makeEmptyExpectedMap();
        expected.put("hello_world", "That's my cue.");
        expected.put("some_alien_meta", "This should not be erased.");
        // overwritten map
        Map<String, String> map = new HashMap<>();
        map.put("hello_world", "That's me!");
        map.put("some_alien_meta", "This should not be erased.");

        /* Execute */
        map = mappingEngine.map(bean, map);

        /* Assert */
        assertContentEquals(expected, map);
    }

    // TODO test custom getter

    /**
     * Tests the mapping of a bean to a map.
     */
    @Test
    public void testMapToBean() throws MapperException {
        /* Prepare */
        Date now = new Date();
        Calendar cal = Calendar.getInstance();
        cal.setTime(now);
        // bean: exercise boolean, yes/no, numeric, temporal and composite fields
        TestBean bean = makeMinimalBean();
        bean.setSomeBool(true);
        bean.setSomeYesNo(true);
        bean.setSomeInt(42);
        bean.setSomeLong(4815162342L);
        bean.setSomeBig(BigInteger.valueOf(1337));
        bean.setCal(cal);
        bean.setContact("Cyrille (555-1337)");
        // expected map
        Map<String, String> expected = makeEmptyExpectedMap();
        expected.put("some_bool", "true");
        expected.put("some_yesno", "YES");
        expected.put("some_int", "42");
        expected.put("some_long", "4815162342");
        expected.put("some_bigint", "1337");
        expected.put("some_important_date", new SimpleDateFormat(TemporalType.DATETIME.getFormat()).format(now));
        // the single "contact" field is split over two metadata entries
        expected.put("somebody_s_name", "Cyrille");
        expected.put("somebody_s_phone", "555-1337");

        /* Execute */
        Map<String, String> map = mappingEngine.map(bean);

        /* Assert */
        assertContentEquals(expected, map);
    }

    // Ensures fields inherited from TestBean and fields declared on the subclass
    // are both mapped.
    @Test
    public void testMapSubclassedBean() throws MapperException {
        /* Prepare */
        // bean
        TestSubBean bean = new TestSubBean();
        bean.setMandatory("De da da da");
        bean.setHelloSub("Please give me a sub-teryaki...");
        // expected map
        Map<String, String> expected = makeEmptyExpectedMap();
        expected.put("hello_world_sub", "Please give me a sub-teryaki...");

        /* Execute */
        Map<String, String> map = mappingEngine.map(bean);

        /* Assert */
        assertContentEquals(expected, map);
    }

    // Smallest valid bean: only the mandatory field is set.
    private TestBean makeMinimalBean() {
        TestBean bean = new TestBean();
        bean.setMandatory("De da da da");
        return bean;
    }

    // Expected map for a minimal bean: every TestBean metadata key with its
    // default (or null) value.
    private Map<String, String> makeEmptyExpectedMap() {
        Map<String, String> expected = new HashMap<>();
        expected.put("hello_world", "Didn't send hello... :(");
        expected.put("Do do do", "De da da da");
        expected.put("some_bool", "false");
        expected.put("some_yesno", "NO");
        expected.put("some_int", "0");
        expected.put("some_long", null);
        expected.put("some_bigint", null);
        expected.put("somebody_s_name", null);
        expected.put("somebody_s_phone", null);
        expected.put("some_even_more_important_date", null);
        expected.put("some_important_date", null);
        return expected;
    }

    // Order-insensitive map comparison with a readable failure message
    // (JUnit's assertEquals on maps gives a less specific diagnostic).
    private void assertContentEquals(Map<?, ?> expected, Map<?, ?> actual) {
        if (expected.size() != actual.size()) {
            fail("Expected " + expected + " but found " + actual);
        }
        for (Entry<?, ?> entry : expected.entrySet()) {
            if (!actual.containsKey(entry.getKey())
                    || !Objects.equals(entry.getValue(), actual.get(entry.getKey()))) {
                fail("Expected " + entry.getKey() + "=" + entry.getValue() + " but found " + entry.getKey() + "="
                        + actual.get(entry.getKey()));
            }
        }
    }

    /**
     * Ensures the mapping fails if no key was supplied for the field.
     */
    @Test(expected = MappingException.class)
    public void testMapWithEmptyMetadataKey() throws MapperException {
        /* Execute */
        // anonymous bean with a @Metadata field that declares no key
        mappingEngine.map(new Object() {

            @Metadata
            private String hello;

            @SuppressWarnings("unused")
            public String getHello() {
                return hello;
            }

            @SuppressWarnings("unused")
            public void setHello(String hello) {
                this.hello = hello;
            }
        }, new HashMap<>());
    }

    /**
     * Ensures the mapping fails if a mandatory nested is null.
     */
    @Test(expected = MappingException.class)
    public void testMapUnexistingNested() throws MapperException {
        /* Execute */
        // anonymous bean whose mandatory @Nested field is left null
        mappingEngine.map(new Object() {

            @Nested(mandatory = true)
            private TestInnerBean inner;

            @SuppressWarnings("unused")
            public TestInnerBean getInner() {
                return inner;
            }

            @SuppressWarnings("unused")
            public void setInner(TestInnerBean inner) {
                this.inner = inner;
            }
        }, new HashMap<>());
    }

    /**
     * Ensures the mapping fails if the getter is absent.
     */
    @Test(expected = MappingException.class)
    public void testMapWithNoGetter() throws MapperException {
        /* Execute */
        // anonymous bean with a @Metadata field but only a setter
        mappingEngine.map(new Object() {

            @Metadata("hello")
            private String hello;

            @SuppressWarnings("unused")
            public void setHello(String hello) {
                this.hello = hello;
            }
        }, new HashMap<>());
    }
}
{ "content_hash": "aba494efed79fa8a12b8f96a7b0a8284", "timestamp": "", "source": "github", "line_count": 357, "max_line_length": 119, "avg_line_length": 32.53501400560224, "alnum_prop": 0.6043908738699957, "repo_name": "cyChop/property-mapper", "id": "86bdddfb4df7bb06c60ff3d3f62376b1936a28cc", "size": "11615", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/test/java/org/keyboardplaying/mapper/engine/MappingEngineTest.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "144699" } ], "symlink_target": "" }
// Copyright (c) 2009-2010 Satoshi Nakamoto // Copyright (c) 2009-2012 The Bitcoin developers // Distributed under the MIT/X11 software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. #include "alert.h" #include "checkpoints.h" #include "db.h" #include "txdb.h" #include "net.h" #include "init.h" #include "ui_interface.h" #include "checkqueue.h" #include <boost/algorithm/string/replace.hpp> #include <boost/filesystem.hpp> #include <boost/filesystem/fstream.hpp> using namespace std; using namespace boost; // // Global state // CCriticalSection cs_setpwalletRegistered; set<CWallet*> setpwalletRegistered; CCriticalSection cs_main; CTxMemPool mempool; unsigned int nTransactionsUpdated = 0; map<uint256, CBlockIndex*> mapBlockIndex; uint256 hashGenesisBlock("0x000000007aeecc59aab937f8c65010b69f3e05c63b869c3d702106875804e206"); static CBigNum bnProofOfWorkLimit(~uint256(0) >> 32); CBlockIndex* pindexGenesisBlock = NULL; int nBestHeight = -1; uint256 nBestChainWork = 0; uint256 nBestInvalidWork = 0; uint256 hashBestChain = 0; CBlockIndex* pindexBest = NULL; set<CBlockIndex*, CBlockIndexWorkComparator> setBlockIndexValid; // may contain all CBlockIndex*'s that have validness >=BLOCK_VALID_TRANSACTIONS, and must contain those who aren't failed int64 nTimeBestReceived = 0; int nScriptCheckThreads = 0; bool fImporting = false; bool fReindex = false; bool fBenchmark = false; bool fTxIndex = false; unsigned int nCoinCacheSize = 5000; /** Fees smaller than this (in satoshi) are considered zero fee (for transaction creation) */ int64 CTransaction::nMinTxFee = 10000; // Override with -mintxfee /** Fees smaller than this (in satoshi) are considered zero fee (for relaying) */ int64 CTransaction::nMinRelayTxFee = 10000; CMedianFilter<int> cPeerBlockCounts(8, 0); // Amount of blocks that other nodes claim to have map<uint256, CBlock*> mapOrphanBlocks; multimap<uint256, CBlock*> mapOrphanBlocksByPrev; map<uint256, CTransaction> 
mapOrphanTransactions; map<uint256, set<uint256> > mapOrphanTransactionsByPrev; // Constant stuff for coinbase transactions we create: CScript COINBASE_FLAGS; const string strMessageMagic = "BullCityCoin Signed Message:\n"; double dHashesPerSec = 0.0; int64 nHPSTimerStart = 0; // Settings int64 nTransactionFee = 0; ////////////////////////////////////////////////////////////////////////////// // // dispatching functions // // These functions dispatch to one or all registered wallets void RegisterWallet(CWallet* pwalletIn) { { LOCK(cs_setpwalletRegistered); setpwalletRegistered.insert(pwalletIn); } } void UnregisterWallet(CWallet* pwalletIn) { { LOCK(cs_setpwalletRegistered); setpwalletRegistered.erase(pwalletIn); } } // get the wallet transaction with the given hash (if it exists) bool static GetTransaction(const uint256& hashTx, CWalletTx& wtx) { BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered) if (pwallet->GetTransaction(hashTx,wtx)) return true; return false; } // erases transaction with the given hash from all wallets void static EraseFromWallets(uint256 hash) { BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered) pwallet->EraseFromWallet(hash); } // make sure all wallets know about the given transaction, in the given block void SyncWithWallets(const uint256 &hash, const CTransaction& tx, const CBlock* pblock, bool fUpdate) { BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered) pwallet->AddToWalletIfInvolvingMe(hash, tx, pblock, fUpdate); } // notify wallets about a new best chain void static SetBestChain(const CBlockLocator& loc) { BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered) pwallet->SetBestChain(loc); } // notify wallets about an updated transaction void static UpdatedTransaction(const uint256& hashTx) { BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered) pwallet->UpdatedTransaction(hashTx); } // dump all wallets void static PrintWallets(const CBlock& block) { BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered) 
pwallet->PrintWallet(block); } // notify wallets about an incoming inventory (for request counts) void static Inventory(const uint256& hash) { BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered) pwallet->Inventory(hash); } // ask wallets to resend their transactions void static ResendWalletTransactions() { BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered) pwallet->ResendWalletTransactions(); } ////////////////////////////////////////////////////////////////////////////// // // CCoinsView implementations // bool CCoinsView::GetCoins(const uint256 &txid, CCoins &coins) { return false; } bool CCoinsView::SetCoins(const uint256 &txid, const CCoins &coins) { return false; } bool CCoinsView::HaveCoins(const uint256 &txid) { return false; } CBlockIndex *CCoinsView::GetBestBlock() { return NULL; } bool CCoinsView::SetBestBlock(CBlockIndex *pindex) { return false; } bool CCoinsView::BatchWrite(const std::map<uint256, CCoins> &mapCoins, CBlockIndex *pindex) { return false; } bool CCoinsView::GetStats(CCoinsStats &stats) { return false; } CCoinsViewBacked::CCoinsViewBacked(CCoinsView &viewIn) : base(&viewIn) { } bool CCoinsViewBacked::GetCoins(const uint256 &txid, CCoins &coins) { return base->GetCoins(txid, coins); } bool CCoinsViewBacked::SetCoins(const uint256 &txid, const CCoins &coins) { return base->SetCoins(txid, coins); } bool CCoinsViewBacked::HaveCoins(const uint256 &txid) { return base->HaveCoins(txid); } CBlockIndex *CCoinsViewBacked::GetBestBlock() { return base->GetBestBlock(); } bool CCoinsViewBacked::SetBestBlock(CBlockIndex *pindex) { return base->SetBestBlock(pindex); } void CCoinsViewBacked::SetBackend(CCoinsView &viewIn) { base = &viewIn; } bool CCoinsViewBacked::BatchWrite(const std::map<uint256, CCoins> &mapCoins, CBlockIndex *pindex) { return base->BatchWrite(mapCoins, pindex); } bool CCoinsViewBacked::GetStats(CCoinsStats &stats) { return base->GetStats(stats); } CCoinsViewCache::CCoinsViewCache(CCoinsView &baseIn, bool fDummy) : 
CCoinsViewBacked(baseIn), pindexTip(NULL) { } bool CCoinsViewCache::GetCoins(const uint256 &txid, CCoins &coins) { if (cacheCoins.count(txid)) { coins = cacheCoins[txid]; return true; } if (base->GetCoins(txid, coins)) { cacheCoins[txid] = coins; return true; } return false; } std::map<uint256,CCoins>::iterator CCoinsViewCache::FetchCoins(const uint256 &txid) { std::map<uint256,CCoins>::iterator it = cacheCoins.lower_bound(txid); if (it != cacheCoins.end() && it->first == txid) return it; CCoins tmp; if (!base->GetCoins(txid,tmp)) return cacheCoins.end(); std::map<uint256,CCoins>::iterator ret = cacheCoins.insert(it, std::make_pair(txid, CCoins())); tmp.swap(ret->second); return ret; } CCoins &CCoinsViewCache::GetCoins(const uint256 &txid) { std::map<uint256,CCoins>::iterator it = FetchCoins(txid); assert(it != cacheCoins.end()); return it->second; } bool CCoinsViewCache::SetCoins(const uint256 &txid, const CCoins &coins) { cacheCoins[txid] = coins; return true; } bool CCoinsViewCache::HaveCoins(const uint256 &txid) { return FetchCoins(txid) != cacheCoins.end(); } CBlockIndex *CCoinsViewCache::GetBestBlock() { if (pindexTip == NULL) pindexTip = base->GetBestBlock(); return pindexTip; } bool CCoinsViewCache::SetBestBlock(CBlockIndex *pindex) { pindexTip = pindex; return true; } bool CCoinsViewCache::BatchWrite(const std::map<uint256, CCoins> &mapCoins, CBlockIndex *pindex) { for (std::map<uint256, CCoins>::const_iterator it = mapCoins.begin(); it != mapCoins.end(); it++) cacheCoins[it->first] = it->second; pindexTip = pindex; return true; } bool CCoinsViewCache::Flush() { bool fOk = base->BatchWrite(cacheCoins, pindexTip); if (fOk) cacheCoins.clear(); return fOk; } unsigned int CCoinsViewCache::GetCacheSize() { return cacheCoins.size(); } /** CCoinsView that brings transactions from a memorypool into view. It does not check for spendings by memory pool transactions. 
*/ CCoinsViewMemPool::CCoinsViewMemPool(CCoinsView &baseIn, CTxMemPool &mempoolIn) : CCoinsViewBacked(baseIn), mempool(mempoolIn) { } bool CCoinsViewMemPool::GetCoins(const uint256 &txid, CCoins &coins) { if (base->GetCoins(txid, coins)) return true; if (mempool.exists(txid)) { const CTransaction &tx = mempool.lookup(txid); coins = CCoins(tx, MEMPOOL_HEIGHT); return true; } return false; } bool CCoinsViewMemPool::HaveCoins(const uint256 &txid) { return mempool.exists(txid) || base->HaveCoins(txid); } CCoinsViewCache *pcoinsTip = NULL; CBlockTreeDB *pblocktree = NULL; ////////////////////////////////////////////////////////////////////////////// // // mapOrphanTransactions // bool AddOrphanTx(const CTransaction& tx) { uint256 hash = tx.GetHash(); if (mapOrphanTransactions.count(hash)) return false; // Ignore big transactions, to avoid a // send-big-orphans memory exhaustion attack. If a peer has a legitimate // large transaction with a missing parent then we assume // it will rebroadcast it later, after the parent transaction(s) // have been mined or received. 
// 10,000 orphans, each of which is at most 5,000 bytes big is // at most 500 megabytes of orphans: unsigned int sz = tx.GetSerializeSize(SER_NETWORK, CTransaction::CURRENT_VERSION); if (sz > 5000) { printf("ignoring large orphan tx (size: %u, hash: %s)\n", sz, hash.ToString().c_str()); return false; } mapOrphanTransactions[hash] = tx; BOOST_FOREACH(const CTxIn& txin, tx.vin) mapOrphanTransactionsByPrev[txin.prevout.hash].insert(hash); printf("stored orphan tx %s (mapsz %"PRIszu")\n", hash.ToString().c_str(), mapOrphanTransactions.size()); return true; } void static EraseOrphanTx(uint256 hash) { if (!mapOrphanTransactions.count(hash)) return; const CTransaction& tx = mapOrphanTransactions[hash]; BOOST_FOREACH(const CTxIn& txin, tx.vin) { mapOrphanTransactionsByPrev[txin.prevout.hash].erase(hash); if (mapOrphanTransactionsByPrev[txin.prevout.hash].empty()) mapOrphanTransactionsByPrev.erase(txin.prevout.hash); } mapOrphanTransactions.erase(hash); } unsigned int LimitOrphanTxSize(unsigned int nMaxOrphans) { unsigned int nEvicted = 0; while (mapOrphanTransactions.size() > nMaxOrphans) { // Evict a random orphan: uint256 randomhash = GetRandHash(); map<uint256, CTransaction>::iterator it = mapOrphanTransactions.lower_bound(randomhash); if (it == mapOrphanTransactions.end()) it = mapOrphanTransactions.begin(); EraseOrphanTx(it->first); ++nEvicted; } return nEvicted; } ////////////////////////////////////////////////////////////////////////////// // // CTransaction / CTxOut // bool CTxOut::IsDust() const { // "Dust" is defined in terms of CTransaction::nMinRelayTxFee, // which has units satoshis-per-kilobyte. // If you'd pay more than 1/3 in fees // to spend something, then we consider it dust. 
// A typical txout is 34 bytes big, and will // need a CTxIn of at least 148 bytes to spend, // so dust is a txout less than 54 uBTC // (5460 satoshis) with default nMinRelayTxFee return ((nValue*1000)/(3*((int)GetSerializeSize(SER_DISK,0)+148)) < CTransaction::nMinRelayTxFee); } bool CTransaction::IsStandard(string& strReason) const { if (nVersion > CTransaction::CURRENT_VERSION || nVersion < 1) { strReason = "version"; return false; } if (!IsFinal()) { strReason = "not-final"; return false; } // Extremely large transactions with lots of inputs can cost the network // almost as much to process as they cost the sender in fees, because // computing signature hashes is O(ninputs*txsize). Limiting transactions // to MAX_STANDARD_TX_SIZE mitigates CPU exhaustion attacks. unsigned int sz = this->GetSerializeSize(SER_NETWORK, CTransaction::CURRENT_VERSION); if (sz >= MAX_STANDARD_TX_SIZE) { strReason = "tx-size"; return false; } BOOST_FOREACH(const CTxIn& txin, vin) { // Biggest 'standard' txin is a 3-signature 3-of-3 CHECKMULTISIG // pay-to-script-hash, which is 3 ~80-byte signatures, 3 // ~65-byte public keys, plus a few script ops. if (txin.scriptSig.size() > 500) { strReason = "scriptsig-size"; return false; } if (!txin.scriptSig.IsPushOnly()) { strReason = "scriptsig-not-pushonly"; return false; } } BOOST_FOREACH(const CTxOut& txout, vout) { if (!::IsStandard(txout.scriptPubKey)) { strReason = "scriptpubkey"; return false; } if (txout.IsDust()) { strReason = "dust"; return false; } } return true; } // // Check transaction inputs, and make sure any // pay-to-script-hash transactions are evaluating IsStandard scripts // // Why bother? To avoid denial-of-service attacks; an attacker // can submit a standard HASH... OP_EQUAL transaction, // which will get accepted into blocks. The redemption // script can be anything; an attacker could use a very // expensive-to-check-upon-redemption script like: // DUP CHECKSIG DROP ... repeated 100 times... 
// OP_1
//
bool CTransaction::AreInputsStandard(CCoinsViewCache& mapInputs) const
{
    if (IsCoinBase())
        return true; // Coinbases don't use vin normally

    for (unsigned int i = 0; i < vin.size(); i++)
    {
        const CTxOut& prev = GetOutputFor(vin[i], mapInputs);

        vector<vector<unsigned char> > vSolutions;
        txnouttype whichType;
        // get the scriptPubKey corresponding to this input:
        const CScript& prevScript = prev.scriptPubKey;
        if (!Solver(prevScript, whichType, vSolutions))
            return false;
        int nArgsExpected = ScriptSigArgsExpected(whichType, vSolutions);
        if (nArgsExpected < 0)
            return false;

        // Transactions with extra stuff in their scriptSigs are
        // non-standard. Note that this EvalScript() call will
        // be quick, because if there are any operations
        // beside "push data" in the scriptSig the
        // IsStandard() call returns false
        vector<vector<unsigned char> > stack;
        if (!EvalScript(stack, vin[i].scriptSig, *this, i, false, 0))
            return false;

        if (whichType == TX_SCRIPTHASH)
        {
            if (stack.empty())
                return false;
            // The last stack element is the serialized redeem script; it
            // must itself be a standard, non-P2SH script.
            CScript subscript(stack.back().begin(), stack.back().end());
            vector<vector<unsigned char> > vSolutions2;
            txnouttype whichType2;
            if (!Solver(subscript, whichType2, vSolutions2))
                return false;
            if (whichType2 == TX_SCRIPTHASH)
                return false;

            int tmpExpected;
            tmpExpected = ScriptSigArgsExpected(whichType2, vSolutions2);
            if (tmpExpected < 0)
                return false;
            nArgsExpected += tmpExpected;
        }

        // The scriptSig must push exactly as many items as the scripts consume.
        if (stack.size() != (unsigned int)nArgsExpected)
            return false;
    }

    return true;
}

// Count sigops the pre-P2SH ("legacy") way: no redeem-script inspection.
unsigned int CTransaction::GetLegacySigOpCount() const
{
    unsigned int nSigOps = 0;
    BOOST_FOREACH(const CTxIn& txin, vin)
    {
        nSigOps += txin.scriptSig.GetSigOpCount(false);
    }
    BOOST_FOREACH(const CTxOut& txout, vout)
    {
        nSigOps += txout.scriptPubKey.GetSigOpCount(false);
    }
    return nSigOps;
}

// Fill in hashBlock/nIndex/vMerkleBranch for this transaction. Returns the
// depth of the containing block in the main chain, or 0 if not found.
int CMerkleTx::SetMerkleBranch(const CBlock* pblock)
{
    CBlock blockTmp;
    if (pblock == NULL) {
        // Locate the block containing this tx via the coin database.
        CCoins coins;
        if (pcoinsTip->GetCoins(GetHash(), coins)) {
            CBlockIndex *pindex = FindBlockByHeight(coins.nHeight);
            if (pindex) {
                if
                (!blockTmp.ReadFromDisk(pindex))
                    return 0;
                pblock = &blockTmp;
            }
        }
    }

    if (pblock) {
        // Update the tx's hashBlock
        hashBlock = pblock->GetHash();

        // Locate the transaction
        for (nIndex = 0; nIndex < (int)pblock->vtx.size(); nIndex++)
            if (pblock->vtx[nIndex] == *(CTransaction*)this)
                break;
        if (nIndex == (int)pblock->vtx.size())
        {
            vMerkleBranch.clear();
            nIndex = -1;
            printf("ERROR: SetMerkleBranch() : couldn't find tx in block\n");
            return 0;
        }

        // Fill in merkle branch
        vMerkleBranch = pblock->GetMerkleBranch(nIndex);
    }

    // Is the tx in a block that's in the main chain
    map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hashBlock);
    if (mi == mapBlockIndex.end())
        return 0;
    CBlockIndex* pindex = (*mi).second;
    if (!pindex || !pindex->IsInMainChain())
        return 0;

    return pindexBest->nHeight - pindex->nHeight + 1;
}

// Context-free sanity checks (no UTXO or chain access); DoS scores are
// reported through the validation state.
bool CTransaction::CheckTransaction(CValidationState &state) const
{
    // Basic checks that don't depend on any context
    if (vin.empty())
        return state.DoS(10, error("CTransaction::CheckTransaction() : vin empty"));
    if (vout.empty())
        return state.DoS(10, error("CTransaction::CheckTransaction() : vout empty"));
    // Size limits
    if (::GetSerializeSize(*this, SER_NETWORK, PROTOCOL_VERSION) > MAX_BLOCK_SIZE)
        return state.DoS(100, error("CTransaction::CheckTransaction() : size limits failed"));

    // Check for negative or overflow output values
    int64 nValueOut = 0;
    BOOST_FOREACH(const CTxOut& txout, vout)
    {
        if (txout.nValue < 0)
            return state.DoS(100, error("CTransaction::CheckTransaction() : txout.nValue negative"));
        if (txout.nValue > MAX_MONEY)
            return state.DoS(100, error("CTransaction::CheckTransaction() : txout.nValue too high"));
        nValueOut += txout.nValue;
        if (!MoneyRange(nValueOut))
            return state.DoS(100, error("CTransaction::CheckTransaction() : txout total out of range"));
    }

    // Check for duplicate inputs
    set<COutPoint> vInOutPoints;
    BOOST_FOREACH(const CTxIn& txin, vin)
    {
        if (vInOutPoints.count(txin.prevout))
            return state.DoS(100,
                error("CTransaction::CheckTransaction() : duplicate inputs"));
        vInOutPoints.insert(txin.prevout);
    }

    if (IsCoinBase())
    {
        // A coinbase scriptSig carries arbitrary data; only its size is bounded.
        if (vin[0].scriptSig.size() < 2 || vin[0].scriptSig.size() > 100)
            return state.DoS(100, error("CTransaction::CheckTransaction() : coinbase script size"));
    }
    else
    {
        BOOST_FOREACH(const CTxIn& txin, vin)
            if (txin.prevout.IsNull())
                return state.DoS(10, error("CTransaction::CheckTransaction() : prevout is null"));
    }

    return true;
}

// Minimum fee required for this transaction, depending on mode (send vs
// relay), the space already used in the block, and whether free
// transactions are allowed.
int64 CTransaction::GetMinFee(unsigned int nBlockSize, bool fAllowFree, enum GetMinFee_mode mode) const
{
    // Base fee is either nMinTxFee or nMinRelayTxFee
    int64 nBaseFee = (mode == GMF_RELAY) ? nMinRelayTxFee : nMinTxFee;

    unsigned int nBytes = ::GetSerializeSize(*this, SER_NETWORK, PROTOCOL_VERSION);
    unsigned int nNewBlockSize = nBlockSize + nBytes;
    // Fee scales per started kilobyte.
    int64 nMinFee = (1 + (int64)nBytes / 1000) * nBaseFee;

    if (fAllowFree)
    {
        // There is a free transaction area in blocks created by most miners,
        // * If we are relaying we allow transactions up to DEFAULT_BLOCK_PRIORITY_SIZE - 1000
        //   to be considered to fall into this category. We don't want to encourage sending
        //   multiple transactions instead of one big transaction to avoid fees.
        // * If we are creating a transaction we allow transactions up to 1,000 bytes
        //   to be considered safe and assume they can likely make it into this section.
        if (nBytes < (mode == GMF_SEND ? 1000 : (DEFAULT_BLOCK_PRIORITY_SIZE - 1000)))
            nMinFee = 0;
    }

    // This code can be removed after enough miners have upgraded to version 0.9.
// Until then, be safe when sending and require a fee if any output // is less than CENT: if (nMinFee < nBaseFee && mode == GMF_SEND) { BOOST_FOREACH(const CTxOut& txout, vout) if (txout.nValue < CENT) nMinFee = nBaseFee; } // Raise the price as the block approaches full if (nBlockSize != 1 && nNewBlockSize >= MAX_BLOCK_SIZE_GEN/2) { if (nNewBlockSize >= MAX_BLOCK_SIZE_GEN) return MAX_MONEY; nMinFee *= MAX_BLOCK_SIZE_GEN / (MAX_BLOCK_SIZE_GEN - nNewBlockSize); } if (!MoneyRange(nMinFee)) nMinFee = MAX_MONEY; return nMinFee; } void CTxMemPool::pruneSpent(const uint256 &hashTx, CCoins &coins) { LOCK(cs); std::map<COutPoint, CInPoint>::iterator it = mapNextTx.lower_bound(COutPoint(hashTx, 0)); // iterate over all COutPoints in mapNextTx whose hash equals the provided hashTx while (it != mapNextTx.end() && it->first.hash == hashTx) { coins.Spend(it->first.n); // and remove those outputs from coins it++; } } bool CTxMemPool::accept(CValidationState &state, CTransaction &tx, bool fCheckInputs, bool fLimitFree, bool* pfMissingInputs) { if (pfMissingInputs) *pfMissingInputs = false; if (!tx.CheckTransaction(state)) return error("CTxMemPool::accept() : CheckTransaction failed"); // Coinbase is only valid in a block, not as a loose transaction if (tx.IsCoinBase()) return state.DoS(100, error("CTxMemPool::accept() : coinbase as individual tx")); // To help v0.1.5 clients who would see it as a negative number if ((int64)tx.nLockTime > std::numeric_limits<int>::max()) return error("CTxMemPool::accept() : not accepting nLockTime beyond 2038 yet"); // Rather not work on nonstandard transactions (unless -testnet) string strNonStd; if (!fTestNet && !tx.IsStandard(strNonStd)) return error("CTxMemPool::accept() : nonstandard transaction (%s)", strNonStd.c_str()); // is it already in the memory pool? 
uint256 hash = tx.GetHash(); { LOCK(cs); if (mapTx.count(hash)) return false; } // Check for conflicts with in-memory transactions CTransaction* ptxOld = NULL; for (unsigned int i = 0; i < tx.vin.size(); i++) { COutPoint outpoint = tx.vin[i].prevout; if (mapNextTx.count(outpoint)) { // Disable replacement feature for now return false; // Allow replacing with a newer version of the same transaction if (i != 0) return false; ptxOld = mapNextTx[outpoint].ptx; if (ptxOld->IsFinal()) return false; if (!tx.IsNewerThan(*ptxOld)) return false; for (unsigned int i = 0; i < tx.vin.size(); i++) { COutPoint outpoint = tx.vin[i].prevout; if (!mapNextTx.count(outpoint) || mapNextTx[outpoint].ptx != ptxOld) return false; } break; } } if (fCheckInputs) { CCoinsView dummy; CCoinsViewCache view(dummy); { LOCK(cs); CCoinsViewMemPool viewMemPool(*pcoinsTip, *this); view.SetBackend(viewMemPool); // do we already have it? if (view.HaveCoins(hash)) return false; // do all inputs exist? // Note that this does not check for the presence of actual outputs (see the next check for that), // only helps filling in pfMissingInputs (to determine missing vs spent). BOOST_FOREACH(const CTxIn txin, tx.vin) { if (!view.HaveCoins(txin.prevout.hash)) { if (pfMissingInputs) *pfMissingInputs = true; return false; } } // are the actual inputs available? if (!tx.HaveInputs(view)) return state.Invalid(error("CTxMemPool::accept() : inputs already spent")); // Bring the best block into scope view.GetBestBlock(); // we have all inputs cached now, so switch back to dummy, so we don't need to keep lock on mempool view.SetBackend(dummy); } // Check for non-standard pay-to-script-hash in inputs if (!tx.AreInputsStandard(view) && !fTestNet) return error("CTxMemPool::accept() : nonstandard transaction input"); // Note: if you modify this code to accept non-standard transactions, then // you should add code here to check that the transaction does a // reasonable number of ECDSA signature verifications. 
        int64 nFees = tx.GetValueIn(view)-tx.GetValueOut();
        unsigned int nSize = ::GetSerializeSize(tx, SER_NETWORK, PROTOCOL_VERSION);

        // Don't accept it if it can't get into a block
        int64 txMinFee = tx.GetMinFee(1000, true, GMF_RELAY);
        if (fLimitFree && nFees < txMinFee)
            return error("CTxMemPool::accept() : not enough fees %s, %"PRI64d" < %"PRI64d, hash.ToString().c_str(), nFees, txMinFee);

        // Continuously rate-limit free transactions
        // This mitigates 'penny-flooding' -- sending thousands of free transactions just to
        // be annoying or make others' transactions take longer to confirm.
        if (fLimitFree && nFees < CTransaction::nMinRelayTxFee)
        {
            // NOTE: function-static state shared across calls; updated under cs below.
            static double dFreeCount;
            static int64 nLastTime;
            int64 nNow = GetTime();

            LOCK(cs);

            // Use an exponentially decaying ~10-minute window:
            dFreeCount *= pow(1.0 - 1.0/600.0, (double)(nNow - nLastTime));
            nLastTime = nNow;
            // -limitfreerelay unit is thousand-bytes-per-minute
            // At default rate it would take over a month to fill 1GB
            if (dFreeCount >= GetArg("-limitfreerelay", 15)*10*1000)
                return error("CTxMemPool::accept() : free transaction rejected by rate limiter");
            if (fDebug)
                printf("Rate limit dFreeCount: %g => %g\n", dFreeCount, dFreeCount+nSize);
            dFreeCount += nSize;
        }

        // Check against previous transactions
        // This is done last to help prevent CPU exhaustion denial-of-service attacks.
        if (!tx.CheckInputs(state, view, true, SCRIPT_VERIFY_P2SH | SCRIPT_VERIFY_STRICTENC))
        {
            return error("CTxMemPool::accept() : ConnectInputs failed %s", hash.ToString().c_str());
        }
    }

    // Store transaction in memory
    {
        LOCK(cs);
        if (ptxOld)
        {
            printf("CTxMemPool::accept() : replacing tx %s with new version\n", ptxOld->GetHash().ToString().c_str());
            remove(*ptxOld);
        }
        addUnchecked(hash, tx);
    }

    ///// are we sure this is ok when loading transactions or restoring block txes
    // If updated, erase old tx from wallet
    if (ptxOld)
        EraseFromWallets(ptxOld->GetHash());

    SyncWithWallets(hash, tx, NULL, true);

    return true;
}

// Thin wrapper: submit this transaction to the global mempool, converting
// std::runtime_error (e.g. database/disk trouble) into a state abort.
bool CTransaction::AcceptToMemoryPool(CValidationState &state, bool fCheckInputs, bool fLimitFree, bool* pfMissingInputs)
{
    try {
        return mempool.accept(state, *this, fCheckInputs, fLimitFree, pfMissingInputs);
    } catch(std::runtime_error &e) {
        return state.Abort(_("System error: ") + e.what());
    }
}

bool CTxMemPool::addUnchecked(const uint256& hash, const CTransaction &tx)
{
    // Add to memory pool without checking anything. Don't call this directly,
    // call CTxMemPool::accept to properly check the transaction first.
    {
        mapTx[hash] = tx;
        // Index each spent outpoint back to the pooled transaction.
        for (unsigned int i = 0; i < tx.vin.size(); i++)
            mapNextTx[tx.vin[i].prevout] = CInPoint(&mapTx[hash], i);
        nTransactionsUpdated++;
    }
    return true;
}

// Remove tx from the pool; when fRecursive, also remove any pooled
// transactions spending its outputs.
bool CTxMemPool::remove(const CTransaction &tx, bool fRecursive)
{
    // Remove transaction from memory pool
    {
        LOCK(cs);
        uint256 hash = tx.GetHash();
        if (fRecursive) {
            for (unsigned int i = 0; i < tx.vout.size(); i++) {
                std::map<COutPoint, CInPoint>::iterator it = mapNextTx.find(COutPoint(hash, i));
                if (it != mapNextTx.end())
                    remove(*it->second.ptx, true);
            }
        }
        if (mapTx.count(hash))
        {
            BOOST_FOREACH(const CTxIn& txin, tx.vin)
                mapNextTx.erase(txin.prevout);
            mapTx.erase(hash);
            nTransactionsUpdated++;
        }
    }
    return true;
}

// Remove pooled transactions that spend outputs also spent by tx
// (and, recursively, their descendants).
bool CTxMemPool::removeConflicts(const CTransaction &tx)
{
    // Remove transactions which depend on inputs of tx, recursively
    LOCK(cs);
    BOOST_FOREACH(const CTxIn &txin, tx.vin) {
        std::map<COutPoint, CInPoint>::iterator it = mapNextTx.find(txin.prevout);
        if (it != mapNextTx.end()) {
            const CTransaction &txConflict = *it->second.ptx;
            if (txConflict != tx)
                remove(txConflict, true);
        }
    }
    return true;
}

void CTxMemPool::clear()
{
    LOCK(cs);
    mapTx.clear();
    mapNextTx.clear();
    ++nTransactionsUpdated;
}

// Copy all pooled txids into vtxid.
void CTxMemPool::queryHashes(std::vector<uint256>& vtxid)
{
    vtxid.clear();

    LOCK(cs);
    vtxid.reserve(mapTx.size());
    for (map<uint256, CTransaction>::iterator mi = mapTx.begin(); mi != mapTx.end(); ++mi)
        vtxid.push_back((*mi).first);
}

// Number of main-chain confirmations for this tx, also returning (via
// pindexRet) the containing block index. Returns 0 when unconfirmed or
// when the stored merkle branch does not verify.
int CMerkleTx::GetDepthInMainChain(CBlockIndex* &pindexRet) const
{
    if (hashBlock == 0 || nIndex == -1)
        return 0;

    // Find the block it claims to be in
    map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hashBlock);
    if (mi == mapBlockIndex.end())
        return 0;
    CBlockIndex* pindex = (*mi).second;
    if (!pindex || !pindex->IsInMainChain())
        return 0;

    // Make sure the merkle branch connects to this block
    if (!fMerkleVerified)
    {
        if (CBlock::CheckMerkleBranch(GetHash(), vMerkleBranch, nIndex) != pindex->hashMerkleRoot)
            return 0;
        fMerkleVerified = true;
    }

    pindexRet = pindex;
    return
        pindexBest->nHeight - pindex->nHeight + 1;
}

int CMerkleTx::GetBlocksToMaturity() const
{
    if (!IsCoinBase())
        return 0;
    return max(0, (COINBASE_MATURITY+2) - GetDepthInMainChain());
}

bool CMerkleTx::AcceptToMemoryPool(bool fCheckInputs, bool fLimitFree)
{
    CValidationState state;
    return CTransaction::AcceptToMemoryPool(state, fCheckInputs, fLimitFree);
}

// Submit this wallet tx to the mempool, first submitting any supporting
// previous transactions that are not yet pooled.
bool CWalletTx::AcceptWalletTransaction(bool fCheckInputs)
{
    {
        LOCK(mempool.cs);
        // Add previous supporting transactions first
        BOOST_FOREACH(CMerkleTx& tx, vtxPrev)
        {
            if (!tx.IsCoinBase())
            {
                uint256 hash = tx.GetHash();
                if (!mempool.exists(hash) && pcoinsTip->HaveCoins(hash))
                    tx.AcceptToMemoryPool(fCheckInputs, false);
            }
        }
        return AcceptToMemoryPool(fCheckInputs, false);
    }
    return false;
}

// Return transaction in tx, and if it was found inside a block, its hash is placed in hashBlock
bool GetTransaction(const uint256 &hash, CTransaction &txOut, uint256 &hashBlock, bool fAllowSlow)
{
    CBlockIndex *pindexSlow = NULL;
    {
        LOCK(cs_main);
        {
            // Fast path 1: the memory pool.
            LOCK(mempool.cs);
            if (mempool.exists(hash))
            {
                txOut = mempool.lookup(hash);
                return true;
            }
        }

        // Fast path 2: the optional on-disk transaction index.
        if (fTxIndex) {
            CDiskTxPos postx;
            if (pblocktree->ReadTxIndex(hash, postx)) {
                CAutoFile file(OpenBlockFile(postx, true), SER_DISK, CLIENT_VERSION);
                CBlockHeader header;
                try {
                    file >> header;
                    fseek(file, postx.nTxOffset, SEEK_CUR);
                    file >> txOut;
                } catch (std::exception &e) {
                    return error("%s() : deserialize or I/O error", __PRETTY_FUNCTION__);
                }
                hashBlock = header.GetHash();
                if (txOut.GetHash() != hash)
                    return error("%s() : txid mismatch", __PRETTY_FUNCTION__);
                return true;
            }
        }

        // Slow path: locate the block via the coin database and scan it.
        if (fAllowSlow) {
            // use coin database to locate block that contains transaction, and scan it
            int nHeight = -1;
            {
                CCoinsViewCache &view = *pcoinsTip;
                CCoins coins;
                if (view.GetCoins(hash, coins))
                    nHeight = coins.nHeight;
            }
            if (nHeight > 0)
                pindexSlow = FindBlockByHeight(nHeight);
        }
    }

    if (pindexSlow) {
        CBlock block;
        if (block.ReadFromDisk(pindexSlow)) {
            BOOST_FOREACH(const CTransaction &tx, block.vtx) {
                if (tx.GetHash() ==
                    hash) {
                    txOut = tx;
                    hashBlock = pindexSlow->GetBlockHash();
                    return true;
                }
            }
        }
    }

    return false;
}

//////////////////////////////////////////////////////////////////////////////
//
// CBlock and CBlockIndex
//

// Cache of the previous lookup result, used as an extra start candidate.
static CBlockIndex* pblockindexFBBHLast;

// Find the main-chain block index at the given height, walking from
// whichever of genesis / tip / last-result is closest.
CBlockIndex* FindBlockByHeight(int nHeight)
{
    CBlockIndex *pblockindex;
    if (nHeight < nBestHeight / 2)
        pblockindex = pindexGenesisBlock;
    else
        pblockindex = pindexBest;
    if (pblockindexFBBHLast && abs(nHeight - pblockindex->nHeight) > abs(nHeight - pblockindexFBBHLast->nHeight))
        pblockindex = pblockindexFBBHLast;
    while (pblockindex->nHeight > nHeight)
        pblockindex = pblockindex->pprev;
    while (pblockindex->nHeight < nHeight)
        pblockindex = pblockindex->pnext;
    pblockindexFBBHLast = pblockindex;
    return pblockindex;
}

// Read the block for pindex from disk and verify its hash matches the index.
bool CBlock::ReadFromDisk(const CBlockIndex* pindex)
{
    if (!ReadFromDisk(pindex->GetBlockPos()))
        return false;
    if (GetHash() != pindex->GetBlockHash())
        return error("CBlock::ReadFromDisk() : GetHash() doesn't match index");
    return true;
}

uint256 static GetOrphanRoot(const CBlockHeader* pblock)
{
    // Work back to the first block in the orphan chain
    while (mapOrphanBlocks.count(pblock->hashPrevBlock))
        pblock = mapOrphanBlocks[pblock->hashPrevBlock];
    return pblock->GetHash();
}

// Block subsidy at the given height, plus collected fees.
int64 static GetBlockValue(int nHeight, int64 nFees)
{
    int64 nSubsidy = 700 * COIN;

    // Subsidy is cut in half every 50000000 blocks, which will occur approximately every 4 years
    // NOTE(review): at 600s target spacing, 50,000,000 blocks is far more
    // than 4 years -- the comment and the constant disagree; verify intent.
    nSubsidy >>= (nHeight / 50000000);

    return nSubsidy + nFees;
}

// Difficulty retarget parameters: one-week timespan, 10-minute spacing.
static const int64 nTargetTimespan = 604800;
static const int64 nTargetSpacing = 600;
static const int64 nInterval = nTargetTimespan / nTargetSpacing;

//
// minimum amount of work that could possibly be required nTime after
// minimum work required was nBase
//
unsigned int ComputeMinWork(unsigned int nBase, int64 nTime)
{
    // Testnet has min-difficulty blocks
    // after nTargetSpacing*2 time between blocks:
    if (fTestNet && nTime > nTargetSpacing*2)
        return bnProofOfWorkLimit.GetCompact();

    CBigNum bnResult;
    bnResult.SetCompact(nBase);
    while (nTime > 0 && bnResult < bnProofOfWorkLimit)
    {
        // Maximum 400% adjustment...
        bnResult *= 4;
        // ... in best-case exactly 4-times-normal target time
        nTime -= nTargetTimespan*4;
    }
    if (bnResult > bnProofOfWorkLimit)
        bnResult = bnProofOfWorkLimit;
    return bnResult.GetCompact();
}

// Compute the proof-of-work requirement for the block following pindexLast.
unsigned int static GetNextWorkRequired(const CBlockIndex* pindexLast, const CBlockHeader *pblock)
{
    unsigned int nProofOfWorkLimit = bnProofOfWorkLimit.GetCompact();

    // Genesis block
    if (pindexLast == NULL)
        return nProofOfWorkLimit;

    // Only change once per interval
    if ((pindexLast->nHeight+1) % nInterval != 0)
    {
        // Special difficulty rule for testnet:
        if (fTestNet)
        {
            // If the new block's timestamp is more than 2* 10 minutes
            // then allow mining of a min-difficulty block.
            if (pblock->nTime > pindexLast->nTime + nTargetSpacing*2)
                return nProofOfWorkLimit;
            else
            {
                // Return the last non-special-min-difficulty-rules-block
                const CBlockIndex* pindex = pindexLast;
                while (pindex->pprev && pindex->nHeight % nInterval != 0 && pindex->nBits == nProofOfWorkLimit)
                    pindex = pindex->pprev;
                return pindex->nBits;
            }
        }

        return pindexLast->nBits;
    }

    // Go back by what we want to be 14 days worth of blocks
    const CBlockIndex* pindexFirst = pindexLast;
    for (int i = 0; pindexFirst && i < nInterval-1; i++)
        pindexFirst = pindexFirst->pprev;
    assert(pindexFirst);

    // Limit adjustment step
    int64 nActualTimespan = pindexLast->GetBlockTime() - pindexFirst->GetBlockTime();
    printf(" nActualTimespan = %"PRI64d" before bounds\n", nActualTimespan);
    if (nActualTimespan < nTargetTimespan/4)
        nActualTimespan = nTargetTimespan/4;
    if (nActualTimespan > nTargetTimespan*4)
        nActualTimespan = nTargetTimespan*4;

    // Retarget
    CBigNum bnNew;
    bnNew.SetCompact(pindexLast->nBits);
    bnNew *= nActualTimespan;
    bnNew /= nTargetTimespan;

    if (bnNew > bnProofOfWorkLimit)
        bnNew = bnProofOfWorkLimit;

    /// debug print
    printf("GetNextWorkRequired RETARGET\n");
    printf("nTargetTimespan = %"PRI64d" nActualTimespan = %"PRI64d"\n",
        nTargetTimespan, nActualTimespan);
    printf("Before: %08x %s\n", pindexLast->nBits, CBigNum().SetCompact(pindexLast->nBits).getuint256().ToString().c_str());
    printf("After: %08x %s\n", bnNew.GetCompact(), bnNew.getuint256().ToString().c_str());

    return bnNew.GetCompact();
}

// Check that a block hash satisfies the compact difficulty target nBits.
bool CheckProofOfWork(uint256 hash, unsigned int nBits)
{
    CBigNum bnTarget;
    bnTarget.SetCompact(nBits);

    // Check range
    if (bnTarget <= 0 || bnTarget > bnProofOfWorkLimit)
        return error("CheckProofOfWork() : nBits below minimum work");

    // Check proof of work matches claimed amount
    if (hash > bnTarget.getuint256())
        return error("CheckProofOfWork() : hash doesn't match nBits");

    return true;
}

// Return maximum amount of blocks that other nodes claim to have
int GetNumBlocksOfPeers()
{
    return std::max(cPeerBlockCounts.median(), Checkpoints::GetTotalBlocksEstimate());
}

// Heuristic: are we still catching up with the network?
bool IsInitialBlockDownload()
{
    if (pindexBest == NULL || fImporting || fReindex || nBestHeight < Checkpoints::GetTotalBlocksEstimate())
        return true;
    static int64 nLastUpdate;
    static CBlockIndex* pindexLastBest;
    if (pindexBest != pindexLastBest)
    {
        pindexLastBest = pindexBest;
        nLastUpdate = GetTime();
    }
    // Still downloading if the tip changed in the last 10s and is over a day old.
    return (GetTime() - nLastUpdate < 10 && pindexBest->GetBlockTime() < GetTime() - 24 * 60 * 60);
}

// Record (and log) that a higher-work invalid chain has been seen.
void static InvalidChainFound(CBlockIndex* pindexNew)
{
    if (pindexNew->nChainWork > nBestInvalidWork)
    {
        nBestInvalidWork = pindexNew->nChainWork;
        pblocktree->WriteBestInvalidWork(CBigNum(nBestInvalidWork));
        uiInterface.NotifyBlocksChanged();
    }
    printf("InvalidChainFound: invalid block=%s height=%d log2_work=%.8g date=%s\n",
        pindexNew->GetBlockHash().ToString().c_str(), pindexNew->nHeight,
        log(pindexNew->nChainWork.getdouble())/log(2.0),
        DateTimeStrFormat("%Y-%m-%d %H:%M:%S", pindexNew->GetBlockTime()).c_str());
    printf("InvalidChainFound: current best=%s height=%d log2_work=%.8g date=%s\n",
        hashBestChain.ToString().c_str(), nBestHeight,
        log(nBestChainWork.getdouble())/log(2.0),
        DateTimeStrFormat("%Y-%m-%d %H:%M:%S",
        pindexBest->GetBlockTime()).c_str());
    if (pindexBest && nBestInvalidWork > nBestChainWork + (pindexBest->GetBlockWork() * 6).getuint256())
        printf("InvalidChainFound: Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.\n");
}

// Mark a block invalid on disk and in memory, then reorganise away from it.
void static InvalidBlockFound(CBlockIndex *pindex) {
    pindex->nStatus |= BLOCK_FAILED_VALID;
    pblocktree->WriteBlockIndex(CDiskBlockIndex(pindex));
    setBlockIndexValid.erase(pindex);
    InvalidChainFound(pindex);
    if (pindex->pnext) {
        CValidationState stateDummy;
        ConnectBestBlock(stateDummy); // reorganise away from the failed block
    }
}

// Repeatedly switch the active chain to the most-work valid candidate in
// setBlockIndexValid until no better candidate remains.
bool ConnectBestBlock(CValidationState &state) {
    do {
        CBlockIndex *pindexNewBest;

        {
            // The candidate with the most chain work sorts last in the set.
            std::set<CBlockIndex*,CBlockIndexWorkComparator>::reverse_iterator it = setBlockIndexValid.rbegin();
            if (it == setBlockIndexValid.rend())
                return true;
            pindexNewBest = *it;
        }

        if (pindexNewBest == pindexBest || (pindexBest && pindexNewBest->nChainWork == pindexBest->nChainWork))
            return true; // nothing to do

        // check ancestry
        CBlockIndex *pindexTest = pindexNewBest;
        std::vector<CBlockIndex*> vAttach;
        do {
            if (pindexTest->nStatus & BLOCK_FAILED_MASK) {
                // mark descendants failed
                CBlockIndex *pindexFailed = pindexNewBest;
                while (pindexTest != pindexFailed) {
                    pindexFailed->nStatus |= BLOCK_FAILED_CHILD;
                    setBlockIndexValid.erase(pindexFailed);
                    pblocktree->WriteBlockIndex(CDiskBlockIndex(pindexFailed));
                    pindexFailed = pindexFailed->pprev;
                }
                InvalidChainFound(pindexNewBest);
                break;
            }

            if (pindexBest == NULL || pindexTest->nChainWork > pindexBest->nChainWork)
                vAttach.push_back(pindexTest);

            if (pindexTest->pprev == NULL || pindexTest->pnext != NULL) {
                // Reached the fork point: connect the collected blocks oldest-first.
                reverse(vAttach.begin(), vAttach.end());
                BOOST_FOREACH(CBlockIndex *pindexSwitch, vAttach) {
                    boost::this_thread::interruption_point();
                    try {
                        if (!SetBestChain(state, pindexSwitch))
                            return false;
                    } catch(std::runtime_error &e) {
                        return state.Abort(_("System error: ") + e.what());
                    }
                }
                return true;
            }
            pindexTest = pindexTest->pprev;
        }
while(true); } while(true); } void CBlockHeader::UpdateTime(const CBlockIndex* pindexPrev) { nTime = max(pindexPrev->GetMedianTimePast()+1, GetAdjustedTime()); // Updating time can change work required on testnet: if (fTestNet) nBits = GetNextWorkRequired(pindexPrev, this); } const CTxOut &CTransaction::GetOutputFor(const CTxIn& input, CCoinsViewCache& view) { const CCoins &coins = view.GetCoins(input.prevout.hash); assert(coins.IsAvailable(input.prevout.n)); return coins.vout[input.prevout.n]; } int64 CTransaction::GetValueIn(CCoinsViewCache& inputs) const { if (IsCoinBase()) return 0; int64 nResult = 0; for (unsigned int i = 0; i < vin.size(); i++) nResult += GetOutputFor(vin[i], inputs).nValue; return nResult; } unsigned int CTransaction::GetP2SHSigOpCount(CCoinsViewCache& inputs) const { if (IsCoinBase()) return 0; unsigned int nSigOps = 0; for (unsigned int i = 0; i < vin.size(); i++) { const CTxOut &prevout = GetOutputFor(vin[i], inputs); if (prevout.scriptPubKey.IsPayToScriptHash()) nSigOps += prevout.scriptPubKey.GetSigOpCount(vin[i].scriptSig); } return nSigOps; } void CTransaction::UpdateCoins(CValidationState &state, CCoinsViewCache &inputs, CTxUndo &txundo, int nHeight, const uint256 &txhash) const { // mark inputs spent if (!IsCoinBase()) { BOOST_FOREACH(const CTxIn &txin, vin) { CCoins &coins = inputs.GetCoins(txin.prevout.hash); CTxInUndo undo; assert(coins.Spend(txin.prevout, undo)); txundo.vprevout.push_back(undo); } } // add outputs assert(inputs.SetCoins(txhash, CCoins(*this, nHeight))); } bool CTransaction::HaveInputs(CCoinsViewCache &inputs) const { if (!IsCoinBase()) { // first check whether information about the prevout hash is available for (unsigned int i = 0; i < vin.size(); i++) { const COutPoint &prevout = vin[i].prevout; if (!inputs.HaveCoins(prevout.hash)) return false; } // then check whether the actual outputs are available for (unsigned int i = 0; i < vin.size(); i++) { const COutPoint &prevout = vin[i].prevout; const CCoins &coins 
                = inputs.GetCoins(prevout.hash);
            if (!coins.IsAvailable(prevout.n))
                return false;
        }
    }
    return true;
}

// Deferred script-verification job: verify our input's scriptSig against
// the stored scriptPubKey.
bool CScriptCheck::operator()() const {
    const CScript &scriptSig = ptxTo->vin[nIn].scriptSig;
    if (!VerifyScript(scriptSig, scriptPubKey, *ptxTo, nIn, nFlags, nHashType))
        return error("CScriptCheck() : %s VerifySignature failed", ptxTo->GetHash().ToString().c_str());
    return true;
}

// Synchronous convenience wrapper around CScriptCheck.
bool VerifySignature(const CCoins& txFrom, const CTransaction& txTo, unsigned int nIn, unsigned int flags, int nHashType)
{
    return CScriptCheck(txFrom, txTo, nIn, flags, nHashType)();
}

// Full contextual input checks: availability, coinbase maturity, value
// ranges and (optionally) script verification. When pvChecks is non-NULL
// the script checks are queued there instead of being run inline.
bool CTransaction::CheckInputs(CValidationState &state, CCoinsViewCache &inputs, bool fScriptChecks, unsigned int flags, std::vector<CScriptCheck> *pvChecks) const
{
    if (!IsCoinBase())
    {
        if (pvChecks)
            pvChecks->reserve(vin.size());

        // This doesn't trigger the DoS code on purpose; if it did, it would make it easier
        // for an attacker to attempt to split the network.
        if (!HaveInputs(inputs))
            return state.Invalid(error("CheckInputs() : %s inputs unavailable", GetHash().ToString().c_str()));

        // While checking, GetBestBlock() refers to the parent block.
        // This is also true for mempool checks.
        // CheckInputs (continued): cheap per-input economic checks first,
        // expensive ECDSA verification only after all of them pass.
        int nSpendHeight = inputs.GetBestBlock()->nHeight + 1;
        int64 nValueIn = 0;
        int64 nFees = 0;
        for (unsigned int i = 0; i < vin.size(); i++)
        {
            const COutPoint &prevout = vin[i].prevout;
            const CCoins &coins = inputs.GetCoins(prevout.hash);

            // If prev is coinbase, check that it's matured
            if (coins.IsCoinBase()) {
                if (nSpendHeight - coins.nHeight < COINBASE_MATURITY)
                    return state.Invalid(error("CheckInputs() : tried to spend coinbase at depth %d", nSpendHeight - coins.nHeight));
            }

            // Check for negative or overflow input values
            nValueIn += coins.vout[prevout.n].nValue;
            if (!MoneyRange(coins.vout[prevout.n].nValue) || !MoneyRange(nValueIn))
                return state.DoS(100, error("CheckInputs() : txin values out of range"));
        }

        if (nValueIn < GetValueOut())
            return state.DoS(100, error("CheckInputs() : %s value in < value out", GetHash().ToString().c_str()));

        // Tally transaction fees
        int64 nTxFee = nValueIn - GetValueOut();
        if (nTxFee < 0)
            return state.DoS(100, error("CheckInputs() : %s nTxFee < 0", GetHash().ToString().c_str()));
        nFees += nTxFee;
        if (!MoneyRange(nFees))
            return state.DoS(100, error("CheckInputs() : nFees out of range"));

        // The first loop above does all the inexpensive checks.
        // Only if ALL inputs pass do we perform expensive ECDSA signature checks.
        // Helps prevent CPU exhaustion attacks.

        // Skip ECDSA signature verification when connecting blocks
        // before the last block chain checkpoint. This is safe because block merkle hashes are
        // still computed and checked, and any change will be caught at the next checkpoint.
        if (fScriptChecks) {
            for (unsigned int i = 0; i < vin.size(); i++) {
                const COutPoint &prevout = vin[i].prevout;
                const CCoins &coins = inputs.GetCoins(prevout.hash);

                // Verify signature
                CScriptCheck check(coins, *this, i, flags, 0);
                if (pvChecks) {
                    // Defer: move the check into the caller-supplied batch for
                    // the parallel script-check threads.
                    pvChecks->push_back(CScriptCheck());
                    check.swap(pvChecks->back());
                } else if (!check()) {
                    if (flags & SCRIPT_VERIFY_STRICTENC) {
                        // For now, check whether the failure was caused by non-canonical
                        // encodings or not; if so, don't trigger DoS protection.
                        CScriptCheck check(coins, *this, i, flags & (~SCRIPT_VERIFY_STRICTENC), 0);
                        if (check())
                            return state.Invalid();
                    }
                    return state.DoS(100,false);
                }
            }
        }
    }

    return true;
}

// Undo the effects of this block on the coin view. When pfClean is provided,
// inconsistencies are reported through *pfClean instead of failing outright.
bool CBlock::DisconnectBlock(CValidationState &state, CBlockIndex *pindex, CCoinsViewCache &view, bool *pfClean)
{
    assert(pindex == view.GetBestBlock());

    if (pfClean)
        *pfClean = false;

    bool fClean = true;

    CBlockUndo blockUndo;
    CDiskBlockPos pos = pindex->GetUndoPos();
    if (pos.IsNull())
        return error("DisconnectBlock() : no undo data available");
    if (!blockUndo.ReadFromDisk(pos, pindex->pprev->GetBlockHash()))
        return error("DisconnectBlock() : failure reading undo data");

    if (blockUndo.vtxundo.size() + 1 != vtx.size())
        return error("DisconnectBlock() : block and undo data inconsistent");

    // undo transactions in reverse order
    for (int i = vtx.size() - 1; i >= 0; i--) {
        const CTransaction &tx = vtx[i];
        uint256 hash = tx.GetHash();

        // check that all outputs are available
        if (!view.HaveCoins(hash)) {
            fClean = fClean && error("DisconnectBlock() : outputs still spent? database corrupted");
            view.SetCoins(hash, CCoins());
        }
        CCoins &outs = view.GetCoins(hash);

        CCoins outsBlock = CCoins(tx, pindex->nHeight);
        // The CCoins serialization does not serialize negative numbers.
        // No network rules currently depend on the version here, so an inconsistency is harmless
        // but it must be corrected before txout nversion ever influences a network rule.
        // DisconnectBlock (continued): reconcile the reconstructed coins with
        // the stored undo data, then restore every spent input.
        if (outsBlock.nVersion < 0)
            outs.nVersion = outsBlock.nVersion;
        if (outs != outsBlock)
            fClean = fClean && error("DisconnectBlock() : added transaction mismatch? database corrupted");

        // remove outputs
        outs = CCoins();

        // restore inputs
        if (i > 0) { // not coinbases
            const CTxUndo &txundo = blockUndo.vtxundo[i-1];
            if (txundo.vprevout.size() != tx.vin.size())
                return error("DisconnectBlock() : transaction and undo data inconsistent");
            for (unsigned int j = tx.vin.size(); j-- > 0;) {
                const COutPoint &out = tx.vin[j].prevout;
                const CTxInUndo &undo = txundo.vprevout[j];
                CCoins coins;
                view.GetCoins(out.hash, coins); // this can fail if the prevout was already entirely spent
                if (undo.nHeight != 0) {
                    // undo data contains height: this is the last output of the prevout tx being spent
                    if (!coins.IsPruned())
                        fClean = fClean && error("DisconnectBlock() : undo data overwriting existing transaction");
                    coins = CCoins();
                    coins.fCoinBase = undo.fCoinBase;
                    coins.nHeight = undo.nHeight;
                    coins.nVersion = undo.nVersion;
                } else {
                    if (coins.IsPruned())
                        fClean = fClean && error("DisconnectBlock() : undo data adding output to missing transaction");
                }
                if (coins.IsAvailable(out.n))
                    fClean = fClean && error("DisconnectBlock() : undo data overwriting existing output");
                if (coins.vout.size() < out.n+1)
                    coins.vout.resize(out.n+1);
                coins.vout[out.n] = undo.txout;
                if (!view.SetCoins(out.hash, coins))
                    return error("DisconnectBlock() : cannot restore coin inputs");
            }
        }
    }

    // move best block pointer to prevout block
    view.SetBestBlock(pindex->pprev);

    if (pfClean) {
        *pfClean = fClean;
        return true;
    } else {
        return fClean;
    }
}

// Flush (and when finalizing, truncate to their used size) the current block
// file and its paired undo file.
void static FlushBlockFile(bool fFinalize = false)
{
    LOCK(cs_LastBlockFile);

    CDiskBlockPos posOld(nLastBlockFile, 0);

    FILE *fileOld = OpenBlockFile(posOld);
    if (fileOld) {
        if (fFinalize)
            TruncateFile(fileOld, infoLastBlockFile.nSize);
        FileCommit(fileOld);
        fclose(fileOld);
    }

    fileOld = OpenUndoFile(posOld);
    if (fileOld) {
        if (fFinalize)
            TruncateFile(fileOld, infoLastBlockFile.nUndoSize);
        FileCommit(fileOld);
        fclose(fileOld);
    }
}

bool FindUndoPos(CValidationState &state, int nFile, CDiskBlockPos &pos, unsigned int nAddSize);

// Shared queue servicing parallel script verification (batch size 128).
static CCheckQueue<CScriptCheck> scriptcheckqueue(128);

// Entry point for each script-verification worker thread.
void ThreadScriptCheck() {
    RenameThread("bitcoin-scriptch");
    scriptcheckqueue.Thread();
}

// Apply this block to the coin view (consensus-critical). When fJustCheck is
// true, validation runs without writing any on-disk state.
bool CBlock::ConnectBlock(CValidationState &state, CBlockIndex* pindex, CCoinsViewCache &view, bool fJustCheck)
{
    // Check it again in case a previous version let a bad block in
    if (!CheckBlock(state, !fJustCheck, !fJustCheck))
        return false;

    // verify that the view's current state corresponds to the previous block
    assert(pindex->pprev == view.GetBestBlock());

    // Special case for the genesis block, skipping connection of its transactions
    // (its coinbase is unspendable)
    if (GetHash() == hashGenesisBlock) {
        view.SetBestBlock(pindex);
        pindexGenesisBlock = pindex;
        return true;
    }

    bool fScriptChecks = pindex->nHeight >= Checkpoints::GetTotalBlocksEstimate();

    // Do not allow blocks that contain transactions which 'overwrite' older transactions,
    // unless those are already completely spent.
    // If such overwrites are allowed, coinbases and transactions depending upon those
    // can be duplicated to remove the ability to spend the first instance -- even after
    // being sent to another address.
    // See BIP30 and http://r6.ca/blog/20120206T005236Z.html for more information.
    // This logic is not necessary for memory pool transactions, as AcceptToMemoryPool
    // already refuses previously-known transaction ids entirely.
    // This rule was originally applied all blocks whose timestamp was after March 15, 2012, 0:00 UTC.
    // Now that the whole chain is irreversibly beyond that time it is applied to all blocks except the
    // two in the chain that violate it. This prevents exploiting the issue against nodes in their
    // initial block download.
    bool fEnforceBIP30 = (!pindex->phashBlock) || // Enforce on CreateNewBlock invocations which don't have a hash.
                          // ConnectBlock (continued): the BIP30 exceptions are
                          // the two historic duplicate-coinbase blocks.
                          !((pindex->nHeight==91842 && pindex->GetBlockHash() == uint256("0x00000000000a4d0a398161ffc163c503763b1f4360639393e0e4c8e300e0caec")) ||
                            (pindex->nHeight==91880 && pindex->GetBlockHash() == uint256("0x00000000000743f190a18c5577a3c2d2a1f610ae9601ac046a38084ccb7cd721")));
    if (fEnforceBIP30) {
        for (unsigned int i=0; i<vtx.size(); i++) {
            uint256 hash = GetTxHash(i);
            if (view.HaveCoins(hash) && !view.GetCoins(hash).IsPruned())
                return state.DoS(100, error("ConnectBlock() : tried to overwrite transaction"));
        }
    }

    // BIP16 didn't become active until Apr 1 2012
    int64 nBIP16SwitchTime = 1333238400;
    bool fStrictPayToScriptHash = (pindex->nTime >= nBIP16SwitchTime);

    unsigned int flags = SCRIPT_VERIFY_NOCACHE |
                         (fStrictPayToScriptHash ? SCRIPT_VERIFY_P2SH : SCRIPT_VERIFY_NONE);

    CBlockUndo blockundo;

    // Parallel script verification is engaged only when script checks are
    // enabled and worker threads were configured.
    CCheckQueueControl<CScriptCheck> control(fScriptChecks && nScriptCheckThreads ? &scriptcheckqueue : NULL);

    int64 nStart = GetTimeMicros();
    int64 nFees = 0;
    int nInputs = 0;
    unsigned int nSigOps = 0;
    CDiskTxPos pos(pindex->GetBlockPos(), GetSizeOfCompactSize(vtx.size()));
    std::vector<std::pair<uint256, CDiskTxPos> > vPos;
    vPos.reserve(vtx.size());
    for (unsigned int i=0; i<vtx.size(); i++)
    {
        const CTransaction &tx = vtx[i];

        nInputs += tx.vin.size();
        nSigOps += tx.GetLegacySigOpCount();
        if (nSigOps > MAX_BLOCK_SIGOPS)
            return state.DoS(100, error("ConnectBlock() : too many sigops"));

        if (!tx.IsCoinBase())
        {
            if (!tx.HaveInputs(view))
                return state.DoS(100, error("ConnectBlock() : inputs missing/spent"));

            if (fStrictPayToScriptHash)
            {
                // Add in sigops done by pay-to-script-hash inputs;
                // this is to prevent a "rogue miner" from creating
                // an incredibly-expensive-to-validate block.
                nSigOps += tx.GetP2SHSigOpCount(view);
                if (nSigOps > MAX_BLOCK_SIGOPS)
                    return state.DoS(100, error("ConnectBlock() : too many sigops"));
            }

            nFees += tx.GetValueIn(view)-tx.GetValueOut();

            std::vector<CScriptCheck> vChecks;
            if (!tx.CheckInputs(state, view, fScriptChecks, flags, nScriptCheckThreads ? &vChecks : NULL))
                return false;
            control.Add(vChecks);
        }

        CTxUndo txundo;
        tx.UpdateCoins(state, view, txundo, pindex->nHeight, GetTxHash(i));
        if (!tx.IsCoinBase())
            blockundo.vtxundo.push_back(txundo);

        vPos.push_back(std::make_pair(GetTxHash(i), pos));
        pos.nTxOffset += ::GetSerializeSize(tx, SER_DISK, CLIENT_VERSION);
    }
    int64 nTime = GetTimeMicros() - nStart;
    if (fBenchmark)
        printf("- Connect %u transactions: %.2fms (%.3fms/tx, %.3fms/txin)\n", (unsigned)vtx.size(), 0.001 * nTime, 0.001 * nTime / vtx.size(), nInputs <= 1 ? 0 : 0.001 * nTime / (nInputs-1));

    // Coinbase may not claim more than subsidy + collected fees.
    if (vtx[0].GetValueOut() > GetBlockValue(pindex->nHeight, nFees))
        return state.DoS(100, error("ConnectBlock() : coinbase pays too much (actual=%"PRI64d" vs limit=%"PRI64d")", vtx[0].GetValueOut(), GetBlockValue(pindex->nHeight, nFees)));

    // Wait for the parallel script checks to finish; any failure rejects the block.
    if (!control.Wait())
        return state.DoS(100, false);
    int64 nTime2 = GetTimeMicros() - nStart;
    if (fBenchmark)
        printf("- Verify %u txins: %.2fms (%.3fms/txin)\n", nInputs - 1, 0.001 * nTime2, nInputs <= 1 ? 0 : 0.001 * nTime2 / (nInputs-1));

    if (fJustCheck)
        return true;

    // Write undo information to disk
    if (pindex->GetUndoPos().IsNull() || (pindex->nStatus & BLOCK_VALID_MASK) < BLOCK_VALID_SCRIPTS)
    {
        if (pindex->GetUndoPos().IsNull()) {
            CDiskBlockPos pos;
            if (!FindUndoPos(state, pindex->nFile, pos, ::GetSerializeSize(blockundo, SER_DISK, CLIENT_VERSION) + 40))
                return error("ConnectBlock() : FindUndoPos failed");
            if (!blockundo.WriteToDisk(pos, pindex->pprev->GetBlockHash()))
                return state.Abort(_("Failed to write undo data"));

            // update nUndoPos in block index
            pindex->nUndoPos = pos.nPos;
            pindex->nStatus |= BLOCK_HAVE_UNDO;
        }

        pindex->nStatus = (pindex->nStatus & ~BLOCK_VALID_MASK) | BLOCK_VALID_SCRIPTS;

        CDiskBlockIndex blockindex(pindex);
        if (!pblocktree->WriteBlockIndex(blockindex))
            return state.Abort(_("Failed to write block index"));
    }

    if (fTxIndex)
        if (!pblocktree->WriteTxIndex(vPos))
            return state.Abort(_("Failed to write transaction index"));

    // add this block to the view's block chain
    // NOTE(review): SetBestBlock's side effect lives inside assert() and is
    // dropped when built with NDEBUG.
    assert(view.SetBestBlock(pindex));

    // Watch for transactions paying to me
    for (unsigned int i=0; i<vtx.size(); i++)
        SyncWithWallets(GetTxHash(i), vtx[i], this, true);

    return true;
}

// Make pindexNew the active chain tip, reorganizing if necessary: disconnect
// the losing branch, connect the new one, flush to disk, and update the
// mempool and chain globals.
bool SetBestChain(CValidationState &state, CBlockIndex* pindexNew)
{
    // All modifications to the coin state will be done in this cache.
    // Only when all have succeeded, we push it to pcoinsTip.
    // SetBestChain (continued).
    CCoinsViewCache view(*pcoinsTip, true);

    // Find the fork (typically, there is none)
    CBlockIndex* pfork = view.GetBestBlock();
    CBlockIndex* plonger = pindexNew;
    while (pfork && pfork != plonger)
    {
        while (plonger->nHeight > pfork->nHeight) {
            plonger = plonger->pprev;
            assert(plonger != NULL);
        }
        if (pfork == plonger)
            break;
        pfork = pfork->pprev;
        assert(pfork != NULL);
    }

    // List of what to disconnect (typically nothing)
    vector<CBlockIndex*> vDisconnect;
    for (CBlockIndex* pindex = view.GetBestBlock(); pindex != pfork; pindex = pindex->pprev)
        vDisconnect.push_back(pindex);

    // List of what to connect (typically only pindexNew)
    vector<CBlockIndex*> vConnect;
    for (CBlockIndex* pindex = pindexNew; pindex != pfork; pindex = pindex->pprev)
        vConnect.push_back(pindex);
    reverse(vConnect.begin(), vConnect.end());

    if (vDisconnect.size() > 0) {
        printf("REORGANIZE: Disconnect %"PRIszu" blocks; %s..\n", vDisconnect.size(), pfork->GetBlockHash().ToString().c_str());
        printf("REORGANIZE: Connect %"PRIszu" blocks; ..%s\n", vConnect.size(), pindexNew->GetBlockHash().ToString().c_str());
    }

    // Disconnect shorter branch
    list<CTransaction> vResurrect;
    BOOST_FOREACH(CBlockIndex* pindex, vDisconnect) {
        CBlock block;
        if (!block.ReadFromDisk(pindex))
            return state.Abort(_("Failed to read block"));
        int64 nStart = GetTimeMicros();
        // NOTE(review): error text says "SetBestBlock()" though this function
        // is SetBestChain — string kept as-is (runtime behavior).
        if (!block.DisconnectBlock(state, pindex, view))
            return error("SetBestBlock() : DisconnectBlock %s failed", pindex->GetBlockHash().ToString().c_str());
        if (fBenchmark)
            printf("- Disconnect: %.2fms\n", (GetTimeMicros() - nStart) * 0.001);

        // Queue memory transactions to resurrect.
        // We only do this for blocks after the last checkpoint (reorganisation before that
        // point should only happen with -reindex/-loadblock, or a misbehaving peer.
        BOOST_REVERSE_FOREACH(const CTransaction& tx, block.vtx)
            if (!tx.IsCoinBase() && pindex->nHeight > Checkpoints::GetTotalBlocksEstimate())
                vResurrect.push_front(tx);
    }

    // Connect longer branch
    vector<CTransaction> vDelete;
    BOOST_FOREACH(CBlockIndex *pindex, vConnect) {
        CBlock block;
        if (!block.ReadFromDisk(pindex))
            return state.Abort(_("Failed to read block"));
        int64 nStart = GetTimeMicros();
        if (!block.ConnectBlock(state, pindex, view)) {
            if (state.IsInvalid()) {
                InvalidChainFound(pindexNew);
                InvalidBlockFound(pindex);
            }
            return error("SetBestBlock() : ConnectBlock %s failed", pindex->GetBlockHash().ToString().c_str());
        }
        if (fBenchmark)
            printf("- Connect: %.2fms\n", (GetTimeMicros() - nStart) * 0.001);

        // Queue memory transactions to delete
        BOOST_FOREACH(const CTransaction& tx, block.vtx)
            vDelete.push_back(tx);
    }

    // Flush changes to global coin state
    int64 nStart = GetTimeMicros();
    int nModified = view.GetCacheSize();
    // NOTE(review): Flush()'s side effect is inside assert() — dropped under NDEBUG.
    assert(view.Flush());
    int64 nTime = GetTimeMicros() - nStart;
    if (fBenchmark)
        printf("- Flush %i transactions: %.2fms (%.4fms/tx)\n", nModified, 0.001 * nTime, 0.001 * nTime / nModified);

    // Make sure it's successfully written to disk before changing memory structure
    bool fIsInitialDownload = IsInitialBlockDownload();
    if (!fIsInitialDownload || pcoinsTip->GetCacheSize() > nCoinCacheSize) {
        // Typical CCoins structures on disk are around 100 bytes in size.
        // Pushing a new one to the database can cause it to be written
        // twice (once in the log, and once in the tables). This is already
        // an overestimation, as most will delete an existing entry or
        // overwrite one. Still, use a conservative safety factor of 2.
        if (!CheckDiskSpace(100 * 2 * 2 * pcoinsTip->GetCacheSize()))
            return state.Error();
        FlushBlockFile();
        pblocktree->Sync();
        if (!pcoinsTip->Flush())
            return state.Abort(_("Failed to write to coin database"));
    }

    // At this point, all changes have been done to the database.
    // Proceed by updating the memory structures.

    // Disconnect shorter branch
    BOOST_FOREACH(CBlockIndex* pindex, vDisconnect)
        if (pindex->pprev)
            pindex->pprev->pnext = NULL;

    // Connect longer branch
    BOOST_FOREACH(CBlockIndex* pindex, vConnect)
        if (pindex->pprev)
            pindex->pprev->pnext = pindex;

    // Resurrect memory transactions that were in the disconnected branch
    BOOST_FOREACH(CTransaction& tx, vResurrect) {
        // ignore validation errors in resurrected transactions
        CValidationState stateDummy;
        if (!tx.AcceptToMemoryPool(stateDummy, true, false))
            mempool.remove(tx, true);
    }

    // Delete redundant memory transactions that are in the connected branch
    BOOST_FOREACH(CTransaction& tx, vDelete) {
        mempool.remove(tx);
        mempool.removeConflicts(tx);
    }

    // Update best block in wallet (so we can detect restored wallets)
    if ((pindexNew->nHeight % 20160) == 0 || (!fIsInitialDownload && (pindexNew->nHeight % 144) == 0))
    {
        const CBlockLocator locator(pindexNew);
        ::SetBestChain(locator);
    }

    // New best block
    hashBestChain = pindexNew->GetBlockHash();
    pindexBest = pindexNew;
    pblockindexFBBHLast = NULL;
    nBestHeight = pindexBest->nHeight;
    nBestChainWork = pindexNew->nChainWork;
    nTimeBestReceived = GetTime();
    nTransactionsUpdated++;
    printf("SetBestChain: new best=%s height=%d log2_work=%.8g tx=%lu date=%s progress=%f\n",
        hashBestChain.ToString().c_str(), nBestHeight, log(nBestChainWork.getdouble())/log(2.0), (unsigned long)pindexNew->nChainTx,
        DateTimeStrFormat("%Y-%m-%d %H:%M:%S", pindexBest->GetBlockTime()).c_str(),
        Checkpoints::GuessVerificationProgress(pindexBest));

    // Check the version of the last 100 blocks to see if we need to upgrade:
    if (!fIsInitialDownload)
    {
        int nUpgraded = 0;
        const CBlockIndex* pindex = pindexBest;
        for (int i = 0; i < 100 && pindex != NULL; i++)
        {
            if (pindex->nVersion > CBlock::CURRENT_VERSION)
                ++nUpgraded;
            pindex = pindex->pprev;
        }
        if (nUpgraded > 0)
            printf("SetBestChain: %d of last 100 blocks above version %d\n", nUpgraded, CBlock::CURRENT_VERSION);
        if (nUpgraded > 100/2)
            // strMiscWarning is read by
            // GetWarnings(), called by Qt and the JSON-RPC code to warn the user:
            strMiscWarning = _("Warning: This version is obsolete, upgrade required!");
    }

    std::string strCmd = GetArg("-blocknotify", "");

    if (!fIsInitialDownload && !strCmd.empty())
    {
        boost::replace_all(strCmd, "%s", hashBestChain.GetHex());
        boost::thread t(runCommand, strCmd); // thread runs free
    }

    return true;
}

// Insert this block into mapBlockIndex, persist the index entry, and attempt
// to extend the best chain with it.
bool CBlock::AddToBlockIndex(CValidationState &state, const CDiskBlockPos &pos)
{
    // Check for duplicate
    uint256 hash = GetHash();
    if (mapBlockIndex.count(hash))
        return state.Invalid(error("AddToBlockIndex() : %s already exists", hash.ToString().c_str()));

    // Construct new block index object
    CBlockIndex* pindexNew = new CBlockIndex(*this);
    assert(pindexNew);
    map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.insert(make_pair(hash, pindexNew)).first;
    // phashBlock points at the key stored in the map, so it stays valid for
    // the index entry's lifetime.
    pindexNew->phashBlock = &((*mi).first);
    map<uint256, CBlockIndex*>::iterator miPrev = mapBlockIndex.find(hashPrevBlock);
    if (miPrev != mapBlockIndex.end())
    {
        pindexNew->pprev = (*miPrev).second;
        pindexNew->nHeight = pindexNew->pprev->nHeight + 1;
    }
    pindexNew->nTx = vtx.size();
    pindexNew->nChainWork = (pindexNew->pprev ? pindexNew->pprev->nChainWork : 0) + pindexNew->GetBlockWork().getuint256();
    pindexNew->nChainTx = (pindexNew->pprev ? pindexNew->pprev->nChainTx : 0) + pindexNew->nTx;
    pindexNew->nFile = pos.nFile;
    pindexNew->nDataPos = pos.nPos;
    pindexNew->nUndoPos = 0;
    pindexNew->nStatus = BLOCK_VALID_TRANSACTIONS | BLOCK_HAVE_DATA;
    setBlockIndexValid.insert(pindexNew);

    if (!pblocktree->WriteBlockIndex(CDiskBlockIndex(pindexNew)))
        return state.Abort(_("Failed to write block index"));

    // New best?
    if (!ConnectBestBlock(state))
        return false;

    if (pindexNew == pindexBest)
    {
        // Notify UI to display prev block's coinbase if it was ours
        static uint256 hashPrevBestCoinBase;
        UpdatedTransaction(hashPrevBestCoinBase);
        hashPrevBestCoinBase = GetTxHash(0);
    }

    if (!pblocktree->Flush())
        return state.Abort(_("Failed to sync block index"));

    uiInterface.NotifyBlocksChanged();
    return true;
}

// Choose an on-disk position with nAddSize free bytes in the block files,
// rolling over to a new blkNNNNN.dat (and pre-allocating chunks) as needed.
// fKnown is set during reindex, when the position is already determined.
bool FindBlockPos(CValidationState &state, CDiskBlockPos &pos, unsigned int nAddSize, unsigned int nHeight, uint64 nTime, bool fKnown = false)
{
    bool fUpdatedLast = false;

    LOCK(cs_LastBlockFile);

    if (fKnown) {
        if (nLastBlockFile != pos.nFile) {
            nLastBlockFile = pos.nFile;
            infoLastBlockFile.SetNull();
            pblocktree->ReadBlockFileInfo(nLastBlockFile, infoLastBlockFile);
            fUpdatedLast = true;
        }
    } else {
        while (infoLastBlockFile.nSize + nAddSize >= MAX_BLOCKFILE_SIZE) {
            printf("Leaving block file %i: %s\n", nLastBlockFile, infoLastBlockFile.ToString().c_str());
            FlushBlockFile(true);
            nLastBlockFile++;
            infoLastBlockFile.SetNull();
            pblocktree->ReadBlockFileInfo(nLastBlockFile, infoLastBlockFile); // check whether data for the new file somehow already exist; can fail just fine
            fUpdatedLast = true;
        }
        pos.nFile = nLastBlockFile;
        pos.nPos = infoLastBlockFile.nSize;
    }

    infoLastBlockFile.nSize += nAddSize;
    infoLastBlockFile.AddBlock(nHeight, nTime);

    if (!fKnown) {
        unsigned int nOldChunks = (pos.nPos + BLOCKFILE_CHUNK_SIZE - 1) / BLOCKFILE_CHUNK_SIZE;
        unsigned int nNewChunks = (infoLastBlockFile.nSize + BLOCKFILE_CHUNK_SIZE - 1) / BLOCKFILE_CHUNK_SIZE;
        if (nNewChunks > nOldChunks) {
            if (CheckDiskSpace(nNewChunks * BLOCKFILE_CHUNK_SIZE - pos.nPos)) {
                FILE *file = OpenBlockFile(pos);
                if (file) {
                    printf("Pre-allocating up to position 0x%x in blk%05u.dat\n", nNewChunks * BLOCKFILE_CHUNK_SIZE, pos.nFile);
                    AllocateFileRange(file, pos.nPos, nNewChunks * BLOCKFILE_CHUNK_SIZE - pos.nPos);
                    fclose(file);
                }
            }
            else
                return state.Error();
        }
    }

    if (!pblocktree->WriteBlockFileInfo(nLastBlockFile, infoLastBlockFile))
        return state.Abort(_("Failed to write file info"));
    if (fUpdatedLast)
        pblocktree->WriteLastBlockFile(nLastBlockFile);

    return true;
}

// Reserve nAddSize bytes in the undo (revNNNNN.dat) file paired with block
// file nFile, pre-allocating chunks as needed.
bool FindUndoPos(CValidationState &state, int nFile, CDiskBlockPos &pos, unsigned int nAddSize)
{
    pos.nFile = nFile;

    LOCK(cs_LastBlockFile);

    unsigned int nNewSize;
    if (nFile == nLastBlockFile) {
        pos.nPos = infoLastBlockFile.nUndoSize;
        nNewSize = (infoLastBlockFile.nUndoSize += nAddSize);
        if (!pblocktree->WriteBlockFileInfo(nLastBlockFile, infoLastBlockFile))
            return state.Abort(_("Failed to write block info"));
    } else {
        CBlockFileInfo info;
        if (!pblocktree->ReadBlockFileInfo(nFile, info))
            return state.Abort(_("Failed to read block info"));
        pos.nPos = info.nUndoSize;
        nNewSize = (info.nUndoSize += nAddSize);
        if (!pblocktree->WriteBlockFileInfo(nFile, info))
            return state.Abort(_("Failed to write block info"));
    }

    unsigned int nOldChunks = (pos.nPos + UNDOFILE_CHUNK_SIZE - 1) / UNDOFILE_CHUNK_SIZE;
    unsigned int nNewChunks = (nNewSize + UNDOFILE_CHUNK_SIZE - 1) / UNDOFILE_CHUNK_SIZE;
    if (nNewChunks > nOldChunks) {
        if (CheckDiskSpace(nNewChunks * UNDOFILE_CHUNK_SIZE - pos.nPos)) {
            FILE *file = OpenUndoFile(pos);
            if (file) {
                printf("Pre-allocating up to position 0x%x in rev%05u.dat\n", nNewChunks * UNDOFILE_CHUNK_SIZE, pos.nFile);
                AllocateFileRange(file, pos.nPos, nNewChunks * UNDOFILE_CHUNK_SIZE - pos.nPos);
                fclose(file);
            }
        }
        else
            return state.Error();
    }

    return true;
}

// Context-free sanity checks on a block: size, proof of work, timestamp,
// coinbase placement, per-transaction checks, sigop limit, merkle root.
bool CBlock::CheckBlock(CValidationState &state, bool fCheckPOW, bool fCheckMerkleRoot) const
{
    // These are checks that are independent of context
    // that can be verified before saving an orphan block.
    // CheckBlock (continued).

    // Size limits
    if (vtx.empty() || vtx.size() > MAX_BLOCK_SIZE || ::GetSerializeSize(*this, SER_NETWORK, PROTOCOL_VERSION) > MAX_BLOCK_SIZE)
        return state.DoS(100, error("CheckBlock() : size limits failed"));

    // Special short-term limits to avoid 10,000 BDB lock limit:
    if (GetBlockTime() >= 1363867200 && // start enforcing 21 March 2013, noon GMT
        GetBlockTime() < 1368576000)    // stop enforcing 15 May 2013 00:00:00
    {
        // Rule is: #unique txids referenced <= 4,500
        // ... to prevent 10,000 BDB lock exhaustion on old clients
        set<uint256> setTxIn;
        for (size_t i = 0; i < vtx.size(); i++)
        {
            setTxIn.insert(vtx[i].GetHash());
            if (i == 0) continue; // skip coinbase txin
            BOOST_FOREACH(const CTxIn& txin, vtx[i].vin)
                setTxIn.insert(txin.prevout.hash);
        }
        size_t nTxids = setTxIn.size();
        if (nTxids > 4500)
            return error("CheckBlock() : 15 May maxlocks violation");
    }

    // Check proof of work matches claimed amount
    if (fCheckPOW && !CheckProofOfWork(GetHash(), nBits))
        return state.DoS(50, error("CheckBlock() : proof of work failed"));

    // Check timestamp
    if (GetBlockTime() > GetAdjustedTime() + 2 * 60 * 60)
        return state.Invalid(error("CheckBlock() : block timestamp too far in the future"));

    // First transaction must be coinbase, the rest must not be
    if (vtx.empty() || !vtx[0].IsCoinBase())
        return state.DoS(100, error("CheckBlock() : first tx is not coinbase"));
    for (unsigned int i = 1; i < vtx.size(); i++)
        if (vtx[i].IsCoinBase())
            return state.DoS(100, error("CheckBlock() : more than one coinbase"));

    // Check transactions
    BOOST_FOREACH(const CTransaction& tx, vtx)
        if (!tx.CheckTransaction(state))
            return error("CheckBlock() : CheckTransaction failed");

    // Build the merkle tree already. We need it anyway later, and it makes the
    // block cache the transaction hashes, which means they don't need to be
    // recalculated many times during this block's validation.
    BuildMerkleTree();

    // Check for duplicate txids. This is caught by ConnectInputs(),
    // but catching it earlier avoids a potential DoS attack:
    set<uint256> uniqueTx;
    for (unsigned int i=0; i<vtx.size(); i++) {
        uniqueTx.insert(GetTxHash(i));
    }
    if (uniqueTx.size() != vtx.size())
        return state.DoS(100, error("CheckBlock() : duplicate transaction"), true);

    unsigned int nSigOps = 0;
    BOOST_FOREACH(const CTransaction& tx, vtx)
    {
        nSigOps += tx.GetLegacySigOpCount();
    }
    if (nSigOps > MAX_BLOCK_SIGOPS)
        return state.DoS(100, error("CheckBlock() : out-of-bounds SigOpCount"));

    // Check merkle root
    if (fCheckMerkleRoot && hashMerkleRoot != BuildMerkleTree())
        return state.DoS(100, error("CheckBlock() : hashMerkleRoot mismatch"));

    return true;
}

// Context-dependent acceptance: validate against the chain (PoW target,
// timestamp, tx finality, checkpoints, version supermajority rules), then
// write the block to disk and index it.
bool CBlock::AcceptBlock(CValidationState &state, CDiskBlockPos *dbp)
{
    // Check for duplicate
    uint256 hash = GetHash();
    if (mapBlockIndex.count(hash))
        return state.Invalid(error("AcceptBlock() : block already in mapBlockIndex"));

    // Get prev block index
    CBlockIndex* pindexPrev = NULL;
    int nHeight = 0;
    if (hash != hashGenesisBlock) {
        map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hashPrevBlock);
        if (mi == mapBlockIndex.end())
            return state.DoS(10, error("AcceptBlock() : prev block not found"));
        pindexPrev = (*mi).second;
        nHeight = pindexPrev->nHeight+1;

        // Check proof of work
        if (nBits != GetNextWorkRequired(pindexPrev, this))
            return state.DoS(100, error("AcceptBlock() : incorrect proof of work"));

        // Check timestamp against prev
        if (GetBlockTime() <= pindexPrev->GetMedianTimePast())
            return state.Invalid(error("AcceptBlock() : block's timestamp is too early"));

        // Check that all transactions are finalized
        BOOST_FOREACH(const CTransaction& tx, vtx)
            if (!tx.IsFinal(nHeight, GetBlockTime()))
                return state.DoS(10, error("AcceptBlock() : contains a non-final transaction"));

        // Check that the block chain matches the known block chain up to a checkpoint
        if (!Checkpoints::CheckBlock(nHeight, hash))
            return state.DoS(100, error("AcceptBlock() : rejected by checkpoint lock-in at %d", nHeight));

        // Reject block.nVersion=1 blocks when 95% (75% on testnet) of the network has upgraded:
        if (nVersion < 2)
        {
            if ((!fTestNet && CBlockIndex::IsSuperMajority(2, pindexPrev, 950, 1000)) ||
                (fTestNet && CBlockIndex::IsSuperMajority(2, pindexPrev, 75, 100)))
            {
                return state.Invalid(error("AcceptBlock() : rejected nVersion=1 block"));
            }
        }
        // Enforce block.nVersion=2 rule that the coinbase starts with serialized block height
        if (nVersion >= 2)
        {
            // if 750 of the last 1,000 blocks are version 2 or greater (51/100 if testnet):
            if ((!fTestNet && CBlockIndex::IsSuperMajority(2, pindexPrev, 750, 1000)) ||
                (fTestNet && CBlockIndex::IsSuperMajority(2, pindexPrev, 51, 100)))
            {
                CScript expect = CScript() << nHeight;
                if (vtx[0].vin[0].scriptSig.size() < expect.size() ||
                    !std::equal(expect.begin(), expect.end(), vtx[0].vin[0].scriptSig.begin()))
                    return state.DoS(100, error("AcceptBlock() : block height mismatch in coinbase"));
            }
        }
    }

    // Write block to history file
    try {
        unsigned int nBlockSize = ::GetSerializeSize(*this, SER_DISK, CLIENT_VERSION);
        CDiskBlockPos blockPos;
        if (dbp != NULL)
            blockPos = *dbp;
        if (!FindBlockPos(state, blockPos, nBlockSize+8, nHeight, nTime, dbp != NULL))
            return error("AcceptBlock() : FindBlockPos failed");
        if (dbp == NULL)
            if (!WriteToDisk(blockPos))
                return state.Abort(_("Failed to write block"));
        if (!AddToBlockIndex(state, blockPos))
            return error("AcceptBlock() : AddToBlockIndex failed");
    } catch(std::runtime_error &e) {
        return state.Abort(_("System error: ") + e.what());
    }

    // Relay inventory, but don't relay old inventory during initial block download
    int nBlockEstimate = Checkpoints::GetTotalBlocksEstimate();
    if (hashBestChain == hash)
    {
        LOCK(cs_vNodes);
        BOOST_FOREACH(CNode* pnode, vNodes)
            if (nBestHeight > (pnode->nStartingHeight != -1 ?
pnode->nStartingHeight - 2000 : nBlockEstimate))
                    // NOTE(review): tail of a function that begins before this chunk —
                    // relays the new block's inventory to peers past the height estimate.
                    pnode->PushInventory(CInv(MSG_BLOCK, hash));
        }
    return true;
}

// Returns true if at least nRequired of the last nToCheck ancestors of pstart
// (walking pprev links, inclusive of pstart) have nVersion >= minVersion.
// Used for soft-fork supermajority signalling.
bool CBlockIndex::IsSuperMajority(int minVersion, const CBlockIndex* pstart, unsigned int nRequired, unsigned int nToCheck)
{
    unsigned int nFound = 0;
    for (unsigned int i = 0; i < nToCheck && nFound < nRequired && pstart != NULL; i++)
    {
        if (pstart->nVersion >= minVersion)
            ++nFound;
        pstart = pstart->pprev;
    }
    return (nFound >= nRequired);
}

// Top-level entry point for a newly received block (from a peer, a local miner,
// or -loadblock import). Performs duplicate/context-free checks, stashes the
// block as an orphan when its parent is unknown, otherwise accepts it to disk
// and then retries any orphans that were waiting on it.
//   state  - accumulates validation failures (DoS scoring, invalid/err flags)
//   pfrom  - originating peer, or NULL for local sources; used to request parents
//   pblock - the block to process (copied if it must be kept as an orphan)
//   dbp    - existing on-disk position during reindex/import, else NULL
// Returns false only on a validation/IO failure for THIS block; an orphaned
// block returns true (it is parked, not rejected).
bool ProcessBlock(CValidationState &state, CNode* pfrom, CBlock* pblock, CDiskBlockPos *dbp)
{
    // Check for duplicate
    uint256 hash = pblock->GetHash();
    if (mapBlockIndex.count(hash))
        return state.Invalid(error("ProcessBlock() : already have block %d %s", mapBlockIndex[hash]->nHeight, hash.ToString().c_str()));
    if (mapOrphanBlocks.count(hash))
        return state.Invalid(error("ProcessBlock() : already have block (orphan) %s", hash.ToString().c_str()));

    // Preliminary checks (context-free: PoW, merkle root, sanity)
    if (!pblock->CheckBlock(state))
        return error("ProcessBlock() : CheckBlock FAILED");

    CBlockIndex* pcheckpoint = Checkpoints::GetLastCheckpoint(mapBlockIndex);
    if (pcheckpoint && pblock->hashPrevBlock != hashBestChain)
    {
        // Extra checks to prevent "fill up memory by spamming with bogus blocks"
        int64 deltaTime = pblock->GetBlockTime() - pcheckpoint->nTime;
        if (deltaTime < 0)
        {
            return state.DoS(100, error("ProcessBlock() : block with timestamp before last checkpoint"));
        }
        // Require at least the minimum work implied by the elapsed time since
        // the last checkpoint, so cheap low-difficulty spam is rejected early.
        CBigNum bnNewBlock;
        bnNewBlock.SetCompact(pblock->nBits);
        CBigNum bnRequired;
        bnRequired.SetCompact(ComputeMinWork(pcheckpoint->nBits, deltaTime));
        if (bnNewBlock > bnRequired)
        {
            return state.DoS(100, error("ProcessBlock() : block with too little proof-of-work"));
        }
    }

    // If we don't already have its previous block, shunt it off to holding area until we get it
    if (pblock->hashPrevBlock != 0 && !mapBlockIndex.count(pblock->hashPrevBlock))
    {
        printf("ProcessBlock: ORPHAN BLOCK, prev=%s\n", pblock->hashPrevBlock.ToString().c_str());

        // Accept orphans as long as there is a node to request its parents from
        if (pfrom) {
            // Heap copy: the orphan must outlive this call; freed when resolved below.
            CBlock* pblock2 = new CBlock(*pblock);
            mapOrphanBlocks.insert(make_pair(hash, pblock2));
            mapOrphanBlocksByPrev.insert(make_pair(pblock2->hashPrevBlock, pblock2));

            // Ask this guy to fill in what we're missing
            pfrom->PushGetBlocks(pindexBest, GetOrphanRoot(pblock2));
        }
        return true;
    }

    // Store to disk
    if (!pblock->AcceptBlock(state, dbp))
        return error("ProcessBlock() : AcceptBlock FAILED");

    // Recursively process any orphan blocks that depended on this one
    // (breadth-first via a work queue instead of literal recursion).
    vector<uint256> vWorkQueue;
    vWorkQueue.push_back(hash);
    for (unsigned int i = 0; i < vWorkQueue.size(); i++)
    {
        uint256 hashPrev = vWorkQueue[i];
        for (multimap<uint256, CBlock*>::iterator mi = mapOrphanBlocksByPrev.lower_bound(hashPrev);
             mi != mapOrphanBlocksByPrev.upper_bound(hashPrev);
             ++mi)
        {
            CBlock* pblockOrphan = (*mi).second;
            // Use a dummy CValidationState so someone can't setup nodes to counter-DoS
            // based on orphan resolution (that is, feeding people an invalid block based
            // on LegitBlockX in order to get anyone relaying LegitBlockX banned)
            CValidationState stateDummy;
            if (pblockOrphan->AcceptBlock(stateDummy))
                vWorkQueue.push_back(pblockOrphan->GetHash());
            mapOrphanBlocks.erase(pblockOrphan->GetHash());
            delete pblockOrphan;
        }
        mapOrphanBlocksByPrev.erase(hashPrev);
    }

    printf("ProcessBlock: ACCEPTED\n");
    return true;
}

// Build a merkleblock (BIP 37) for an SPV peer: record the block header, run
// every transaction through the peer's bloom filter, and construct the partial
// merkle tree over the match flags. Matched (index, txid) pairs are kept in
// vMatchedTxn so the caller can also relay the full transactions.
CMerkleBlock::CMerkleBlock(const CBlock& block, CBloomFilter& filter)
{
    header = block.GetBlockHeader();

    vector<bool> vMatch;
    vector<uint256> vHashes;
    vMatch.reserve(block.vtx.size());
    vHashes.reserve(block.vtx.size());

    for (unsigned int i = 0; i < block.vtx.size(); i++)
    {
        uint256 hash = block.vtx[i].GetHash();
        // IsRelevantAndUpdate may also insert outpoints into the filter
        // depending on the peer's BLOOM_UPDATE mode.
        if (filter.IsRelevantAndUpdate(block.vtx[i], hash))
        {
            vMatch.push_back(true);
            vMatchedTxn.push_back(make_pair(i, hash));
        }
        else
            vMatch.push_back(false);
        vHashes.push_back(hash);
    }

    txn = CPartialMerkleTree(vHashes, vMatch);
}

// Compute the merkle-tree node hash at (height, pos) over the txid list.
uint256 CPartialMerkleTree::CalcHash(int height, unsigned int pos, const std::vector<uint256> &vTxid) {
    if (height == 0) {
        // hash at height 0 is the txid itself
        return vTxid[pos];
    } else {
        // calculate left hash
        uint256 left = CalcHash(height-1, pos*2, vTxid), right;
        // calculate right hash if not beyond the end of the array - copy left hash otherwise
        if (pos*2+1 < CalcTreeWidth(height-1))
            right = CalcHash(height-1, pos*2+1, vTxid);
        else
            right = left;
        // combine subhashes
        return Hash(BEGIN(left), END(left), BEGIN(right), END(right));
    }
}

// Recursively serialize the partial merkle tree: for each node, push one flag
// bit; nodes that are not parents of any matched txid (or leaves) get their
// hash stored directly, otherwise both children are descended into.
void CPartialMerkleTree::TraverseAndBuild(int height, unsigned int pos, const std::vector<uint256> &vTxid, const std::vector<bool> &vMatch) {
    // determine whether this node is the parent of at least one matched txid
    bool fParentOfMatch = false;
    for (unsigned int p = pos << height; p < (pos+1) << height && p < nTransactions; p++)
        fParentOfMatch |= vMatch[p];
    // store as flag bit
    vBits.push_back(fParentOfMatch);
    if (height==0 || !fParentOfMatch) {
        // if at height 0, or nothing interesting below, store hash and stop
        vHash.push_back(CalcHash(height, pos, vTxid));
    } else {
        // otherwise, don't store any hash, but descend into the subtrees
        TraverseAndBuild(height-1, pos*2, vTxid, vMatch);
        if (pos*2+1 < CalcTreeWidth(height-1))
            TraverseAndBuild(height-1, pos*2+1, vTxid, vMatch);
    }
}

// Inverse of TraverseAndBuild: consume flag bits and stored hashes to
// reconstruct this node's hash, appending matched leaf txids to vMatch.
// Sets fBad (and returns 0) if the serialized data over/underflows.
uint256 CPartialMerkleTree::TraverseAndExtract(int height, unsigned int pos, unsigned int &nBitsUsed, unsigned int &nHashUsed, std::vector<uint256> &vMatch) {
    if (nBitsUsed >= vBits.size()) {
        // overflowed the bits array - failure
        fBad = true;
        return 0;
    }
    bool fParentOfMatch = vBits[nBitsUsed++];
    if (height==0 || !fParentOfMatch) {
        // if at height 0, or nothing interesting below, use stored hash and do not descend
        if (nHashUsed >= vHash.size()) {
            // overflowed the hash array - failure
            fBad = true;
            return 0;
        }
        const uint256 &hash = vHash[nHashUsed++];
        if (height==0 && fParentOfMatch)
            // in case of height 0, we have a matched txid
            vMatch.push_back(hash);
        return hash;
    } else {
        // otherwise, descend into the subtrees to extract matched txids and hashes
        uint256 left = TraverseAndExtract(height-1, pos*2, nBitsUsed, nHashUsed, vMatch), right;
        if (pos*2+1 < CalcTreeWidth(height-1))
            right = TraverseAndExtract(height-1, pos*2+1, nBitsUsed, nHashUsed, vMatch);
        else
            right = left;
        // and combine them before returning
        return Hash(BEGIN(left), END(left), BEGIN(right), END(right));
    }
}

// Construct a partial merkle tree for the given txids and per-txid match flags.
CPartialMerkleTree::CPartialMerkleTree(const std::vector<uint256> &vTxid, const std::vector<bool> &vMatch) : nTransactions(vTxid.size()), fBad(false) {
    // reset state
    vBits.clear();
    vHash.clear();

    // calculate height of tree
    int nHeight = 0;
    while (CalcTreeWidth(nHeight) > 1)
        nHeight++;

    // traverse the partial tree
    TraverseAndBuild(nHeight, 0, vTxid, vMatch);
}

// Default-constructed trees are marked bad until deserialized/validated.
CPartialMerkleTree::CPartialMerkleTree() : nTransactions(0), fBad(true) {}

// Validate a received partial merkle tree and extract the matched txids into
// vMatch. Returns the reconstructed merkle root on success, or 0 on any
// malformation (empty, oversized, unconsumed bits/hashes, traversal failure).
uint256 CPartialMerkleTree::ExtractMatches(std::vector<uint256> &vMatch) {
    vMatch.clear();
    // An empty set will not work
    if (nTransactions == 0)
        return 0;
    // check for excessively high numbers of transactions
    if (nTransactions > MAX_BLOCK_SIZE / 60) // 60 is the lower bound for the size of a serialized CTransaction
        return 0;
    // there can never be more hashes provided than one for every txid
    if (vHash.size() > nTransactions)
        return 0;
    // there must be at least one bit per node in the partial tree, and at least one node per hash
    if (vBits.size() < vHash.size())
        return 0;
    // calculate height of tree
    int nHeight = 0;
    while (CalcTreeWidth(nHeight) > 1)
        nHeight++;
    // traverse the partial tree
    unsigned int nBitsUsed = 0, nHashUsed = 0;
    uint256 hashMerkleRoot = TraverseAndExtract(nHeight, 0, nBitsUsed, nHashUsed, vMatch);
    // verify that no problems occurred during the tree traversal
    if (fBad)
        return 0;
    // verify that all bits were consumed (except for the padding caused by serializing it as a byte sequence)
    if ((nBitsUsed+7)/8 != (vBits.size()+7)/8)
        return 0;
    // verify that all hashes were consumed
    if (nHashUsed != vHash.size())
        return 0;
    return hashMerkleRoot;
}

// Record a fatal condition, surface it in the UI, and begin shutdown.
// Always returns false so callers can `return AbortNode(...)`.
bool AbortNode(const std::string &strMessage) {
    strMiscWarning = strMessage;
    printf("*** %s\n",
strMessage.c_str()); uiInterface.ThreadSafeMessageBox(strMessage, "", CClientUIInterface::MSG_ERROR); StartShutdown(); return false; } bool CheckDiskSpace(uint64 nAdditionalBytes) { uint64 nFreeBytesAvailable = filesystem::space(GetDataDir()).available; // Check for nMinDiskSpace bytes (currently 50MB) if (nFreeBytesAvailable < nMinDiskSpace + nAdditionalBytes) return AbortNode(_("Error: Disk space is low!")); return true; } CCriticalSection cs_LastBlockFile; CBlockFileInfo infoLastBlockFile; int nLastBlockFile = 0; FILE* OpenDiskFile(const CDiskBlockPos &pos, const char *prefix, bool fReadOnly) { if (pos.IsNull()) return NULL; boost::filesystem::path path = GetDataDir() / "blocks" / strprintf("%s%05u.dat", prefix, pos.nFile); boost::filesystem::create_directories(path.parent_path()); FILE* file = fopen(path.string().c_str(), "rb+"); if (!file && !fReadOnly) file = fopen(path.string().c_str(), "wb+"); if (!file) { printf("Unable to open file %s\n", path.string().c_str()); return NULL; } if (pos.nPos) { if (fseek(file, pos.nPos, SEEK_SET)) { printf("Unable to seek to position %u of %s\n", pos.nPos, path.string().c_str()); fclose(file); return NULL; } } return file; } FILE* OpenBlockFile(const CDiskBlockPos &pos, bool fReadOnly) { return OpenDiskFile(pos, "blk", fReadOnly); } FILE* OpenUndoFile(const CDiskBlockPos &pos, bool fReadOnly) { return OpenDiskFile(pos, "rev", fReadOnly); } CBlockIndex * InsertBlockIndex(uint256 hash) { if (hash == 0) return NULL; // Return existing map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hash); if (mi != mapBlockIndex.end()) return (*mi).second; // Create new CBlockIndex* pindexNew = new CBlockIndex(); if (!pindexNew) throw runtime_error("LoadBlockIndex() : new CBlockIndex failed"); mi = mapBlockIndex.insert(make_pair(hash, pindexNew)).first; pindexNew->phashBlock = &((*mi).first); return pindexNew; } bool static LoadBlockIndexDB() { if (!pblocktree->LoadBlockIndexGuts()) return false; 
boost::this_thread::interruption_point();

    // Calculate nChainWork: process indexes in height order so each parent's
    // cumulative values exist before its children are visited.
    vector<pair<int, CBlockIndex*> > vSortedByHeight;
    vSortedByHeight.reserve(mapBlockIndex.size());
    BOOST_FOREACH(const PAIRTYPE(uint256, CBlockIndex*)& item, mapBlockIndex)
    {
        CBlockIndex* pindex = item.second;
        vSortedByHeight.push_back(make_pair(pindex->nHeight, pindex));
    }
    sort(vSortedByHeight.begin(), vSortedByHeight.end());
    BOOST_FOREACH(const PAIRTYPE(int, CBlockIndex*)& item, vSortedByHeight)
    {
        CBlockIndex* pindex = item.second;
        pindex->nChainWork = (pindex->pprev ? pindex->pprev->nChainWork : 0) + pindex->GetBlockWork().getuint256();
        pindex->nChainTx = (pindex->pprev ? pindex->pprev->nChainTx : 0) + pindex->nTx;
        // Only fully transaction-validated, non-failed blocks are candidates
        // for the best chain.
        if ((pindex->nStatus & BLOCK_VALID_MASK) >= BLOCK_VALID_TRANSACTIONS && !(pindex->nStatus & BLOCK_FAILED_MASK))
            setBlockIndexValid.insert(pindex);
    }

    // Load block file info
    pblocktree->ReadLastBlockFile(nLastBlockFile);
    printf("LoadBlockIndexDB(): last block file = %i\n", nLastBlockFile);
    if (pblocktree->ReadBlockFileInfo(nLastBlockFile, infoLastBlockFile))
        printf("LoadBlockIndexDB(): last block file info: %s\n", infoLastBlockFile.ToString().c_str());

    // Load nBestInvalidWork, OK if it doesn't exist
    CBigNum bnBestInvalidWork;
    pblocktree->ReadBestInvalidWork(bnBestInvalidWork);
    nBestInvalidWork = bnBestInvalidWork.getuint256();

    // Check whether we need to continue reindexing
    bool fReindexing = false;
    pblocktree->ReadReindexing(fReindexing);
    fReindex |= fReindexing;

    // Check whether we have a transaction index
    pblocktree->ReadFlag("txindex", fTxIndex);
    printf("LoadBlockIndexDB(): transaction index %s\n", fTxIndex ? "enabled" : "disabled");

    // Load hashBestChain pointer to end of best chain
    pindexBest = pcoinsTip->GetBestBlock();
    if (pindexBest == NULL)
        return true;   // fresh datadir: no best block yet, nothing more to do
    hashBestChain = pindexBest->GetBlockHash();
    nBestHeight = pindexBest->nHeight;
    nBestChainWork = pindexBest->nChainWork;

    // set 'next' pointers in best chain (walk tip -> genesis)
    CBlockIndex *pindex = pindexBest;
    while(pindex != NULL && pindex->pprev != NULL)
    {
        CBlockIndex *pindexPrev = pindex->pprev;
        pindexPrev->pnext = pindex;
        pindex = pindexPrev;
    }
    printf("LoadBlockIndexDB(): hashBestChain=%s  height=%d date=%s\n",
        hashBestChain.ToString().c_str(), nBestHeight,
        DateTimeStrFormat("%Y-%m-%d %H:%M:%S", pindexBest->GetBlockTime()).c_str());

    return true;
}

// Sanity-check the last -checkblocks blocks of the best chain at the
// requested -checklevel (0=disk read, 1=block validity, 2=undo data,
// 3=tip disconnect consistency, 4=reconnect). Returns false on any failure.
bool VerifyDB() {
    if (pindexBest == NULL || pindexBest->pprev == NULL)
        return true;

    // Verify blocks in the best chain
    int nCheckLevel = GetArg("-checklevel", 3);
    int nCheckDepth = GetArg( "-checkblocks", 288);
    if (nCheckDepth == 0)
        nCheckDepth = 1000000000; // suffices until the year 19000
    if (nCheckDepth > nBestHeight)
        nCheckDepth = nBestHeight;
    nCheckLevel = std::max(0, std::min(4, nCheckLevel));
    printf("Verifying last %i blocks at level %i\n", nCheckDepth, nCheckLevel);
    CCoinsViewCache coins(*pcoinsTip, true);
    CBlockIndex* pindexState = pindexBest;
    CBlockIndex* pindexFailure = NULL;
    int nGoodTransactions = 0;
    CValidationState state;
    for (CBlockIndex* pindex = pindexBest; pindex && pindex->pprev; pindex = pindex->pprev)
    {
        boost::this_thread::interruption_point();
        if (pindex->nHeight < nBestHeight-nCheckDepth)
            break;
        CBlock block;
        // check level 0: read from disk
        if (!block.ReadFromDisk(pindex))
            return error("VerifyDB() : *** block.ReadFromDisk failed at %d, hash=%s", pindex->nHeight, pindex->GetBlockHash().ToString().c_str());
        // check level 1: verify block validity
        if (nCheckLevel >= 1 && !block.CheckBlock(state))
            return error("VerifyDB() : *** found bad block at %d, hash=%s\n", pindex->nHeight, pindex->GetBlockHash().ToString().c_str());
        // check level 2: verify undo validity
        if
(nCheckLevel >= 2 && pindex) {
            CBlockUndo undo;
            CDiskBlockPos pos = pindex->GetUndoPos();
            if (!pos.IsNull()) {
                if (!undo.ReadFromDisk(pos, pindex->pprev->GetBlockHash()))
                    return error("VerifyDB() : *** found bad undo data at %d, hash=%s\n", pindex->nHeight, pindex->GetBlockHash().ToString().c_str());
            }
        }
        // check level 3: check for inconsistencies during memory-only disconnect of tip blocks
        // (bounded by cache size so we never exceed roughly 2x the coin cache budget)
        if (nCheckLevel >= 3 && pindex == pindexState && (coins.GetCacheSize() + pcoinsTip->GetCacheSize()) <= 2*nCoinCacheSize + 32000) {
            bool fClean = true;
            if (!block.DisconnectBlock(state, pindex, coins, &fClean))
                return error("VerifyDB() : *** irrecoverable inconsistency in block data at %d, hash=%s", pindex->nHeight, pindex->GetBlockHash().ToString().c_str());
            pindexState = pindex->pprev;
            if (!fClean) {
                // unclean disconnect: remember the deepest failing block
                nGoodTransactions = 0;
                pindexFailure = pindex;
            } else
                nGoodTransactions += block.vtx.size();
        }
    }
    if (pindexFailure)
        return error("VerifyDB() : *** coin database inconsistencies found (last %i blocks, %i good transactions before that)\n", pindexBest->nHeight - pindexFailure->nHeight + 1, nGoodTransactions);

    // check level 4: try reconnecting blocks
    if (nCheckLevel >= 4) {
        CBlockIndex *pindex = pindexState;
        while (pindex != pindexBest) {
            boost::this_thread::interruption_point();
            pindex = pindex->pnext;
            CBlock block;
            if (!block.ReadFromDisk(pindex))
                return error("VerifyDB() : *** block.ReadFromDisk failed at %d, hash=%s", pindex->nHeight, pindex->GetBlockHash().ToString().c_str());
            if (!block.ConnectBlock(state, pindex, coins))
                return error("VerifyDB() : *** found unconnectable block at %d, hash=%s", pindex->nHeight, pindex->GetBlockHash().ToString().c_str());
        }
    }

    printf("No coin database inconsistencies in last %i blocks (%i transactions)\n", pindexBest->nHeight - pindexState->nHeight, nGoodTransactions);

    return true;
}

// Clear all in-memory block index state (used on shutdown / before reload).
void UnloadBlockIndex()
{
    mapBlockIndex.clear();
    setBlockIndexValid.clear();
    pindexGenesisBlock = NULL;
    nBestHeight = 0;
    nBestChainWork = 0;
    nBestInvalidWork = 0;
    hashBestChain = 0;
    pindexBest = NULL;
}

// Select network parameters (testnet message start / genesis hash) and load
// the block index from the database unless a reindex is pending.
bool LoadBlockIndex()
{
    if (fTestNet)
    {
        pchMessageStart[0] = 0x0b;
        pchMessageStart[1] = 0x11;
        pchMessageStart[2] = 0x09;
        pchMessageStart[3] = 0x07;
        hashGenesisBlock = uint256("0x000000007aeecc59aab937f8c65010b69f3e05c63b869c3d702106875804e206");
    }

    //
    // Load block index from databases
    //
    if (!fReindex && !LoadBlockIndexDB())
        return false;

    return true;
}

// Initialize a fresh block database: write the -txindex flag and, unless
// reindexing, construct and store the hard-coded genesis block.
bool InitBlockIndex() {
    // Check whether we're already initialized
    if (pindexGenesisBlock != NULL)
        return true;

    // Use the provided setting for -txindex in the new database
    fTxIndex = GetBoolArg("-txindex", false);
    pblocktree->WriteFlag("txindex", fTxIndex);
    printf("Initializing databases...\n");

    // Only add the genesis block if not reindexing (in which case we reuse the one already on disk)
    if (!fReindex) {
        // Genesis block: coinbase embeds the timestamp string for this chain.
        const char* pszTimestamp = "Mtgox is down";
        CTransaction txNew;
        txNew.vin.resize(1);
        txNew.vout.resize(1);
        txNew.vin[0].scriptSig = CScript() << 486604799 << CBigNum(4) << vector<unsigned char>((const unsigned char*)pszTimestamp, (const unsigned char*)pszTimestamp + strlen(pszTimestamp));
        txNew.vout[0].nValue = 50 * COIN;
        txNew.vout[0].scriptPubKey = CScript() << ParseHex("04678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5f") << OP_CHECKSIG;
        CBlock block;
        block.vtx.push_back(txNew);
        block.hashPrevBlock = 0;
        block.hashMerkleRoot = block.BuildMerkleTree();
        block.nVersion = 1;
        block.nTime    = 1393509130;
        block.nBits    = 486604799;
        block.nNonce   = 1223027443;

        if (fTestNet)
        {
            // NOTE(review): testnet reuses the mainnet nTime/nNonce here —
            // likely intentional for this fork, but worth confirming.
            block.nTime    = 1393509130;
            block.nNonce   = 1223027443;
        }

        //// debug print
        uint256 hash = block.GetHash();
        printf("%s\n", hash.ToString().c_str());
        printf("%s\n", hashGenesisBlock.ToString().c_str());
        printf("%s\n", block.hashMerkleRoot.ToString().c_str());
        assert(block.hashMerkleRoot == uint256("0xa99c7c572f07503bb52506cc6bcc18a0653d78c9c34fd7b576fe093723554bac"));
        block.print();
        assert(hash == hashGenesisBlock);

        // Start new block file
try {
            unsigned int nBlockSize = ::GetSerializeSize(block, SER_DISK, CLIENT_VERSION);
            CDiskBlockPos blockPos;
            CValidationState state;
            // +8 covers the message-start magic and size prefix written before the block
            if (!FindBlockPos(state, blockPos, nBlockSize+8, 0, block.nTime))
                return error("LoadBlockIndex() : FindBlockPos failed");
            if (!block.WriteToDisk(blockPos))
                return error("LoadBlockIndex() : writing genesis block to disk failed");
            if (!block.AddToBlockIndex(state, blockPos))
                return error("LoadBlockIndex() : genesis block not accepted");
        } catch(std::runtime_error &e) {
            return error("LoadBlockIndex() : failed to initialize block database: %s", e.what());
        }
    }

    return true;
}

// Debug helper: print the whole block tree (all forks) as ASCII art.
void PrintBlockTree()
{
    // pre-compute tree structure
    map<CBlockIndex*, vector<CBlockIndex*> > mapNext;
    for (map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.begin(); mi != mapBlockIndex.end(); ++mi)
    {
        CBlockIndex* pindex = (*mi).second;
        mapNext[pindex->pprev].push_back(pindex);
        // test
        //while (rand() % 3 == 0)
        //    mapNext[pindex->pprev].push_back(pindex);
    }

    vector<pair<int, CBlockIndex*> > vStack;
    vStack.push_back(make_pair(0, pindexGenesisBlock));

    int nPrevCol = 0;
    while (!vStack.empty())
    {
        int nCol = vStack.back().first;
        CBlockIndex* pindex = vStack.back().second;
        vStack.pop_back();

        // print split or gap
        if (nCol > nPrevCol)
        {
            for (int i = 0; i < nCol-1; i++)
                printf("| ");
            printf("|\\\n");
        }
        else if (nCol < nPrevCol)
        {
            for (int i = 0; i < nCol; i++)
                printf("| ");
            printf("|\n");
        }
        nPrevCol = nCol;

        // print columns
        for (int i = 0; i < nCol; i++)
            printf("| ");

        // print item
        CBlock block;
        block.ReadFromDisk(pindex);
        printf("%d (blk%05u.dat:0x%x) %s tx %"PRIszu"",
            pindex->nHeight,
            pindex->GetBlockPos().nFile, pindex->GetBlockPos().nPos,
            DateTimeStrFormat("%Y-%m-%d %H:%M:%S", block.GetBlockTime()).c_str(),
            block.vtx.size());

        PrintWallets(block);

        // put the main time-chain first so the best chain prints as column 0
        vector<CBlockIndex*>& vNext = mapNext[pindex];
        for (unsigned int i = 0; i < vNext.size(); i++)
        {
            if (vNext[i]->pnext)
            {
                swap(vNext[0], vNext[i]);
                break;
            }
        }

        // iterate children
        for (unsigned int i = 0; i < vNext.size(); i++)
            vStack.push_back(make_pair(nCol+i, vNext[i]));
    }
}

// Import blocks from an external file (bootstrap.dat / -loadblock / reindex).
// Scans for the network magic, deserializes each block and feeds it through
// ProcessBlock. dbp, when given, identifies the on-disk file being reindexed
// so already-indexed bytes are skipped. Returns true if any block was loaded.
bool LoadExternalBlockFile(FILE* fileIn, CDiskBlockPos *dbp)
{
    int64 nStart = GetTimeMillis();

    int nLoaded = 0;
    try {
        CBufferedFile blkdat(fileIn, 2*MAX_BLOCK_SIZE, MAX_BLOCK_SIZE+8, SER_DISK, CLIENT_VERSION);
        uint64 nStartByte = 0;
        if (dbp) {
            // (try to) skip already indexed part
            CBlockFileInfo info;
            if (pblocktree->ReadBlockFileInfo(dbp->nFile, info)) {
                nStartByte = info.nSize;
                blkdat.Seek(info.nSize);
            }
        }
        uint64 nRewind = blkdat.GetPos();
        while (blkdat.good() && !blkdat.eof()) {
            boost::this_thread::interruption_point();
            blkdat.SetPos(nRewind);
            nRewind++; // start one byte further next time, in case of failure
            blkdat.SetLimit(); // remove former limit
            unsigned int nSize = 0;
            try {
                // locate a header
                unsigned char buf[4];
                blkdat.FindByte(pchMessageStart[0]);
                nRewind = blkdat.GetPos()+1;
                blkdat >> FLATDATA(buf);
                if (memcmp(buf, pchMessageStart, 4))
                    continue;
                // read size
                blkdat >> nSize;
                if (nSize < 80 || nSize > MAX_BLOCK_SIZE)
                    continue;
            } catch (std::exception &e) {
                // no valid block header found; don't complain
                break;
            }
            try {
                // read block
                uint64 nBlockPos = blkdat.GetPos();
                blkdat.SetLimit(nBlockPos + nSize);
                CBlock block;
                blkdat >> block;
                nRewind = blkdat.GetPos();

                // process block
                if (nBlockPos >= nStartByte) {
                    LOCK(cs_main);
                    if (dbp)
                        dbp->nPos = nBlockPos;
                    CValidationState state;
                    if (ProcessBlock(state, NULL, &block, dbp))
                        nLoaded++;
                    if (state.IsError())
                        break;
                }
            } catch (std::exception &e) {
                // bad block payload: resync from the byte after the last magic
                printf("%s() : Deserialize or I/O error caught during load\n", __PRETTY_FUNCTION__);
            }
        }
        fclose(fileIn);
    } catch(std::runtime_error &e) {
        AbortNode(_("Error: system error: ") + e.what());
    }
    if (nLoaded > 0)
        printf("Loaded %i blocks from external file in %"PRI64d"ms\n", nLoaded, GetTimeMillis() - nStart);
    return nLoaded > 0;
}

//////////////////////////////////////////////////////////////////////////////
//
// CAlert
//

extern map<uint256, CAlert> mapAlerts;
extern
CCriticalSection cs_mapAlerts; string GetWarnings(string strFor) { int nPriority = 0; string strStatusBar; string strRPC; if (GetBoolArg("-testsafemode")) strRPC = "test"; if (!CLIENT_VERSION_IS_RELEASE) strStatusBar = _("This is a pre-release test build - use at your own risk - do not use for mining or merchant applications"); // Misc warnings like out of disk space and clock is wrong if (strMiscWarning != "") { nPriority = 1000; strStatusBar = strMiscWarning; } // Longer invalid proof-of-work chain if (pindexBest && nBestInvalidWork > nBestChainWork + (pindexBest->GetBlockWork() * 6).getuint256()) { nPriority = 2000; strStatusBar = strRPC = _("Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade."); } // Alerts { LOCK(cs_mapAlerts); BOOST_FOREACH(PAIRTYPE(const uint256, CAlert)& item, mapAlerts) { const CAlert& alert = item.second; if (alert.AppliesToMe() && alert.nPriority > nPriority) { nPriority = alert.nPriority; strStatusBar = alert.strStatusBar; } } } if (strFor == "statusbar") return strStatusBar; else if (strFor == "rpc") return strRPC; assert(!"GetWarnings() : invalid parameter"); return "error"; } ////////////////////////////////////////////////////////////////////////////// // // Messages // bool static AlreadyHave(const CInv& inv) { switch (inv.type) { case MSG_TX: { bool txInMap = false; { LOCK(mempool.cs); txInMap = mempool.exists(inv.hash); } return txInMap || mapOrphanTransactions.count(inv.hash) || pcoinsTip->HaveCoins(inv.hash); } case MSG_BLOCK: return mapBlockIndex.count(inv.hash) || mapOrphanBlocks.count(inv.hash); } // Don't know what it is, just say we already got one return true; } // The message start string is designed to be unlikely to occur in normal data. // The characters are rarely used upper ASCII, not valid as UTF-8, and produce // a large 4-byte int at any alignment. 
unsigned char pchMessageStart[4] = { 0xf9, 0xbe, 0xb4, 0xd9 };

// Service a peer's queued "getdata" requests: send blocks / merkleblocks from
// disk and transactions from the relay map or mempool. Stops early if the
// peer's send buffer fills, and after any block (blocks are large). Items we
// cannot supply are collected and reported via "notfound".
void static ProcessGetData(CNode* pfrom)
{
    std::deque<CInv>::iterator it = pfrom->vRecvGetData.begin();

    vector<CInv> vNotFound;

    while (it != pfrom->vRecvGetData.end()) {
        // Don't bother if send buffer is too full to respond anyway
        if (pfrom->nSendSize >= SendBufferSize())
            break;

        const CInv &inv = *it;
        {
            boost::this_thread::interruption_point();
            it++;

            if (inv.type == MSG_BLOCK || inv.type == MSG_FILTERED_BLOCK)
            {
                // Send block from disk
                map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(inv.hash);
                if (mi != mapBlockIndex.end())
                {
                    CBlock block;
                    block.ReadFromDisk((*mi).second);
                    if (inv.type == MSG_BLOCK)
                        pfrom->PushMessage("block", block);
                    else // MSG_FILTERED_BLOCK)
                    {
                        LOCK(pfrom->cs_filter);
                        if (pfrom->pfilter)
                        {
                            CMerkleBlock merkleBlock(block, *pfrom->pfilter);
                            pfrom->PushMessage("merkleblock", merkleBlock);
                            // CMerkleBlock just contains hashes, so also push any transactions in the block the client did not see
                            // This avoids hurting performance by pointlessly requiring a round-trip
                            // Note that there is currently no way for a node to request any single transactions we didn't send here -
                            // they must either disconnect and retry or request the full block.
                            // Thus, the protocol spec specified allows for us to provide duplicate txn here,
                            // however we MUST always provide at least what the remote peer needs
                            typedef std::pair<unsigned int, uint256> PairType;
                            BOOST_FOREACH(PairType& pair, merkleBlock.vMatchedTxn)
                                if (!pfrom->setInventoryKnown.count(CInv(MSG_TX, pair.second)))
                                    pfrom->PushMessage("tx", block.vtx[pair.first]);
                        }
                        // else
                            // no response
                    }

                    // Trigger them to send a getblocks request for the next batch of inventory
                    if (inv.hash == pfrom->hashContinue)
                    {
                        // Bypass PushInventory, this must send even if redundant,
                        // and we want it right after the last block so they don't
                        // wait for other stuff first.
                        vector<CInv> vInv;
                        vInv.push_back(CInv(MSG_BLOCK, hashBestChain));
                        pfrom->PushMessage("inv", vInv);
                        pfrom->hashContinue = 0;
                    }
                }
            }
            else if (inv.IsKnownType())
            {
                // Send stream from relay memory
                bool pushed = false;
                {
                    LOCK(cs_mapRelay);
                    map<CInv, CDataStream>::iterator mi = mapRelay.find(inv);
                    if (mi != mapRelay.end()) {
                        pfrom->PushMessage(inv.GetCommand(), (*mi).second);
                        pushed = true;
                    }
                }
                if (!pushed && inv.type == MSG_TX) {
                    // Fall back to the mempool for transactions not in relay memory
                    LOCK(mempool.cs);
                    if (mempool.exists(inv.hash)) {
                        CTransaction tx = mempool.lookup(inv.hash);
                        CDataStream ss(SER_NETWORK, PROTOCOL_VERSION);
                        ss.reserve(1000);
                        ss << tx;
                        pfrom->PushMessage("tx", ss);
                        pushed = true;
                    }
                }
                if (!pushed) {
                    vNotFound.push_back(inv);
                }
            }

            // Track requests for our stuff.
            Inventory(inv.hash);

            // Yield after any block: they are large, so let other traffic through.
            if (inv.type == MSG_BLOCK || inv.type == MSG_FILTERED_BLOCK)
                break;
        }
    }

    pfrom->vRecvGetData.erase(pfrom->vRecvGetData.begin(), it);

    if (!vNotFound.empty()) {
        // Let the peer know that we didn't find what it asked for, so it doesn't
        // have to wait around forever. Currently only SPV clients actually care
        // about this message: it's needed when they are recursively walking the
        // dependencies of relevant unconfirmed transactions. SPV clients want to
        // do that because they want to know about (and store and rebroadcast and
        // risk analyze) the dependencies of transactions relevant to them, without
        // having to download the entire memory pool.
pfrom->PushMessage("notfound", vNotFound); } } bool static ProcessMessage(CNode* pfrom, string strCommand, CDataStream& vRecv) { RandAddSeedPerfmon(); if (fDebug) printf("received: %s (%"PRIszu" bytes)\n", strCommand.c_str(), vRecv.size()); if (mapArgs.count("-dropmessagestest") && GetRand(atoi(mapArgs["-dropmessagestest"])) == 0) { printf("dropmessagestest DROPPING RECV MESSAGE\n"); return true; } if (strCommand == "version") { // Each connection can only send one version message if (pfrom->nVersion != 0) { pfrom->Misbehaving(1); return false; } int64 nTime; CAddress addrMe; CAddress addrFrom; uint64 nNonce = 1; vRecv >> pfrom->nVersion >> pfrom->nServices >> nTime >> addrMe; if (pfrom->nVersion < MIN_PROTO_VERSION) { // Since February 20, 2012, the protocol is initiated at version 209, // and earlier versions are no longer supported printf("partner %s using obsolete version %i; disconnecting\n", pfrom->addr.ToString().c_str(), pfrom->nVersion); pfrom->fDisconnect = true; return false; } if (pfrom->nVersion == 10300) pfrom->nVersion = 300; if (!vRecv.empty()) vRecv >> addrFrom >> nNonce; if (!vRecv.empty()) { vRecv >> pfrom->strSubVer; pfrom->cleanSubVer = SanitizeString(pfrom->strSubVer); } if (!vRecv.empty()) vRecv >> pfrom->nStartingHeight; if (!vRecv.empty()) vRecv >> pfrom->fRelayTxes; // set to true after we get the first filter* message else pfrom->fRelayTxes = true; if (pfrom->fInbound && addrMe.IsRoutable()) { pfrom->addrLocal = addrMe; SeenLocal(addrMe); } // Disconnect if we connected to ourself if (nNonce == nLocalHostNonce && nNonce > 1) { printf("connected to self at %s, disconnecting\n", pfrom->addr.ToString().c_str()); pfrom->fDisconnect = true; return true; } // Be shy and don't send version until we hear if (pfrom->fInbound) pfrom->PushVersion(); pfrom->fClient = !(pfrom->nServices & NODE_NETWORK); AddTimeData(pfrom->addr, nTime); // Change version pfrom->PushMessage("verack"); pfrom->ssSend.SetVersion(min(pfrom->nVersion, PROTOCOL_VERSION)); if 
(!pfrom->fInbound) { // Advertise our address if (!fNoListen && !IsInitialBlockDownload()) { CAddress addr = GetLocalAddress(&pfrom->addr); if (addr.IsRoutable()) pfrom->PushAddress(addr); } // Get recent addresses if (pfrom->fOneShot || pfrom->nVersion >= CADDR_TIME_VERSION || addrman.size() < 1000) { pfrom->PushMessage("getaddr"); pfrom->fGetAddr = true; } addrman.Good(pfrom->addr); } else { if (((CNetAddr)pfrom->addr) == (CNetAddr)addrFrom) { addrman.Add(addrFrom, addrFrom); addrman.Good(addrFrom); } } // Relay alerts { LOCK(cs_mapAlerts); BOOST_FOREACH(PAIRTYPE(const uint256, CAlert)& item, mapAlerts) item.second.RelayTo(pfrom); } pfrom->fSuccessfullyConnected = true; printf("receive version message: %s: version %d, blocks=%d, us=%s, them=%s, peer=%s\n", pfrom->cleanSubVer.c_str(), pfrom->nVersion, pfrom->nStartingHeight, addrMe.ToString().c_str(), addrFrom.ToString().c_str(), pfrom->addr.ToString().c_str()); cPeerBlockCounts.input(pfrom->nStartingHeight); } else if (pfrom->nVersion == 0) { // Must have a version message before anything else pfrom->Misbehaving(1); return false; } else if (strCommand == "verack") { pfrom->SetRecvVersion(min(pfrom->nVersion, PROTOCOL_VERSION)); } else if (strCommand == "addr") { vector<CAddress> vAddr; vRecv >> vAddr; // Don't want addr from older versions unless seeding if (pfrom->nVersion < CADDR_TIME_VERSION && addrman.size() > 1000) return true; if (vAddr.size() > 1000) { pfrom->Misbehaving(20); return error("message addr size() = %"PRIszu"", vAddr.size()); } // Store the new addresses vector<CAddress> vAddrOk; int64 nNow = GetAdjustedTime(); int64 nSince = nNow - 10 * 60; BOOST_FOREACH(CAddress& addr, vAddr) { boost::this_thread::interruption_point(); if (addr.nTime <= 100000000 || addr.nTime > nNow + 10 * 60) addr.nTime = nNow - 5 * 24 * 60 * 60; pfrom->AddAddressKnown(addr); bool fReachable = IsReachable(addr); if (addr.nTime > nSince && !pfrom->fGetAddr && vAddr.size() <= 10 && addr.IsRoutable()) { // Relay to a limited 
number of other nodes { LOCK(cs_vNodes); // Use deterministic randomness to send to the same nodes for 24 hours // at a time so the setAddrKnowns of the chosen nodes prevent repeats static uint256 hashSalt; if (hashSalt == 0) hashSalt = GetRandHash(); uint64 hashAddr = addr.GetHash(); uint256 hashRand = hashSalt ^ (hashAddr<<32) ^ ((GetTime()+hashAddr)/(24*60*60)); hashRand = Hash(BEGIN(hashRand), END(hashRand)); multimap<uint256, CNode*> mapMix; BOOST_FOREACH(CNode* pnode, vNodes) { if (pnode->nVersion < CADDR_TIME_VERSION) continue; unsigned int nPointer; memcpy(&nPointer, &pnode, sizeof(nPointer)); uint256 hashKey = hashRand ^ nPointer; hashKey = Hash(BEGIN(hashKey), END(hashKey)); mapMix.insert(make_pair(hashKey, pnode)); } int nRelayNodes = fReachable ? 2 : 1; // limited relaying of addresses outside our network(s) for (multimap<uint256, CNode*>::iterator mi = mapMix.begin(); mi != mapMix.end() && nRelayNodes-- > 0; ++mi) ((*mi).second)->PushAddress(addr); } } // Do not store addresses outside our network if (fReachable) vAddrOk.push_back(addr); } addrman.Add(vAddrOk, pfrom->addr, 2 * 60 * 60); if (vAddr.size() < 1000) pfrom->fGetAddr = false; if (pfrom->fOneShot) pfrom->fDisconnect = true; } else if (strCommand == "inv") { vector<CInv> vInv; vRecv >> vInv; if (vInv.size() > MAX_INV_SZ) { pfrom->Misbehaving(20); return error("message inv size() = %"PRIszu"", vInv.size()); } // find last block in inv vector unsigned int nLastBlock = (unsigned int)(-1); for (unsigned int nInv = 0; nInv < vInv.size(); nInv++) { if (vInv[vInv.size() - 1 - nInv].type == MSG_BLOCK) { nLastBlock = vInv.size() - 1 - nInv; break; } } for (unsigned int nInv = 0; nInv < vInv.size(); nInv++) { const CInv &inv = vInv[nInv]; boost::this_thread::interruption_point(); pfrom->AddInventoryKnown(inv); bool fAlreadyHave = AlreadyHave(inv); if (fDebug) printf(" got inventory: %s %s\n", inv.ToString().c_str(), fAlreadyHave ? 
"have" : "new"); if (!fAlreadyHave) { if (!fImporting && !fReindex) pfrom->AskFor(inv); } else if (inv.type == MSG_BLOCK && mapOrphanBlocks.count(inv.hash)) { pfrom->PushGetBlocks(pindexBest, GetOrphanRoot(mapOrphanBlocks[inv.hash])); } else if (nInv == nLastBlock) { // In case we are on a very long side-chain, it is possible that we already have // the last block in an inv bundle sent in response to getblocks. Try to detect // this situation and push another getblocks to continue. pfrom->PushGetBlocks(mapBlockIndex[inv.hash], uint256(0)); if (fDebug) printf("force request: %s\n", inv.ToString().c_str()); } // Track requests for our stuff Inventory(inv.hash); } } else if (strCommand == "getdata") { vector<CInv> vInv; vRecv >> vInv; if (vInv.size() > MAX_INV_SZ) { pfrom->Misbehaving(20); return error("message getdata size() = %"PRIszu"", vInv.size()); } if (fDebugNet || (vInv.size() != 1)) printf("received getdata (%"PRIszu" invsz)\n", vInv.size()); if ((fDebugNet && vInv.size() > 0) || (vInv.size() == 1)) printf("received getdata for: %s\n", vInv[0].ToString().c_str()); pfrom->vRecvGetData.insert(pfrom->vRecvGetData.end(), vInv.begin(), vInv.end()); ProcessGetData(pfrom); } else if (strCommand == "getblocks") { CBlockLocator locator; uint256 hashStop; vRecv >> locator >> hashStop; // Find the last block the caller has in the main chain CBlockIndex* pindex = locator.GetBlockIndex(); // Send the rest of the chain if (pindex) pindex = pindex->pnext; int nLimit = 500; printf("getblocks %d to %s limit %d\n", (pindex ? pindex->nHeight : -1), hashStop.ToString().c_str(), nLimit); for (; pindex; pindex = pindex->pnext) { if (pindex->GetBlockHash() == hashStop) { printf(" getblocks stopping at %d %s\n", pindex->nHeight, pindex->GetBlockHash().ToString().c_str()); break; } pfrom->PushInventory(CInv(MSG_BLOCK, pindex->GetBlockHash())); if (--nLimit <= 0) { // When this block is requested, we'll send an inv that'll make them // getblocks the next batch of inventory. 
printf(" getblocks stopping at limit %d %s\n", pindex->nHeight, pindex->GetBlockHash().ToString().c_str()); pfrom->hashContinue = pindex->GetBlockHash(); break; } } } else if (strCommand == "getheaders") { CBlockLocator locator; uint256 hashStop; vRecv >> locator >> hashStop; CBlockIndex* pindex = NULL; if (locator.IsNull()) { // If locator is null, return the hashStop block map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hashStop); if (mi == mapBlockIndex.end()) return true; pindex = (*mi).second; } else { // Find the last block the caller has in the main chain pindex = locator.GetBlockIndex(); if (pindex) pindex = pindex->pnext; } // we must use CBlocks, as CBlockHeaders won't include the 0x00 nTx count at the end vector<CBlock> vHeaders; int nLimit = 2000; printf("getheaders %d to %s\n", (pindex ? pindex->nHeight : -1), hashStop.ToString().c_str()); for (; pindex; pindex = pindex->pnext) { vHeaders.push_back(pindex->GetBlockHeader()); if (--nLimit <= 0 || pindex->GetBlockHash() == hashStop) break; } pfrom->PushMessage("headers", vHeaders); } else if (strCommand == "tx") { vector<uint256> vWorkQueue; vector<uint256> vEraseQueue; CDataStream vMsg(vRecv); CTransaction tx; vRecv >> tx; CInv inv(MSG_TX, tx.GetHash()); pfrom->AddInventoryKnown(inv); bool fMissingInputs = false; CValidationState state; if (tx.AcceptToMemoryPool(state, true, true, &fMissingInputs)) { RelayTransaction(tx, inv.hash); mapAlreadyAskedFor.erase(inv); vWorkQueue.push_back(inv.hash); vEraseQueue.push_back(inv.hash); printf("AcceptToMemoryPool: %s %s : accepted %s (poolsz %"PRIszu")\n", pfrom->addr.ToString().c_str(), pfrom->cleanSubVer.c_str(), tx.GetHash().ToString().c_str(), mempool.mapTx.size()); // Recursively process any orphan transactions that depended on this one for (unsigned int i = 0; i < vWorkQueue.size(); i++) { uint256 hashPrev = vWorkQueue[i]; for (set<uint256>::iterator mi = mapOrphanTransactionsByPrev[hashPrev].begin(); mi != 
mapOrphanTransactionsByPrev[hashPrev].end(); ++mi) { const uint256& orphanHash = *mi; const CTransaction& orphanTx = mapOrphanTransactions[orphanHash]; bool fMissingInputs2 = false; // Use a dummy CValidationState so someone can't setup nodes to counter-DoS based on orphan // resolution (that is, feeding people an invalid transaction based on LegitTxX in order to get // anyone relaying LegitTxX banned) CValidationState stateDummy; if (tx.AcceptToMemoryPool(stateDummy, true, true, &fMissingInputs2)) { printf(" accepted orphan tx %s\n", orphanHash.ToString().c_str()); RelayTransaction(orphanTx, orphanHash); mapAlreadyAskedFor.erase(CInv(MSG_TX, orphanHash)); vWorkQueue.push_back(orphanHash); vEraseQueue.push_back(orphanHash); } else if (!fMissingInputs2) { // invalid or too-little-fee orphan vEraseQueue.push_back(orphanHash); printf(" removed orphan tx %s\n", orphanHash.ToString().c_str()); } } } BOOST_FOREACH(uint256 hash, vEraseQueue) EraseOrphanTx(hash); } else if (fMissingInputs) { AddOrphanTx(tx); // DoS prevention: do not allow mapOrphanTransactions to grow unbounded unsigned int nEvicted = LimitOrphanTxSize(MAX_ORPHAN_TRANSACTIONS); if (nEvicted > 0) printf("mapOrphan overflow, removed %u tx\n", nEvicted); } int nDoS = 0; if (state.IsInvalid(nDoS)) { printf("%s from %s %s was not accepted into the memory pool\n", tx.GetHash().ToString().c_str(), pfrom->addr.ToString().c_str(), pfrom->cleanSubVer.c_str()); if (nDoS > 0) pfrom->Misbehaving(nDoS); } } else if (strCommand == "block" && !fImporting && !fReindex) // Ignore blocks received while importing { CBlock block; vRecv >> block; printf("received block %s\n", block.GetHash().ToString().c_str()); // block.print(); CInv inv(MSG_BLOCK, block.GetHash()); pfrom->AddInventoryKnown(inv); CValidationState state; if (ProcessBlock(state, pfrom, &block) || state.CorruptionPossible()) mapAlreadyAskedFor.erase(inv); int nDoS = 0; if (state.IsInvalid(nDoS)) if (nDoS > 0) pfrom->Misbehaving(nDoS); } else if (strCommand == 
"getaddr") { pfrom->vAddrToSend.clear(); vector<CAddress> vAddr = addrman.GetAddr(); BOOST_FOREACH(const CAddress &addr, vAddr) pfrom->PushAddress(addr); } else if (strCommand == "mempool") { std::vector<uint256> vtxid; LOCK2(mempool.cs, pfrom->cs_filter); mempool.queryHashes(vtxid); vector<CInv> vInv; BOOST_FOREACH(uint256& hash, vtxid) { CInv inv(MSG_TX, hash); if ((pfrom->pfilter && pfrom->pfilter->IsRelevantAndUpdate(mempool.lookup(hash), hash)) || (!pfrom->pfilter)) vInv.push_back(inv); if (vInv.size() == MAX_INV_SZ) break; } if (vInv.size() > 0) pfrom->PushMessage("inv", vInv); } else if (strCommand == "ping") { if (pfrom->nVersion > BIP0031_VERSION) { uint64 nonce = 0; vRecv >> nonce; // Echo the message back with the nonce. This allows for two useful features: // // 1) A remote node can quickly check if the connection is operational // 2) Remote nodes can measure the latency of the network thread. If this node // is overloaded it won't respond to pings quickly and the remote node can // avoid sending us more work, like chain download requests. // // The nonce stops the remote getting confused between different pings: without // it, if the remote node sends a ping once per second and this node takes 5 // seconds to respond to each, the 5th ping the remote sends would appear to // return very quickly. pfrom->PushMessage("pong", nonce); } } else if (strCommand == "alert") { CAlert alert; vRecv >> alert; uint256 alertHash = alert.GetHash(); if (pfrom->setKnown.count(alertHash) == 0) { if (alert.ProcessAlert()) { // Relay pfrom->setKnown.insert(alertHash); { LOCK(cs_vNodes); BOOST_FOREACH(CNode* pnode, vNodes) alert.RelayTo(pnode); } } else { // Small DoS penalty so peers that send us lots of // duplicate/expired/invalid-signature/whatever alerts // eventually get banned. // This isn't a Misbehaving(100) (immediate ban) because the // peer might be an older or different implementation with // a different signature key, etc. 
pfrom->Misbehaving(10); } } } else if (strCommand == "filterload") { CBloomFilter filter; vRecv >> filter; if (!filter.IsWithinSizeConstraints()) // There is no excuse for sending a too-large filter pfrom->Misbehaving(100); else { LOCK(pfrom->cs_filter); delete pfrom->pfilter; pfrom->pfilter = new CBloomFilter(filter); pfrom->pfilter->UpdateEmptyFull(); } pfrom->fRelayTxes = true; } else if (strCommand == "filteradd") { vector<unsigned char> vData; vRecv >> vData; // Nodes must NEVER send a data item > 520 bytes (the max size for a script data object, // and thus, the maximum size any matched object can have) in a filteradd message if (vData.size() > MAX_SCRIPT_ELEMENT_SIZE) { pfrom->Misbehaving(100); } else { LOCK(pfrom->cs_filter); if (pfrom->pfilter) pfrom->pfilter->insert(vData); else pfrom->Misbehaving(100); } } else if (strCommand == "filterclear") { LOCK(pfrom->cs_filter); delete pfrom->pfilter; pfrom->pfilter = new CBloomFilter(); pfrom->fRelayTxes = true; } else { // Ignore unknown commands for extensibility } // Update the last seen time for this node's address if (pfrom->fNetworkNode) if (strCommand == "version" || strCommand == "addr" || strCommand == "inv" || strCommand == "getdata" || strCommand == "ping") AddressCurrentlyConnected(pfrom->addr); return true; } // requires LOCK(cs_vRecvMsg) bool ProcessMessages(CNode* pfrom) { //if (fDebug) // printf("ProcessMessages(%zu messages)\n", pfrom->vRecvMsg.size()); // // Message format // (4) message start // (12) command // (4) size // (4) checksum // (x) data // bool fOk = true; if (!pfrom->vRecvGetData.empty()) ProcessGetData(pfrom); // this maintains the order of responses if (!pfrom->vRecvGetData.empty()) return fOk; std::deque<CNetMessage>::iterator it = pfrom->vRecvMsg.begin(); while (!pfrom->fDisconnect && it != pfrom->vRecvMsg.end()) { // Don't bother if send buffer is too full to respond anyway if (pfrom->nSendSize >= SendBufferSize()) break; // get next message CNetMessage& msg = *it; //if 
(fDebug) // printf("ProcessMessages(message %u msgsz, %zu bytes, complete:%s)\n", // msg.hdr.nMessageSize, msg.vRecv.size(), // msg.complete() ? "Y" : "N"); // end, if an incomplete message is found if (!msg.complete()) break; // at this point, any failure means we can delete the current message it++; // Scan for message start if (memcmp(msg.hdr.pchMessageStart, pchMessageStart, sizeof(pchMessageStart)) != 0) { printf("\n\nPROCESSMESSAGE: INVALID MESSAGESTART\n\n"); fOk = false; break; } // Read header CMessageHeader& hdr = msg.hdr; if (!hdr.IsValid()) { printf("\n\nPROCESSMESSAGE: ERRORS IN HEADER %s\n\n\n", hdr.GetCommand().c_str()); continue; } string strCommand = hdr.GetCommand(); // Message size unsigned int nMessageSize = hdr.nMessageSize; // Checksum CDataStream& vRecv = msg.vRecv; uint256 hash = Hash(vRecv.begin(), vRecv.begin() + nMessageSize); unsigned int nChecksum = 0; memcpy(&nChecksum, &hash, sizeof(nChecksum)); if (nChecksum != hdr.nChecksum) { printf("ProcessMessages(%s, %u bytes) : CHECKSUM ERROR nChecksum=%08x hdr.nChecksum=%08x\n", strCommand.c_str(), nMessageSize, nChecksum, hdr.nChecksum); continue; } // Process message bool fRet = false; try { { LOCK(cs_main); fRet = ProcessMessage(pfrom, strCommand, vRecv); } boost::this_thread::interruption_point(); } catch (std::ios_base::failure& e) { if (strstr(e.what(), "end of data")) { // Allow exceptions from under-length message on vRecv printf("ProcessMessages(%s, %u bytes) : Exception '%s' caught, normally caused by a message being shorter than its stated length\n", strCommand.c_str(), nMessageSize, e.what()); } else if (strstr(e.what(), "size too large")) { // Allow exceptions from over-long size printf("ProcessMessages(%s, %u bytes) : Exception '%s' caught\n", strCommand.c_str(), nMessageSize, e.what()); } else { PrintExceptionContinue(&e, "ProcessMessages()"); } } catch (boost::thread_interrupted) { throw; } catch (std::exception& e) { PrintExceptionContinue(&e, "ProcessMessages()"); } catch 
(...) { PrintExceptionContinue(NULL, "ProcessMessages()"); } if (!fRet) printf("ProcessMessage(%s, %u bytes) FAILED\n", strCommand.c_str(), nMessageSize); break; } // In case the connection got shut down, its receive buffer was wiped if (!pfrom->fDisconnect) pfrom->vRecvMsg.erase(pfrom->vRecvMsg.begin(), it); return fOk; } bool SendMessages(CNode* pto, bool fSendTrickle) { TRY_LOCK(cs_main, lockMain); if (lockMain) { // Don't send anything until we get their version message if (pto->nVersion == 0) return true; // Keep-alive ping. We send a nonce of zero because we don't use it anywhere // right now. if (pto->nLastSend && GetTime() - pto->nLastSend > 30 * 60 && pto->vSendMsg.empty()) { uint64 nonce = 0; if (pto->nVersion > BIP0031_VERSION) pto->PushMessage("ping", nonce); else pto->PushMessage("ping"); } // Start block sync if (pto->fStartSync && !fImporting && !fReindex) { pto->fStartSync = false; pto->PushGetBlocks(pindexBest, uint256(0)); } // Resend wallet transactions that haven't gotten in a block yet // Except during reindex, importing and IBD, when old wallet // transactions become unconfirmed and spams other nodes. 
if (!fReindex && !fImporting && !IsInitialBlockDownload()) { ResendWalletTransactions(); } // Address refresh broadcast static int64 nLastRebroadcast; if (!IsInitialBlockDownload() && (GetTime() - nLastRebroadcast > 24 * 60 * 60)) { { LOCK(cs_vNodes); BOOST_FOREACH(CNode* pnode, vNodes) { // Periodically clear setAddrKnown to allow refresh broadcasts if (nLastRebroadcast) pnode->setAddrKnown.clear(); // Rebroadcast our address if (!fNoListen) { CAddress addr = GetLocalAddress(&pnode->addr); if (addr.IsRoutable()) pnode->PushAddress(addr); } } } nLastRebroadcast = GetTime(); } // // Message: addr // if (fSendTrickle) { vector<CAddress> vAddr; vAddr.reserve(pto->vAddrToSend.size()); BOOST_FOREACH(const CAddress& addr, pto->vAddrToSend) { // returns true if wasn't already contained in the set if (pto->setAddrKnown.insert(addr).second) { vAddr.push_back(addr); // receiver rejects addr messages larger than 1000 if (vAddr.size() >= 1000) { pto->PushMessage("addr", vAddr); vAddr.clear(); } } } pto->vAddrToSend.clear(); if (!vAddr.empty()) pto->PushMessage("addr", vAddr); } // // Message: inventory // vector<CInv> vInv; vector<CInv> vInvWait; { LOCK(pto->cs_inventory); vInv.reserve(pto->vInventoryToSend.size()); vInvWait.reserve(pto->vInventoryToSend.size()); BOOST_FOREACH(const CInv& inv, pto->vInventoryToSend) { if (pto->setInventoryKnown.count(inv)) continue; // trickle out tx inv to protect privacy if (inv.type == MSG_TX && !fSendTrickle) { // 1/4 of tx invs blast to all immediately static uint256 hashSalt; if (hashSalt == 0) hashSalt = GetRandHash(); uint256 hashRand = inv.hash ^ hashSalt; hashRand = Hash(BEGIN(hashRand), END(hashRand)); bool fTrickleWait = ((hashRand & 3) != 0); // always trickle our own transactions if (!fTrickleWait) { CWalletTx wtx; if (GetTransaction(inv.hash, wtx)) if (wtx.fFromMe) fTrickleWait = true; } if (fTrickleWait) { vInvWait.push_back(inv); continue; } } // returns true if wasn't already contained in the set if 
(pto->setInventoryKnown.insert(inv).second) { vInv.push_back(inv); if (vInv.size() >= 1000) { pto->PushMessage("inv", vInv); vInv.clear(); } } } pto->vInventoryToSend = vInvWait; } if (!vInv.empty()) pto->PushMessage("inv", vInv); // // Message: getdata // vector<CInv> vGetData; int64 nNow = GetTime() * 1000000; while (!pto->mapAskFor.empty() && (*pto->mapAskFor.begin()).first <= nNow) { const CInv& inv = (*pto->mapAskFor.begin()).second; if (!AlreadyHave(inv)) { if (fDebugNet) printf("sending getdata: %s\n", inv.ToString().c_str()); vGetData.push_back(inv); if (vGetData.size() >= 1000) { pto->PushMessage("getdata", vGetData); vGetData.clear(); } } pto->mapAskFor.erase(pto->mapAskFor.begin()); } if (!vGetData.empty()) pto->PushMessage("getdata", vGetData); } return true; } ////////////////////////////////////////////////////////////////////////////// // // BitcoinMiner // int static FormatHashBlocks(void* pbuffer, unsigned int len) { unsigned char* pdata = (unsigned char*)pbuffer; unsigned int blocks = 1 + ((len + 8) / 64); unsigned char* pend = pdata + 64 * blocks; memset(pdata + len, 0, 64 * blocks - len); pdata[len] = 0x80; unsigned int bits = len * 8; pend[-1] = (bits >> 0) & 0xff; pend[-2] = (bits >> 8) & 0xff; pend[-3] = (bits >> 16) & 0xff; pend[-4] = (bits >> 24) & 0xff; return blocks; } static const unsigned int pSHA256InitState[8] = {0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19}; void SHA256Transform(void* pstate, void* pinput, const void* pinit) { SHA256_CTX ctx; unsigned char data[64]; SHA256_Init(&ctx); for (int i = 0; i < 16; i++) ((uint32_t*)data)[i] = ByteReverse(((uint32_t*)pinput)[i]); for (int i = 0; i < 8; i++) ctx.h[i] = ((uint32_t*)pinit)[i]; SHA256_Update(&ctx, data, sizeof(data)); for (int i = 0; i < 8; i++) ((uint32_t*)pstate)[i] = ctx.h[i]; } // // ScanHash scans nonces looking for a hash with at least some zero bits. // It operates on big endian data. Caller does the byte reversing. 
// All input buffers are 16-byte aligned. nNonce is usually preserved // between calls, but periodically or if nNonce is 0xffff0000 or above, // the block is rebuilt and nNonce starts over at zero. // unsigned int static ScanHash_CryptoPP(char* pmidstate, char* pdata, char* phash1, char* phash, unsigned int& nHashesDone) { unsigned int& nNonce = *(unsigned int*)(pdata + 12); for (;;) { // Crypto++ SHA256 // Hash pdata using pmidstate as the starting state into // pre-formatted buffer phash1, then hash phash1 into phash nNonce++; SHA256Transform(phash1, pdata, pmidstate); SHA256Transform(phash, phash1, pSHA256InitState); // Return the nonce if the hash has at least some zero bits, // caller will check if it has enough to reach the target if (((unsigned short*)phash)[14] == 0) return nNonce; // If nothing found after trying for a while, return -1 if ((nNonce & 0xffff) == 0) { nHashesDone = 0xffff+1; return (unsigned int) -1; } if ((nNonce & 0xfff) == 0) boost::this_thread::interruption_point(); } } // Some explaining would be appreciated class COrphan { public: CTransaction* ptx; set<uint256> setDependsOn; double dPriority; double dFeePerKb; COrphan(CTransaction* ptxIn) { ptx = ptxIn; dPriority = dFeePerKb = 0; } void print() const { printf("COrphan(hash=%s, dPriority=%.1f, dFeePerKb=%.1f)\n", ptx->GetHash().ToString().c_str(), dPriority, dFeePerKb); BOOST_FOREACH(uint256 hash, setDependsOn) printf(" setDependsOn %s\n", hash.ToString().c_str()); } }; uint64 nLastBlockTx = 0; uint64 nLastBlockSize = 0; // We want to sort transactions by priority and fee, so: typedef boost::tuple<double, double, CTransaction*> TxPriority; class TxPriorityCompare { bool byFee; public: TxPriorityCompare(bool _byFee) : byFee(_byFee) { } bool operator()(const TxPriority& a, const TxPriority& b) { if (byFee) { if (a.get<1>() == b.get<1>()) return a.get<0>() < b.get<0>(); return a.get<1>() < b.get<1>(); } else { if (a.get<0>() == b.get<0>()) return a.get<1>() < b.get<1>(); return a.get<0>() 
< b.get<0>(); } } }; CBlockTemplate* CreateNewBlock(CReserveKey& reservekey) { // Create new block auto_ptr<CBlockTemplate> pblocktemplate(new CBlockTemplate()); if(!pblocktemplate.get()) return NULL; CBlock *pblock = &pblocktemplate->block; // pointer for convenience // Create coinbase tx CTransaction txNew; txNew.vin.resize(1); txNew.vin[0].prevout.SetNull(); txNew.vout.resize(1); CPubKey pubkey; if (!reservekey.GetReservedKey(pubkey)) return NULL; txNew.vout[0].scriptPubKey << pubkey << OP_CHECKSIG; // Add our coinbase tx as first transaction pblock->vtx.push_back(txNew); pblocktemplate->vTxFees.push_back(-1); // updated at end pblocktemplate->vTxSigOps.push_back(-1); // updated at end // Largest block you're willing to create: unsigned int nBlockMaxSize = GetArg("-blockmaxsize", DEFAULT_BLOCK_MAX_SIZE); // Limit to betweeen 1K and MAX_BLOCK_SIZE-1K for sanity: nBlockMaxSize = std::max((unsigned int)1000, std::min((unsigned int)(MAX_BLOCK_SIZE-1000), nBlockMaxSize)); // Special compatibility rule before 15 May: limit size to 500,000 bytes: if (GetAdjustedTime() < 1368576000) nBlockMaxSize = std::min(nBlockMaxSize, (unsigned int)(MAX_BLOCK_SIZE_GEN)); // How much of the block should be dedicated to high-priority transactions, // included regardless of the fees they pay unsigned int nBlockPrioritySize = GetArg("-blockprioritysize", DEFAULT_BLOCK_PRIORITY_SIZE); nBlockPrioritySize = std::min(nBlockMaxSize, nBlockPrioritySize); // Minimum block size you want to create; block will be filled with free transactions // until there are no more or the block reaches this size: unsigned int nBlockMinSize = GetArg("-blockminsize", 0); nBlockMinSize = std::min(nBlockMaxSize, nBlockMinSize); // Collect memory pool transactions into the block int64 nFees = 0; { LOCK2(cs_main, mempool.cs); CBlockIndex* pindexPrev = pindexBest; CCoinsViewCache view(*pcoinsTip, true); // Priority order to process transactions list<COrphan> vOrphan; // list memory doesn't move map<uint256, 
vector<COrphan*> > mapDependers; bool fPrintPriority = GetBoolArg("-printpriority"); // This vector will be sorted into a priority queue: vector<TxPriority> vecPriority; vecPriority.reserve(mempool.mapTx.size()); for (map<uint256, CTransaction>::iterator mi = mempool.mapTx.begin(); mi != mempool.mapTx.end(); ++mi) { CTransaction& tx = (*mi).second; if (tx.IsCoinBase() || !tx.IsFinal()) continue; COrphan* porphan = NULL; double dPriority = 0; int64 nTotalIn = 0; bool fMissingInputs = false; BOOST_FOREACH(const CTxIn& txin, tx.vin) { // Read prev transaction if (!view.HaveCoins(txin.prevout.hash)) { // This should never happen; all transactions in the memory // pool should connect to either transactions in the chain // or other transactions in the memory pool. if (!mempool.mapTx.count(txin.prevout.hash)) { printf("ERROR: mempool transaction missing input\n"); if (fDebug) assert("mempool transaction missing input" == 0); fMissingInputs = true; if (porphan) vOrphan.pop_back(); break; } // Has to wait for dependencies if (!porphan) { // Use list for automatic deletion vOrphan.push_back(COrphan(&tx)); porphan = &vOrphan.back(); } mapDependers[txin.prevout.hash].push_back(porphan); porphan->setDependsOn.insert(txin.prevout.hash); nTotalIn += mempool.mapTx[txin.prevout.hash].vout[txin.prevout.n].nValue; continue; } const CCoins &coins = view.GetCoins(txin.prevout.hash); int64 nValueIn = coins.vout[txin.prevout.n].nValue; nTotalIn += nValueIn; int nConf = pindexPrev->nHeight - coins.nHeight + 1; dPriority += (double)nValueIn * nConf; } if (fMissingInputs) continue; // Priority is sum(valuein * age) / txsize unsigned int nTxSize = ::GetSerializeSize(tx, SER_NETWORK, PROTOCOL_VERSION); dPriority /= nTxSize; // This is a more accurate fee-per-kilobyte than is used by the client code, because the // client code rounds up the size to the nearest 1K. That's good, because it gives an // incentive to create smaller transactions. 
double dFeePerKb = double(nTotalIn-tx.GetValueOut()) / (double(nTxSize)/1000.0); if (porphan) { porphan->dPriority = dPriority; porphan->dFeePerKb = dFeePerKb; } else vecPriority.push_back(TxPriority(dPriority, dFeePerKb, &(*mi).second)); } // Collect transactions into block uint64 nBlockSize = 1000; uint64 nBlockTx = 0; int nBlockSigOps = 100; bool fSortedByFee = (nBlockPrioritySize <= 0); TxPriorityCompare comparer(fSortedByFee); std::make_heap(vecPriority.begin(), vecPriority.end(), comparer); while (!vecPriority.empty()) { // Take highest priority transaction off the priority queue: double dPriority = vecPriority.front().get<0>(); double dFeePerKb = vecPriority.front().get<1>(); CTransaction& tx = *(vecPriority.front().get<2>()); std::pop_heap(vecPriority.begin(), vecPriority.end(), comparer); vecPriority.pop_back(); // Size limits unsigned int nTxSize = ::GetSerializeSize(tx, SER_NETWORK, PROTOCOL_VERSION); if (nBlockSize + nTxSize >= nBlockMaxSize) continue; // Legacy limits on sigOps: unsigned int nTxSigOps = tx.GetLegacySigOpCount(); if (nBlockSigOps + nTxSigOps >= MAX_BLOCK_SIGOPS) continue; // Skip free transactions if we're past the minimum block size: if (fSortedByFee && (dFeePerKb < CTransaction::nMinTxFee) && (nBlockSize + nTxSize >= nBlockMinSize)) continue; // Prioritize by fee once past the priority size or we run out of high-priority // transactions: if (!fSortedByFee && ((nBlockSize + nTxSize >= nBlockPrioritySize) || (dPriority < COIN * 144 / 250))) { fSortedByFee = true; comparer = TxPriorityCompare(fSortedByFee); std::make_heap(vecPriority.begin(), vecPriority.end(), comparer); } if (!tx.HaveInputs(view)) continue; int64 nTxFees = tx.GetValueIn(view)-tx.GetValueOut(); nTxSigOps += tx.GetP2SHSigOpCount(view); if (nBlockSigOps + nTxSigOps >= MAX_BLOCK_SIGOPS) continue; CValidationState state; if (!tx.CheckInputs(state, view, true, SCRIPT_VERIFY_P2SH)) continue; CTxUndo txundo; uint256 hash = tx.GetHash(); tx.UpdateCoins(state, view, txundo, 
pindexPrev->nHeight+1, hash); // Added pblock->vtx.push_back(tx); pblocktemplate->vTxFees.push_back(nTxFees); pblocktemplate->vTxSigOps.push_back(nTxSigOps); nBlockSize += nTxSize; ++nBlockTx; nBlockSigOps += nTxSigOps; nFees += nTxFees; if (fPrintPriority) { printf("priority %.1f feeperkb %.1f txid %s\n", dPriority, dFeePerKb, tx.GetHash().ToString().c_str()); } // Add transactions that depend on this one to the priority queue if (mapDependers.count(hash)) { BOOST_FOREACH(COrphan* porphan, mapDependers[hash]) { if (!porphan->setDependsOn.empty()) { porphan->setDependsOn.erase(hash); if (porphan->setDependsOn.empty()) { vecPriority.push_back(TxPriority(porphan->dPriority, porphan->dFeePerKb, porphan->ptx)); std::push_heap(vecPriority.begin(), vecPriority.end(), comparer); } } } } } nLastBlockTx = nBlockTx; nLastBlockSize = nBlockSize; printf("CreateNewBlock(): total size %"PRI64u"\n", nBlockSize); pblock->vtx[0].vout[0].nValue = GetBlockValue(pindexPrev->nHeight+1, nFees); pblocktemplate->vTxFees[0] = -nFees; // Fill in header pblock->hashPrevBlock = pindexPrev->GetBlockHash(); pblock->UpdateTime(pindexPrev); pblock->nBits = GetNextWorkRequired(pindexPrev, pblock); pblock->nNonce = 0; pblock->vtx[0].vin[0].scriptSig = CScript() << OP_0 << OP_0; pblocktemplate->vTxSigOps[0] = pblock->vtx[0].GetLegacySigOpCount(); CBlockIndex indexDummy(*pblock); indexDummy.pprev = pindexPrev; indexDummy.nHeight = pindexPrev->nHeight + 1; CCoinsViewCache viewNew(*pcoinsTip, true); CValidationState state; if (!pblock->ConnectBlock(state, &indexDummy, viewNew, true)) throw std::runtime_error("CreateNewBlock() : ConnectBlock failed"); } return pblocktemplate.release(); } void IncrementExtraNonce(CBlock* pblock, CBlockIndex* pindexPrev, unsigned int& nExtraNonce) { // Update nExtraNonce static uint256 hashPrevBlock; if (hashPrevBlock != pblock->hashPrevBlock) { nExtraNonce = 0; hashPrevBlock = pblock->hashPrevBlock; } ++nExtraNonce; unsigned int nHeight = pindexPrev->nHeight+1; // Height 
first in coinbase required for block.version=2 pblock->vtx[0].vin[0].scriptSig = (CScript() << nHeight << CBigNum(nExtraNonce)) + COINBASE_FLAGS; assert(pblock->vtx[0].vin[0].scriptSig.size() <= 100); pblock->hashMerkleRoot = pblock->BuildMerkleTree(); } void FormatHashBuffers(CBlock* pblock, char* pmidstate, char* pdata, char* phash1) { // // Pre-build hash buffers // struct { struct unnamed2 { int nVersion; uint256 hashPrevBlock; uint256 hashMerkleRoot; unsigned int nTime; unsigned int nBits; unsigned int nNonce; } block; unsigned char pchPadding0[64]; uint256 hash1; unsigned char pchPadding1[64]; } tmp; memset(&tmp, 0, sizeof(tmp)); tmp.block.nVersion = pblock->nVersion; tmp.block.hashPrevBlock = pblock->hashPrevBlock; tmp.block.hashMerkleRoot = pblock->hashMerkleRoot; tmp.block.nTime = pblock->nTime; tmp.block.nBits = pblock->nBits; tmp.block.nNonce = pblock->nNonce; FormatHashBlocks(&tmp.block, sizeof(tmp.block)); FormatHashBlocks(&tmp.hash1, sizeof(tmp.hash1)); // Byte swap all the input buffer for (unsigned int i = 0; i < sizeof(tmp)/4; i++) ((unsigned int*)&tmp)[i] = ByteReverse(((unsigned int*)&tmp)[i]); // Precalc the first half of the first hash, which stays constant SHA256Transform(pmidstate, &tmp.block, pSHA256InitState); memcpy(pdata, &tmp.block, 128); memcpy(phash1, &tmp.hash1, 64); } bool CheckWork(CBlock* pblock, CWallet& wallet, CReserveKey& reservekey) { uint256 hash = pblock->GetHash(); uint256 hashTarget = CBigNum().SetCompact(pblock->nBits).getuint256(); if (hash > hashTarget) return false; //// debug print printf("BullCityCoinMiner:\n"); printf("proof-of-work found \n hash: %s \ntarget: %s\n", hash.GetHex().c_str(), hashTarget.GetHex().c_str()); pblock->print(); printf("generated %s\n", FormatMoney(pblock->vtx[0].vout[0].nValue).c_str()); // Found a solution { LOCK(cs_main); if (pblock->hashPrevBlock != hashBestChain) return error("BullCityCoinMiner : generated block is stale"); // Remove key from key pool reservekey.KeepKey(); // Track how 
many getdata requests this block gets { LOCK(wallet.cs_wallet); wallet.mapRequestCount[pblock->GetHash()] = 0; } // Process this block the same as if we had received it from another node CValidationState state; if (!ProcessBlock(state, NULL, pblock)) return error("BullCityCoinMiner : ProcessBlock, block not accepted"); } return true; } void static BitcoinMiner(CWallet *pwallet) { printf("BullCityCoinMiner started\n"); SetThreadPriority(THREAD_PRIORITY_LOWEST); RenameThread("bitcoin-miner"); // Each thread has its own key and counter CReserveKey reservekey(pwallet); unsigned int nExtraNonce = 0; try { loop { while (vNodes.empty()) MilliSleep(1000); // // Create new block // unsigned int nTransactionsUpdatedLast = nTransactionsUpdated; CBlockIndex* pindexPrev = pindexBest; auto_ptr<CBlockTemplate> pblocktemplate(CreateNewBlock(reservekey)); if (!pblocktemplate.get()) return; CBlock *pblock = &pblocktemplate->block; IncrementExtraNonce(pblock, pindexPrev, nExtraNonce); printf("Running BullCityCoinMiner with %"PRIszu" transactions in block (%u bytes)\n", pblock->vtx.size(), ::GetSerializeSize(*pblock, SER_NETWORK, PROTOCOL_VERSION)); // // Pre-build hash buffers // char pmidstatebuf[32+16]; char* pmidstate = alignup<16>(pmidstatebuf); char pdatabuf[128+16]; char* pdata = alignup<16>(pdatabuf); char phash1buf[64+16]; char* phash1 = alignup<16>(phash1buf); FormatHashBuffers(pblock, pmidstate, pdata, phash1); unsigned int& nBlockTime = *(unsigned int*)(pdata + 64 + 4); unsigned int& nBlockBits = *(unsigned int*)(pdata + 64 + 8); unsigned int& nBlockNonce = *(unsigned int*)(pdata + 64 + 12); // // Search // int64 nStart = GetTime(); uint256 hashTarget = CBigNum().SetCompact(pblock->nBits).getuint256(); uint256 hashbuf[2]; uint256& hash = *alignup<16>(hashbuf); loop { unsigned int nHashesDone = 0; unsigned int nNonceFound; // Crypto++ SHA256 nNonceFound = ScanHash_CryptoPP(pmidstate, pdata + 64, phash1, (char*)&hash, nHashesDone); // Check if something found if (nNonceFound 
!= (unsigned int) -1) { for (unsigned int i = 0; i < sizeof(hash)/4; i++) ((unsigned int*)&hash)[i] = ByteReverse(((unsigned int*)&hash)[i]); if (hash <= hashTarget) { // Found a solution pblock->nNonce = ByteReverse(nNonceFound); assert(hash == pblock->GetHash()); SetThreadPriority(THREAD_PRIORITY_NORMAL); CheckWork(pblock, *pwalletMain, reservekey); SetThreadPriority(THREAD_PRIORITY_LOWEST); break; } } // Meter hashes/sec static int64 nHashCounter; if (nHPSTimerStart == 0) { nHPSTimerStart = GetTimeMillis(); nHashCounter = 0; } else nHashCounter += nHashesDone; if (GetTimeMillis() - nHPSTimerStart > 4000) { static CCriticalSection cs; { LOCK(cs); if (GetTimeMillis() - nHPSTimerStart > 4000) { dHashesPerSec = 1000.0 * nHashCounter / (GetTimeMillis() - nHPSTimerStart); nHPSTimerStart = GetTimeMillis(); nHashCounter = 0; static int64 nLogTime; if (GetTime() - nLogTime > 30 * 60) { nLogTime = GetTime(); printf("hashmeter %6.0f khash/s\n", dHashesPerSec/1000.0); } } } } // Check for stop or if block needs to be rebuilt boost::this_thread::interruption_point(); if (vNodes.empty()) break; if (nBlockNonce >= 0xffff0000) break; if (nTransactionsUpdated != nTransactionsUpdatedLast && GetTime() - nStart > 60) break; if (pindexPrev != pindexBest) break; // Update nTime every few seconds pblock->UpdateTime(pindexPrev); nBlockTime = ByteReverse(pblock->nTime); if (fTestNet) { // Changing pblock->nTime can change work required on testnet: nBlockBits = ByteReverse(pblock->nBits); hashTarget = CBigNum().SetCompact(pblock->nBits).getuint256(); } } } } catch (boost::thread_interrupted) { printf("BullCityCoinMiner terminated\n"); throw; } } void GenerateBitcoins(bool fGenerate, CWallet* pwallet) { static boost::thread_group* minerThreads = NULL; int nThreads = GetArg("-genproclimit", -1); if (nThreads < 0) nThreads = boost::thread::hardware_concurrency(); if (minerThreads != NULL) { minerThreads->interrupt_all(); delete minerThreads; minerThreads = NULL; } if (nThreads == 0 || 
!fGenerate) return; minerThreads = new boost::thread_group(); for (int i = 0; i < nThreads; i++) minerThreads->create_thread(boost::bind(&BitcoinMiner, pwallet)); } // Amount compression: // * If the amount is 0, output 0 // * first, divide the amount (in base units) by the largest power of 10 possible; call the exponent e (e is max 9) // * if e<9, the last digit of the resulting number cannot be 0; store it as d, and drop it (divide by 10) // * call the result n // * output 1 + 10*(9*n + d - 1) + e // * if e==9, we only know the resulting number is not zero, so output 1 + 10*(n - 1) + 9 // (this is decodable, as d is in [1-9] and e is in [0-9]) uint64 CTxOutCompressor::CompressAmount(uint64 n) { if (n == 0) return 0; int e = 0; while (((n % 10) == 0) && e < 9) { n /= 10; e++; } if (e < 9) { int d = (n % 10); assert(d >= 1 && d <= 9); n /= 10; return 1 + (n*9 + d - 1)*10 + e; } else { return 1 + (n - 1)*10 + 9; } } uint64 CTxOutCompressor::DecompressAmount(uint64 x) { // x = 0 OR x = 1+10*(9*n + d - 1) + e OR x = 1+10*(n - 1) + 9 if (x == 0) return 0; x--; // x = 10*(9*n + d - 1) + e int e = x % 10; x /= 10; uint64 n = 0; if (e < 9) { // x = 9*n + d - 1 int d = (x % 9) + 1; x /= 9; // x = n n = x*10 + d; } else { n = x+1; } while (e) { n *= 10; e--; } return n; } class CMainCleanup { public: CMainCleanup() {} ~CMainCleanup() { // block headers std::map<uint256, CBlockIndex*>::iterator it1 = mapBlockIndex.begin(); for (; it1 != mapBlockIndex.end(); it1++) delete (*it1).second; mapBlockIndex.clear(); // orphan blocks std::map<uint256, CBlock*>::iterator it2 = mapOrphanBlocks.begin(); for (; it2 != mapOrphanBlocks.end(); it2++) delete (*it2).second; mapOrphanBlocks.clear(); // orphan transactions mapOrphanTransactions.clear(); } } instance_of_cmaincleanup;
{ "content_hash": "d0683208393aa06ea2481cc1123dedfe", "timestamp": "", "source": "github", "line_count": 4771, "max_line_length": 253, "avg_line_length": 35.40494655208552, "alnum_prop": 0.5852519284619073, "repo_name": "BullCityCoin/BullCityCoin", "id": "0381dfb01e0977e082b81789cdc396434dcd91e1", "size": "168917", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/main.cpp", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "101891" }, { "name": "C++", "bytes": "2424630" }, { "name": "CSS", "bytes": "1127" }, { "name": "IDL", "bytes": "13805" }, { "name": "Objective-C", "bytes": "2734" }, { "name": "Python", "bytes": "3783" }, { "name": "Shell", "bytes": "7983" }, { "name": "TypeScript", "bytes": "5246154" } ], "symlink_target": "" }
package com.conveyal.gtfs.model; import com.conveyal.gtfs.GTFSFeed; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.LineString; import org.mapdb.Fun; import java.util.Map; import static com.conveyal.gtfs.util.GeometryUtil.geometryFactory; /** * Represents a collection of GTFS shape points. Never saved in MapDB but constructed on the fly. */ public class Shape { /** The shape itself */ public LineString geometry; /** shape_dist_traveled for each point in the geometry. TODO how to handle shape dist traveled not specified, or not specified on all stops? */ public double[] shape_dist_traveled; public Shape (GTFSFeed feed, String shape_id) { Map<Fun.Tuple2<String, Integer>, ShapePoint> points = feed.shape_points.subMap(new Fun.Tuple2(shape_id, null), new Fun.Tuple2(shape_id, Fun.HI)); Coordinate[] coords = points.values().stream() .map(point -> new Coordinate(point.shape_pt_lon, point.shape_pt_lat)) .toArray(i -> new Coordinate[i]); geometry = geometryFactory.createLineString(coords); shape_dist_traveled = points.values().stream().mapToDouble(point -> point.shape_dist_traveled).toArray(); } }
{ "content_hash": "32de1b6ce903ba1c18bc483ce3aa64ee", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 147, "avg_line_length": 39, "alnum_prop": 0.6979166666666666, "repo_name": "conveyal/r5", "id": "785455cf41afb934f42b80544496c4a27de0f1b6", "size": "1248", "binary": false, "copies": "2", "ref": "refs/heads/dev", "path": "src/main/java/com/conveyal/gtfs/model/Shape.java", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "6262" }, { "name": "Dockerfile", "bytes": "640" }, { "name": "HTML", "bytes": "7164" }, { "name": "Java", "bytes": "3492376" }, { "name": "JavaScript", "bytes": "76017" }, { "name": "Python", "bytes": "5457" }, { "name": "Shell", "bytes": "924" } ], "symlink_target": "" }
//------------------------------------------------------------------------------
// <auto-generated>This code was generated by LLBLGen Pro v3.5.</auto-generated>
//------------------------------------------------------------------------------
using System;
using System.ComponentModel;
using System.Runtime.Serialization;
using System.Xml.Serialization;
using System.Collections.Specialized;
using System.Collections.Generic;

namespace RawBencher
{
	/// <summary>Class which represents the entity 'Product'.</summary>
	[Serializable]
	public partial class Product
	{
		/// <summary>Initializes a new instance of the <see cref="Product"/> class.</summary>
		public Product() : base()
		{
		}

		#region Class Property Declarations
		/// <summary>Gets or sets the CategoryId field.</summary>
		public int? CategoryId { get; set; }

		/// <summary>Gets or sets the Discontinued field.</summary>
		public bool Discontinued { get; set; }

		/// <summary>Gets or sets the ProductId field.</summary>
		public int ProductId { get; set; }

		/// <summary>Gets or sets the ProductName field.</summary>
		public string ProductName { get; set; }

		/// <summary>Gets or sets the QuantityPerUnit field.</summary>
		public string QuantityPerUnit { get; set; }

		/// <summary>Gets or sets the ReorderLevel field.</summary>
		public short? ReorderLevel { get; set; }

		/// <summary>Gets or sets the SupplierId field.</summary>
		public int? SupplierId { get; set; }

		/// <summary>Gets or sets the UnitPrice field.</summary>
		public decimal? UnitPrice { get; set; }

		/// <summary>Gets or sets the UnitsInStock field.</summary>
		public short? UnitsInStock { get; set; }

		/// <summary>Gets or sets the UnitsOnOrder field.</summary>
		public short? UnitsOnOrder { get; set; }

		// The generator emitted a doc comment here for the navigator mapped onto
		// the association 'Product.Category - Category.Products (m:1)', but no
		// member was generated for it.
		#endregion
	}
}
{ "content_hash": "5146d3a11771949862591739f39fd50a", "timestamp": "", "source": "github", "line_count": 46, "max_line_length": 131, "avg_line_length": 44.391304347826086, "alnum_prop": 0.6704211557296768, "repo_name": "anpete/RawDataAccessBencher", "id": "323cd7b5972b0fa1d52631ab71b535fefee497b1", "size": "2044", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "RawBencher/Product.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "8456746" } ], "symlink_target": "" }
package com.example.tests; import static org.testng.Assert.assertEquals; import java.util.Collections; import java.util.List; import java.util.Random; import org.testng.annotations.Test; public class ContactRemovalTests extends TestBase { @Test public void deleteSomeContact(){ app.getNavigationHelper().openMainPage(); //save old state List<ContactData> oldList = app.getContactHelper().getContacts(); Random rnd = new Random(); int index = rnd.nextInt(oldList.size() - 1); //actions app.getContactHelper().initContactEdit(index); app.getContactHelper().deleteContact(); app.getContactHelper().returnToHomePage(); //save new state List<ContactData> newList = app.getContactHelper().getContacts(); //compare states oldList.remove(index); Collections.sort(oldList); assertEquals(newList, oldList); } }
{ "content_hash": "1c5ae77f98628c4e945fcf1288dbbfed", "timestamp": "", "source": "github", "line_count": 37, "max_line_length": 70, "avg_line_length": 24.216216216216218, "alnum_prop": 0.7053571428571429, "repo_name": "DanilenkoEA/test-project", "id": "2576db8c11de0b736cc9166f20411b2c54f92062", "size": "896", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "addressbook-seltests/src/com/example/tests/ContactRemovalTests.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "5337" }, { "name": "Java", "bytes": "17209" }, { "name": "JavaScript", "bytes": "3555" } ], "symlink_target": "" }
<?php

namespace Sonata\NotificationBundle\Backend;

use PhpAmqpLib\Channel\AMQPChannel;
use PhpAmqpLib\Connection\AMQPConnection;
use Sonata\NotificationBundle\Exception\BackendNotFoundException;
use Sonata\NotificationBundle\Model\MessageInterface;
use Symfony\Component\EventDispatcher\EventDispatcherInterface;
use ZendDiagnostics\Result\Failure;
use ZendDiagnostics\Result\Success;

/**
 * Producer side of the rabbitmq backend.
 *
 * Dispatches messages to one of several queue-specific AMQP backends.
 * Consuming is delegated to the individual backends, which is why the
 * iterator/handler entry points here deliberately throw.
 */
class AMQPBackendDispatcher extends QueueBackendDispatcher
{
    /**
     * Connection settings; read keys: host, port, user, pass, vhost,
     * console_url (the latter only for the management-API health check).
     *
     * @var array
     */
    protected $settings;

    /**
     * Lazily created channel, shared for the lifetime of the request.
     *
     * @var AMQPChannel
     */
    protected $channel;

    /**
     * @var AMQPConnection
     */
    protected $connection;

    // Ensures every backend's initialize() runs exactly once (see getBackend()).
    protected $backendsInitialized = false;

    /**
     * @param array  $settings
     * @param array  $queues
     * @param string $defaultQueue
     * @param array  $backends
     */
    public function __construct(array $settings, array $queues, $defaultQueue, array $backends)
    {
        parent::__construct($queues, $defaultQueue, $backends);

        $this->settings = $settings;
    }

    /**
     * Opens the AMQP connection on first use and returns its channel.
     *
     * @return AMQPChannel
     */
    public function getChannel()
    {
        if (!$this->channel) {
            $this->connection = new AMQPConnection(
                $this->settings['host'],
                $this->settings['port'],
                $this->settings['user'],
                $this->settings['pass'],
                $this->settings['vhost']
            );

            $this->channel = $this->connection->channel();

            // Close the connection cleanly when the PHP process shuts down.
            register_shutdown_function(array($this, 'shutdown'));
        }

        return $this->channel;
    }

    /**
     * {@inheritdoc}
     *
     * Resolution order: with no queues configured, the backend typed
     * 'default' wins outright; otherwise the first backend whose type
     * matches $type (or any untyped backend when $type is 'all') is
     * returned, falling back to the backend registered for the default
     * queue, or throwing if none matched.
     */
    public function getBackend($type)
    {
        // Lazily initialize every backend exactly once before first use.
        if (!$this->backendsInitialized) {
            foreach ($this->backends as $backend) {
                $backend['backend']->initialize();
            }
            $this->backendsInitialized = true;
        }

        $default = null;

        if (count($this->queues) === 0) {
            foreach ($this->backends as $backend) {
                if ($backend['type'] === 'default') {
                    return $backend['backend'];
                }
            }
        }

        foreach ($this->backends as $backend) {
            if ('all' === $type && $backend['type'] === '') {
                return $backend['backend'];
            }

            if ($backend['type'] === $type) {
                return $backend['backend'];
            }

            if ($backend['type'] === $this->defaultQueue) {
                $default = $backend['backend'];
            }
        }

        if ($default === null) {
            throw new BackendNotFoundException('Could not find a message backend for the type '.$type);
        }

        return $default;
    }

    /**
     * {@inheritdoc}
     *
     * Not supported on the dispatcher: consuming requires a queue-specific backend.
     */
    public function getIterator()
    {
        throw new \RuntimeException(
            'You need to use a specific rabbitmq backend supporting the selected queue to run a consumer.'
        );
    }

    /**
     * {@inheritdoc}
     *
     * Not supported on the dispatcher: consuming requires a queue-specific backend.
     */
    public function handle(MessageInterface $message, EventDispatcherInterface $dispatcher)
    {
        throw new \RuntimeException(
            'You need to use a specific rabbitmq backend supporting the selected queue to run a consumer.'
        );
    }

    /**
     * {@inheritdoc}
     *
     * Health check: verifies every configured queue is known to the broker
     * (via the management API) and has at least one consumer attached.
     */
    public function getStatus()
    {
        try {
            $this->getChannel();
            $output = $this->getApiQueueStatus();
            $checked = 0;
            $missingConsumers = array();

            // Cross-reference configured queues against the broker's queue list.
            foreach ($this->queues as $queue) {
                foreach ($output as $q) {
                    if ($q['name'] === $queue['queue']) {
                        ++$checked;
                        if ($q['consumers'] === 0) {
                            $missingConsumers[] = $queue['queue'];
                        }
                    }
                }
            }

            if ($checked !== count($this->queues)) {
                return new Failure(
                    'Not all queues for the available notification types registered in the rabbitmq broker. '
                    .'Are the consumer commands running?'
                );
            }

            if (count($missingConsumers) > 0) {
                return new Failure(
                    'There are no rabbitmq consumers running for the queues: '.implode(', ', $missingConsumers)
                );
            }
        } catch (\Exception $e) {
            // Any connection/API error is reported as an unhealthy status.
            return new Failure($e->getMessage());
        }

        return new Success('Channel is running (RabbitMQ) and consumers for all queues available.');
    }

    /**
     * {@inheritdoc}
     *
     * Not supported on the dispatcher: cleanup requires a queue-specific backend.
     */
    public function cleanup()
    {
        throw new \RuntimeException(
            'You need to use a specific rabbitmq backend supporting the selected queue to run a consumer.'
        );
    }

    // Shutdown-function callback registered in getChannel(); closes the
    // channel and connection if they were opened.
    public function shutdown()
    {
        if ($this->channel) {
            $this->channel->close();
        }

        if ($this->connection) {
            $this->connection->close();
        }
    }

    /**
     * {@inheritdoc}
     *
     * Intentionally empty: per-backend initialization happens lazily in getBackend().
     */
    public function initialize()
    {
    }

    /**
     * Calls the rabbitmq management api /api/<vhost>/queues endpoint to list the available queues.
     *
     * NOTE(review): uses the legacy Guzzle 3 API (\Guzzle\Http\Client); newer
     * guzzlehttp/guzzle versions expose a different interface — confirm the
     * installed version before upgrading.
     *
     * @see http://hg.rabbitmq.com/rabbitmq-management/raw-file/3646dee55e02/priv/www-api/help.html
     *
     * @return array
     */
    protected function getApiQueueStatus()
    {
        if (class_exists('Guzzle\Http\Client') === false) {
            throw new \RuntimeException(
                'The guzzle http client library is required to run rabbitmq health checks. '
                .'Make sure to add guzzlehttp/guzzle to your composer.json.'
            );
        }

        $client = new \Guzzle\Http\Client();
        // Fail fast (3s) if the management API is unreachable.
        $client->setConfig(array('curl.options' => array(CURLOPT_CONNECTTIMEOUT_MS => 3000)));

        $request = $client->get(sprintf('%s/queues', $this->settings['console_url']));
        $request->setAuth($this->settings['user'], $this->settings['pass']);

        return json_decode($request->send()->getBody(true), true);
    }
}
{ "content_hash": "31e85cc2c59e6355a1f402360cb09d94", "timestamp": "", "source": "github", "line_count": 227, "max_line_length": 111, "avg_line_length": 27.273127753303964, "alnum_prop": 0.5239864319172993, "repo_name": "os-rescue/SonataNotificationBundle", "id": "bf475aefaa4241ee3ad8b34ea245dc441c17dcf1", "size": "6438", "binary": false, "copies": "1", "ref": "refs/heads/3.x", "path": "Backend/AMQPBackendDispatcher.php", "mode": "33188", "license": "mit", "language": [ { "name": "Makefile", "bytes": "6195" }, { "name": "PHP", "bytes": "211579" }, { "name": "Python", "bytes": "7900" }, { "name": "Shell", "bytes": "2242" } ], "symlink_target": "" }
package com.bignerdranch.android.androidquiz; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.Matrix; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; public class ImageViewHelper { public ImageViewHelper() {} public enum ROTATION { VERTICAL, HORIZONTAL } public Bitmap flipImage(Bitmap src, ROTATION type) { // create new matrix for transformations Matrix matrix = new Matrix(); // if vertical if (type == ROTATION.VERTICAL) { // y = y * -1 matrix.preScale(1.0f, -1.0f); } // if horizonal else if (type == ROTATION.HORIZONTAL) { // x = x * -1 matrix.preScale(-1.0f, 1.0f); // unknown type } else { return null; } // return transformed image return Bitmap.createBitmap(src, 0, 0, src.getWidth(), src.getHeight(), matrix, true); } public static Bitmap drawableToBitmap (Drawable drawable) { if (drawable instanceof BitmapDrawable) { return ((BitmapDrawable)drawable).getBitmap(); } Bitmap bitmap = Bitmap.createBitmap(drawable.getIntrinsicWidth(), drawable.getIntrinsicHeight(), Bitmap.Config.ARGB_8888); Canvas canvas = new Canvas(bitmap); drawable.setBounds(0, 0, canvas.getWidth(), canvas.getHeight()); drawable.draw(canvas); return bitmap; } }
{ "content_hash": "18db699ebc899d3e6eb9e4b9b4c3f9e0", "timestamp": "", "source": "github", "line_count": 52, "max_line_length": 130, "avg_line_length": 29.23076923076923, "alnum_prop": 0.6138157894736842, "repo_name": "claudiordgz/AndroidQuiz", "id": "4928657d783503858fd6e8ae77cd4d172c4bb7d7", "size": "1520", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/java/com/bignerdranch/android/androidquiz/ImageViewHelper.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "12629" } ], "symlink_target": "" }
// Assembly metadata for the '04.BatGoikoTower' project (exam-preparation exercise).
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("04.BatGoikoTower")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("04.BatGoikoTower")]
[assembly: AssemblyCopyright("Copyright © 2013")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]

// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("d6fbd2d9-0abb-4977-8f44-bfc545593dc4")]

// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
{ "content_hash": "b8a1efd420e8a7530551318b90a9ae29", "timestamp": "", "source": "github", "line_count": 36, "max_line_length": 84, "avg_line_length": 39.02777777777778, "alnum_prop": 0.7451957295373666, "repo_name": "iliantrifonov/TelerikAcademy", "id": "d427db9ea7767b6651f761eae5a5b7fccf0dbdaf", "size": "1408", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "C# part 1/Exam1Preparation/04.BatGoikoTower/Properties/AssemblyInfo.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ASP", "bytes": "272691" }, { "name": "C#", "bytes": "7056889" }, { "name": "CSS", "bytes": "435067" }, { "name": "CoffeeScript", "bytes": "943" }, { "name": "HTML", "bytes": "454999" }, { "name": "JavaScript", "bytes": "5428919" }, { "name": "Ruby", "bytes": "1773" }, { "name": "SQLPL", "bytes": "1673" }, { "name": "Visual Basic", "bytes": "10583" }, { "name": "XSLT", "bytes": "3628" } ], "symlink_target": "" }
// Re-exports the jQuery instance that was attached to `global.$` elsewhere
// (presumably by a bundler shim or a <script> tag — confirm against the
// build setup), so modules can require it locally instead of touching globals.
module.exports = global.$;
{ "content_hash": "3ee2a41188054f29db7e1178d00e88bd", "timestamp": "", "source": "github", "line_count": 1, "max_line_length": 26, "avg_line_length": 27, "alnum_prop": 0.7037037037037037, "repo_name": "dannyfritz/twitch-crowd", "id": "ee0fa64c24478d96491322fa208c6fbcde666885", "size": "27", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "client/src/scripts/jquery.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "1294" } ], "symlink_target": "" }
<?php /** * This class is a part of reSlim project * @author M ABD AZIZ ALFIAN <github.com/aalfiann> * * Don't remove this class unless You know what to do * */ namespace classes; use \classes\Upload as Upload; use \classes\Validation as Validation; use PDO; /** * A class for user upload file in reSlim * * @package Core reSlim * @author M ABD AZIZ ALFIAN <github.com/aalfiann> * @copyright Copyright (c) 2016 M ABD AZIZ ALFIAN * @license https://github.com/aalfiann/reSlim/blob/master/license.md MIT License */ class Upload { protected $db; var $username,$datafile,$token,$itemid,$baseurl,$title,$alternate,$externallink,$status,$apikey,$filename,$uniqueid,$bookid; // limit size upload var $maxUploadSize = '100000000'; // for pagination var $page,$itemsPerPage; // for search var $search; function __construct($db=null) { if (!empty($db)) { $this->db = $db; } } /** * Check file on the server is already exist or not * * @param $url is full path filename on the server. Example: http://www.example.com/api/upload/2017/tester.txt * @return boolean true|false */ function isFileOnServer($url){ $result = false; $ch = curl_init($url); curl_setopt($ch, CURLOPT_NOBODY, true); curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, false); curl_exec($ch); $retcode = curl_getinfo($ch, CURLINFO_HTTP_CODE); // $retcode >= 400 -> not found, $retcode = 200, found. curl_close($ch); if ($retcode==200){ $result = true; } return $result; } /** * Check file is allowed or not * * @param $filename is the filename with extension in the end. 
Example: tester.txt * @return boolean true|false */ function isFileNotAllowed($fileName){ $result = false; $notAllowedExts = array("php","sql","sqlite3","db","dbf","js","json","xml","html"); $temp = explode(".", $fileName); $extension = end($temp); if (in_array($extension, $notAllowedExts)) { $result = true; } return $result; } /** * Process upload to server * @return result process in json encoded data */ function doUpload() { //Auto create subfolder upload in every month $formatFolder = date('m-Y'); $fileFolder = 'upload/'.$formatFolder.'/'; if (!is_dir($fileFolder)) { $newcontent = '<?php header(\'Content-type:application/json; charset=utf-8\');header("Access-Control-Allow-Origin: *");header("Access-Control-Allow-Headers: X-Requested-With, Content-Type, Accept, Origin, Authorization");header(\'HTTP/1.0 403 Forbidden\');echo \'{ "status": "error", "code": "403", "message": "This page is forbidden." }\';?>'; $newprotection = '<Files ~ "\.(php|pdf|js|sql|sqlite3|doc|xls|db|dbf|json|xml|html)$"> Order allow,deny Deny from all </Files>'; mkdir($fileFolder,0775,true); if(!$this->isFileOnServer($this->baseurl.'/upload/index.php')){ $ihandle = fopen('upload/index.php','w+'); fwrite($ihandle,$newcontent); fclose($ihandle); } $handle = fopen($fileFolder.'index.php','w+'); fwrite($handle,$newcontent); fclose($handle); $xhandle = fopen($fileFolder.'.htaccess','w+'); fwrite($xhandle,$newprotection); fclose($xhandle); } $file = $this->datafile; // determine filepath $filePath = $fileFolder.$file->getClientFilename(); // determine filename $fileName = $file->getClientFilename(); // determine filetype $fileType = $file->getClientMediaType(); // determine filesize $fileSize = $file->getSize(); // determine error $fileError = $file->getError(); //Determine if file is not allowed if (!$this->isFileNotAllowed($fileName)){ //Determine if file already exist if(!$this->isFileOnServer($this->baseurl.'/'.$filePath)) { // check if file size is allowed if ($fileSize <= 
$this->maxUploadSize){ //Check proses upload status if ($file->getError() === UPLOAD_ERR_OK){ $uploadresult = $file->moveTo($fileFolder.$fileName); if ($uploadresult == null){ $newusername = strtolower($this->username); try{ $this->db->beginTransaction(); $sql = "INSERT INTO user_upload (Date_Upload,Filename,Filepath,Filetype,Filesize,Username,StatusID,Title,Alternate,External_link) VALUES(current_timestamp,:filename,:filepath,:filetype,:filesize,:username,'49',:title,:alternate,:externallink);"; $stmt = $this->db->prepare($sql); $stmt->bindParam(':username', $newusername, PDO::PARAM_STR); $stmt->bindparam(':filename', $fileName, PDO::PARAM_STR); $stmt->bindparam(':filepath', $filePath, PDO::PARAM_STR); $stmt->bindparam(':filetype', $fileType, PDO::PARAM_STR); $stmt->bindparam(':filesize', $fileSize, PDO::PARAM_STR); $stmt->bindparam(':title', $this->title, PDO::PARAM_STR); $stmt->bindparam(':alternate', $this->alternate, PDO::PARAM_STR); $stmt->bindparam(':externallink', $this->externallink, PDO::PARAM_STR); if ($stmt->execute()) { $data = [ 'status' => 'success', 'code' => 'RS101', 'message' => CustomHandlers::getreSlimMessage('RS101'), 'datafile' => [ 'Title' => $this->title, 'Alternate' => $this->alternate, 'External_link' => $this->externallink, 'Filename' => $fileName, 'Filepath' => $this->baseurl.'/'.$filePath, 'Filetype' => $fileType, 'Filesize' => $fileSize] ]; } else { $data = [ 'status' => 'error', 'code' => 'RS909', 'message' => CustomHandlers::getreSlimMessage('RS909') ]; } $this->db->commit(); } catch (PDOException $e) { $data = [ 'status' => 'error', 'code' => $e->getCode(), 'message' => $e->getMessage() ]; $this->db->rollBack(); } } else { $data = [ 'status' => 'error', 'code' => '0', 'message' => $uploadresult ]; } } else { $data = [ 'status' => 'error', 'code' => 'RS910', 'message' => CustomHandlers::getreSlimMessage('RS910') ]; } }else{ $data = [ 'status' => 'error', 'code' => 'RS911', 'message' => CustomHandlers::getreSlimMessage('RS911') ]; } } 
else { $data = [ 'status' => 'error', 'code' => 'RS912', 'message' => CustomHandlers::getreSlimMessage('RS912') ]; } } else { $data = [ 'status' => 'error', 'code' => 'RS908', 'message' => CustomHandlers::getreSlimMessage('RS908') ]; } return $data; $this->db = null; } /** * Get all data Status User Upload * @return result process in json encoded data */ public function showOptionStatus() { if (Auth::validToken($this->db,$this->token)){ $sql = "SELECT a.StatusID,a.Status FROM core_status a WHERE a.StatusID = '49' OR a.StatusID = '50' ORDER BY a.Status ASC"; $stmt = $this->db->prepare($sql); $stmt->bindParam(':token', $this->token, PDO::PARAM_STR); if ($stmt->execute()) { if ($stmt->rowCount() > 0){ $results = $stmt->fetchAll(PDO::FETCH_ASSOC); $data = [ 'result' => $results, 'status' => 'success', 'code' => 'RS501', 'message' => CustomHandlers::getreSlimMessage('RS501') ]; } else { $data = [ 'status' => 'error', 'code' => 'RS601', 'message' => CustomHandlers::getreSlimMessage('RS601') ]; } } else { $data = [ 'status' => 'error', 'code' => 'RS202', 'message' => CustomHandlers::getreSlimMessage('RS202') ]; } } else { $data = [ 'status' => 'error', 'code' => 'RS401', 'message' => CustomHandlers::getreSlimMessage('RS401') ]; } return json_encode($data, JSON_PRETTY_PRINT); $this->db= null; } /** * Process Upload and verify user * @return result process in json encoded data */ public function process(){ if (Auth::validToken($this->db,$this->token,$this->username)){ $data = $this->doUpload(); } else { $data = [ 'status' => 'error', 'code' => 'RS401', 'message' => CustomHandlers::getreSlimMessage('RS401') ]; } return json_encode($data, JSON_PRETTY_PRINT); } /** * Get all data user upload * @return result process in json encoded data */ public function showAllAsPagination() { $newusername = strtolower($this->username); if (Auth::validToken($this->db,$this->token)){ //count total row $sqlcountrow = "SELECT count(a.ItemID) as TotalRow from user_upload a where a.StatusID = 
'49' or a.Username=:username;"; $stmt = $this->db->prepare($sqlcountrow); $stmt->bindParam(':username', $newusername, PDO::PARAM_STR); if ($stmt->execute()) { if ($stmt->rowCount() > 0){ $single = $stmt->fetch(); $base = $this->baseurl."/"; // Paginate won't work if page and items per page is negative. // So make sure that page and items per page is always return minimum zero number. $newpage = Validation::integerOnly($this->page); $newitemsperpage = Validation::integerOnly($this->itemsPerPage); $limits = (((($newpage-1)*$newitemsperpage) <= 0)?0:(($newpage-1)*$newitemsperpage)); $offsets = (($newitemsperpage <= 0)?0:$newitemsperpage); // Query Data $sql = "SELECT a.ItemID,a.Date_Upload,a.Title,a.Alternate,a.External_link,a.Filename,a.Filepath,concat(:baseurl,a.Filepath) as Fullpath,a.Filetype,a.Filesize,a.Username as 'Upload_by',a.Updated_at,a.Updated_by,a.StatusID,b.`Status` from user_upload a inner join core_status b on a.StatusID=b.StatusID where a.StatusID = '49' or a.Username=:username order by a.Date_Upload desc LIMIT :limpage , :offpage;"; $stmt2 = $this->db->prepare($sql); $stmt2->bindParam(':baseurl', $base, PDO::PARAM_STR); $stmt2->bindParam(':username', $newusername, PDO::PARAM_STR); $stmt2->bindValue(':limpage', (INT) $limits, PDO::PARAM_INT); $stmt2->bindValue(':offpage', (INT) $offsets, PDO::PARAM_INT); if ($stmt2->execute()){ $pagination = new \classes\Pagination(); $pagination->totalRow = $single['TotalRow']; $pagination->page = $this->page; $pagination->itemsPerPage = $this->itemsPerPage; $pagination->fetchAllAssoc = $stmt2->fetchAll(PDO::FETCH_ASSOC); $data = $pagination->toDataArray(); } else { $data = [ 'status' => 'error', 'code' => 'RS202', 'message' => CustomHandlers::getreSlimMessage('RS202') ]; } } else { $data = [ 'status' => 'error', 'code' => 'RS601', 'message' => CustomHandlers::getreSlimMessage('RS601') ]; } } else { $data = [ 'status' => 'error', 'code' => 'RS202', 'message' => CustomHandlers::getreSlimMessage('RS202') ]; } } else { 
$data = [ 'status' => 'error', 'code' => 'RS401', 'message' => CustomHandlers::getreSlimMessage('RS401') ]; } return json_encode($data, JSON_PRETTY_PRINT); $this->db= null; } /** * Search all data user paginated * @return result process in json encoded data */ public function searchAllAsPagination() { if (Auth::validToken($this->db,$this->token)){ $newusername = strtolower($this->username); $search = "%$this->search%"; //count total row $sqlcountrow = "SELECT count(a.ItemID) as TotalRow from user_upload a where a.StatusID = '49' and a.Filename like :search or a.Username=:username and a.Filename like :search or a.StatusID = '49' and a.Title like :search or a.Username=:username and a.Title like :search;"; $stmt = $this->db->prepare($sqlcountrow); $stmt->bindParam(':username', $newusername, PDO::PARAM_STR); $stmt->bindParam(':search', $search, PDO::PARAM_STR); if ($stmt->execute()) { if ($stmt->rowCount() > 0){ $single = $stmt->fetch(); $base = $this->baseurl."/"; // Paginate won't work if page and items per page is negative. // So make sure that page and items per page is always return minimum zero number. 
$newpage = Validation::integerOnly($this->page); $newitemsperpage = Validation::integerOnly($this->itemsPerPage); $limits = (((($newpage-1)*$newitemsperpage) <= 0)?0:(($newpage-1)*$newitemsperpage)); $offsets = (($newitemsperpage <= 0)?0:$newitemsperpage); // Query Data $sql = "SELECT a.ItemID,a.Date_Upload,a.Title,a.Alternate,a.External_link,a.Filename,a.Filepath,concat(:baseurl,a.Filepath) as Fullpath,a.Filetype,a.Filesize,a.Username as 'Upload_by',a.Updated_at,a.Updated_by,a.StatusID,b.`Status` from user_upload a inner join core_status b on a.StatusID=b.StatusID where a.StatusID = '49' and a.Filename like :search or a.Username=:username and a.Filename like :search or a.StatusID = '49' and a.Title like :search or a.Username=:username and a.Title like :search order by a.Date_Upload desc LIMIT :limpage , :offpage;"; $stmt2 = $this->db->prepare($sql); $stmt2->bindParam(':baseurl', $base, PDO::PARAM_STR); $stmt2->bindParam(':username', $newusername, PDO::PARAM_STR); $stmt2->bindParam(':search', $search, PDO::PARAM_STR); $stmt2->bindValue(':limpage', (INT) $limits, PDO::PARAM_INT); $stmt2->bindValue(':offpage', (INT) $offsets, PDO::PARAM_INT); if ($stmt2->execute()){ $pagination = new \classes\Pagination(); $pagination->totalRow = $single['TotalRow']; $pagination->page = $this->page; $pagination->itemsPerPage = $this->itemsPerPage; $pagination->fetchAllAssoc = $stmt2->fetchAll(PDO::FETCH_ASSOC); $data = $pagination->toDataArray(); } else { $data = [ 'status' => 'error', 'code' => 'RS202', 'message' => CustomHandlers::getreSlimMessage('RS202') ]; } } else { $data = [ 'status' => 'error', 'code' => 'RS601', 'message' => CustomHandlers::getreSlimMessage('RS601') ]; } } else { $data = [ 'status' => 'error', 'code' => 'RS202', 'message' => CustomHandlers::getreSlimMessage('RS202') ]; } } else { $data = [ 'status' => 'error', 'code' => 'RS401', 'message' => CustomHandlers::getreSlimMessage('RS401') ]; } return json_encode($data, JSON_PRETTY_PRINT); $this->db= null; } /** * 
Get data single user upload by Item ID
	 * @return result process in json encoded data
	 */
	public function showItem() {
		// Parentheses make the original AND/OR precedence explicit (the row must
		// match the requested ItemID and be either publicly visible — StatusID 49 —
		// or owned by the requesting user). This also removes the duplicated
		// :itemid placeholder, which only worked with emulated prepares.
		$sql = "SELECT a.ItemID,a.Date_Upload,a.Title,a.Alternate,a.External_link,a.Filename,a.Filepath,a.Filetype,a.Filesize,a.Username as 'Upload_by',a.Updated_at,a.Updated_by,b.`Status` from user_upload a inner join core_status b on a.StatusID=b.StatusID where (a.StatusID = '49' or a.Username=:username) and a.ItemID=:itemid;";
		$newusername = strtolower($this->username);
		$stmt = $this->db->prepare($sql);
		$stmt->bindParam(':username', $newusername, PDO::PARAM_STR);
		$stmt->bindParam(':itemid', $this->itemid, PDO::PARAM_STR);
		if ($stmt->execute()) {
			if ($stmt->rowCount() > 0){
				$results = $stmt->fetchAll(PDO::FETCH_ASSOC);
				$data = [
					'result' => $results,
					'status' => 'success',
					'code' => 'RS501',
					'message' => CustomHandlers::getreSlimMessage('RS501')
				];
			} else {
				// No matching row: not found, or not visible to this user.
				$data = [
					'status' => 'error',
					'code' => 'RS601',
					'message' => CustomHandlers::getreSlimMessage('RS601')
				];
			}
		} else {
			// Query failed to execute.
			$data = [
				'status' => 'error',
				'code' => 'RS202',
				'message' => CustomHandlers::getreSlimMessage('RS202')
			];
		}
		// NOTE(review): the unreachable `$this->db = null;` that followed this
		// return in the original has been removed (dead code).
		return json_encode($data, JSON_PRETTY_PRINT);
	}

	/**
	 * Update user upload item
	 * @return result process in json encoded data
	 */
	function doUpdate(){
		try {
			$this->db->beginTransaction();
			$roleid = Auth::getRoleID($this->db,$this->token);
			// Privileged roles (1 and 2) may update any item; the two branches in
			// the original used identical SQL and have been merged. Everyone else
			// may only update rows they own.
			if ($roleid == '1' || $roleid == '2'){
				$sql = "UPDATE user_upload SET Title=:title,Alternate=:alternate,External_link=:external,StatusID=:status,Updated_by=:username WHERE ItemID=:itemid;";
			} else {
				// :username appears twice here (SET and WHERE); this relies on
				// PDO emulated prepares allowing repeated named placeholders.
				$sql = "UPDATE user_upload SET Title=:title,Alternate=:alternate,External_link=:external,StatusID=:status,Updated_by=:username WHERE ItemID=:itemid and Username=:username;";
			}
			$newusername = strtolower($this->username);
			$stmt = $this->db->prepare($sql);
			$stmt->bindParam(':title', $this->title, PDO::PARAM_STR);
			$stmt->bindParam(':alternate', $this->alternate, PDO::PARAM_STR);
			$stmt->bindParam(':external', $this->externallink, PDO::PARAM_STR);
			$stmt->bindParam(':itemid', $this->itemid, PDO::PARAM_STR);
			$stmt->bindParam(':status', $this->status, PDO::PARAM_STR);
			$stmt->bindParam(':username', $newusername, PDO::PARAM_STR);
			if ($stmt->execute()) {
				if ($stmt->rowCount() > 0){
					$data = [
						'status' => 'success',
						'code' => 'RS103',
						'message' => CustomHandlers::getreSlimMessage('RS103')
					];
				} else {
					// Nothing updated: unknown item, or not owned by this user.
					$data = [
						'status' => 'error',
						'code' => 'RS203',
						'message' => CustomHandlers::getreSlimMessage('RS203')
					];
				}
			} else {
				$data = [
					'status' => 'error',
					'code' => 'RS904',
					'message' => CustomHandlers::getreSlimMessage('RS904')
				];
			}
			$this->db->commit();
		} catch (PDOException $e) {
			$data = [
				'status' => 'error',
				'code' => $e->getCode(),
				'message' => $e->getMessage()
			];
			$this->db->rollBack();
		}
		return $data;
	}

	/**
	 * Delete user upload item (row plus the file on disk, when present)
	 * @return result process in json encoded data
	 */
	function doDelete(){
		// Look up the stored file path first so the file can be unlinked after
		// the row is deleted.
		$sqldata = "SELECT a.Filepath FROM user_upload a WHERE a.ItemID = :itemid;";
		$stmt = $this->db->prepare($sqldata);
		$stmt->bindParam(':itemid', $this->itemid, PDO::PARAM_STR);
		if ($stmt->execute()) {
			if ($stmt->rowCount() > 0){
				$single = $stmt->fetch();
				$filepath = $single['Filepath'];
				try {
					$this->db->beginTransaction();
					if (Auth::getRoleID($this->db,$this->token) == '1'){
						// Admin may delete any item.
						$sql = "DELETE from user_upload WHERE ItemID=:itemid;";
						$stmt2 = $this->db->prepare($sql);
						$stmt2->bindParam(':itemid', $this->itemid, PDO::PARAM_STR);
					} else {
						// Regular users may only delete their own items.
						$sql = "DELETE from user_upload WHERE ItemID=:itemid and Username=:username;";
						$newusername = strtolower($this->username);
						$stmt2 = $this->db->prepare($sql);
						$stmt2->bindParam(':itemid', $this->itemid, PDO::PARAM_STR);
						$stmt2->bindParam(':username', $newusername, PDO::PARAM_STR);
					}
					if ($stmt2->execute()) {
						if ($stmt2->rowCount() > 0){
							if (file_exists($filepath)){
								if(unlink($filepath)){
									$data = [
										'status' => 'success',
										'code' => 'RS104',
										'message' => CustomHandlers::getreSlimMessage('RS104')
									];
								} else {
									$data = [
										'status' => 'error',
										'code' => 'RS913',
										'message' => CustomHandlers::getreSlimMessage('RS913')
									];
								}
							} else {
								// Row deleted; file already gone — still a success.
								$data = [
									'status' => 'success',
									'code' => 'RS104',
									'message' => CustomHandlers::getreSlimMessage('RS104')
								];
							}
						} else {
							$data = [
								'status' => 'error',
								'code' => 'RS204',
								'message' => CustomHandlers::getreSlimMessage('RS204')
							];
						}
					} else {
						$data = [
							'status' => 'error',
							'code' => 'RS904',
							'message' => CustomHandlers::getreSlimMessage('RS904')
						];
					}
					$this->db->commit();
				} catch (PDOException $e) {
					$data = [
						'status' => 'error',
						'code' => $e->getCode(),
						'message' => $e->getMessage()
					];
					$this->db->rollBack();
				}
			} else {
				$data = [
					'status' => 'error',
					'code' => 'RS601',
					'message' => CustomHandlers::getreSlimMessage('RS601')
				];
			}
		} else {
			// Fix: the original left $data undefined (returned null with a PHP
			// notice) when this lookup query failed to execute.
			$data = [
				'status' => 'error',
				'code' => 'RS202',
				'message' => CustomHandlers::getreSlimMessage('RS202')
			];
		}
		return $data;
	}

	/**
	 * Update user upload item
	 * @return result process in json encoded data
	 */
	public function update(){
		if (Auth::validToken($this->db,$this->token,$this->username)){
			$data = $this->doUpdate();
		} else {
			$data = [
				'status' => 'error',
				'code' => 'RS401',
				'message' => CustomHandlers::getreSlimMessage('RS401')
			];
		}
		return json_encode($data, JSON_PRETTY_PRINT);
	}

	/**
	 * Delete user upload item
	 * @return result process in json encoded data
	 */
	public function delete(){
		if (Auth::validToken($this->db,$this->token,$this->username)){
			$data = $this->doDelete();
		} else {
			$data = [
				'status' => 'error',
				'code' => 'RS401',
				'message' => CustomHandlers::getreSlimMessage('RS401')
			];
		}
		return json_encode($data, JSON_PRETTY_PRINT);
	}

	/**
	 * Determine is filename exist on our server
	 * @return string|false the stored Filepath, or false when not found
	 */
	private function isFilenameInExplorer(){
		$r = false;
		$sql = "SELECT a.Filepath FROM user_upload a WHERE a.Filename=:filename;";
		$stmt = $this->db->prepare($sql);
		$stmt->bindParam(':filename', $this->filename, PDO::PARAM_STR);
		if ($stmt->execute()) {
			if ($stmt->rowCount() > 0){
				$single = $stmt->fetch();
				$r = $single['Filepath'];
			}
		}
		return $r;
	}

	/**
	 * Determine is filename are paid
	 * @return bool true when the file is a free sample or the user owns the book
	 */
	private function isFilenamePaid(){
		// NOTE(review): FILTER_SANITIZE_STRING is deprecated as of PHP 8.1;
		// kept for behavioral compatibility — consider htmlspecialchars or a
		// stricter validator when the platform is upgraded.
		$newbookid = filter_var(Validation::integerOnly($this->bookid),FILTER_SANITIZE_STRING);
		$newuniqueid = filter_var($this->uniqueid,FILTER_SANITIZE_STRING);
		$newfilename = filter_var(strtolower($this->filename),FILTER_SANITIZE_STRING);
		$newusername = strtolower($this->username);
		$r = false;
		if (strpos($newfilename, 'sample') !== false) {
			// Files whose name contains "sample" are always free to stream.
			$r = true;
		} else {
			// Otherwise the book must be in the user's library with StatusID 34.
			$sql = "SELECT a.StatusID FROM book_library a WHERE a.Guid=:uniqueid and a.Username=:username and a.BookID=:bookid and a.StatusID='34';";
			$stmt = $this->db->prepare($sql);
			$stmt->bindParam(':uniqueid', $newuniqueid, PDO::PARAM_STR);
			$stmt->bindParam(':bookid', $newbookid, PDO::PARAM_STR);
			$stmt->bindParam(':username', $newusername, PDO::PARAM_STR);
			if ($stmt->execute()) {
				if ($stmt->rowCount() > 0){
					$r = true;
				}
			}
		}
		return $r;
	}

	/**
	 * Stream a stored file to the client with anti-hotlinking headers.
	 * Extracted from forceStream()/forceStrictStream(), which duplicated this
	 * code verbatim. Terminates the request with exit.
	 *
	 * @param string $datapath    relative path stored in user_upload.Filepath
	 * @param string $disposition 'inline' or 'attachment'
	 * @param int    $age         max-age in seconds for the Cache-Control header
	 */
	private function streamFile($datapath, $disposition, $age){
		$path = realpath(__DIR__ . DIRECTORY_SEPARATOR . '..').'/api/'.$datapath;
		$fp = fopen($path, "r") ;
		header('HTTP/1.0 200 OK');
		header('Access-Control-Allow-Origin: *');
		header('Access-Control-Allow-Methods: POST, GET, OPTIONS');
		header('Access-Control-Allow-Headers: X-Requested-With, Content-Type, Accept, Origin, Authorization, Range');
		header('Cache-Control: public, must-revalidate, max-age='.$age.'');
		header('Accept-Ranges: bytes');
		header('Content-Description: File Transfer');
		header('Content-Transfer-Encoding: binary');
		header('Content-Disposition: '.$disposition.'; filename="'.$this->filename.'"');
		header('Content-length: '.filesize($path));
		// NOTE(review): this sends the bare file extension (e.g. "pdf") as the
		// Content-type value, not a real MIME type; kept as-is for
		// compatibility, but consider mapping extensions to MIME types.
		header('Content-type: '.pathinfo($path, PATHINFO_EXTENSION));
		ob_clean();
		flush();
		// Stream in 1 KiB chunks to avoid loading the whole file into memory.
		while (!feof($fp)) {
			$buff = fread($fp, 1024);
			print $buff;
		}
		exit;
	}

	/**
	 * Force stream inline or attachment to protect from hotlinking
	 * @var $stream = deliver content. Default true means inline
	 * @var $age = this is the max age for cache control in header
	 * @return result stream data or process in json encoded data
	 */
	public function forceStream($stream=true,$age=86400){
		if (Auth::validToken($this->db,$this->token)){
			$datapath = $this->isFilenameInExplorer();
			if ( $datapath != false){
				// streamFile() does not return; it exits after sending the file.
				$this->streamFile($datapath, ($stream == false) ? 'attachment' : 'inline', $age);
			} else {
				$data = [
					'status' => 'error',
					'code' => 'RS601',
					'message' => CustomHandlers::getreSlimMessage('RS601')
				];
				return json_encode($data, JSON_PRETTY_PRINT);
			}
		} else {
			$data = [
				'status' => 'error',
				'code' => 'RS401',
				'message' => CustomHandlers::getreSlimMessage('RS401')
			];
			return json_encode($data, JSON_PRETTY_PRINT);
		}
	}

	/**
	 * Force strict stream inline or attachment to protect from hotlinking
	 * (additionally requires that the user has paid for the file)
	 * @var $stream = deliver content. Default true means inline
	 * @var $age = this is the max age for cache control in header
	 * @return result stream data or process in json encoded data
	 */
	public function forceStrictStream($stream=true,$age=86400){
		if (Auth::validToken($this->db,$this->token,$this->username)){
			if ($this->isFilenamePaid()) {
				$datapath = $this->isFilenameInExplorer();
				if ( $datapath != false){
					// streamFile() does not return; it exits after sending the file.
					$this->streamFile($datapath, ($stream == false) ? 'attachment' : 'inline', $age);
				} else {
					$data = [
						'status' => 'error',
						'code' => 'RS601',
						'message' => CustomHandlers::getreSlimMessage('RS601')
					];
					return json_encode($data, JSON_PRETTY_PRINT);
				}
			} else {
				$data = [
					'status' => 'error',
					'code' => 'RS801',
					'message' => CustomHandlers::getreSlimMessage('RS801')
				];
				return json_encode($data, JSON_PRETTY_PRINT);
			}
		} else {
			$data = [
				'status' => 'error',
				'code' => 'RS401',
				'message' => CustomHandlers::getreSlimMessage('RS401')
			];
			return json_encode($data, JSON_PRETTY_PRINT);
		}
	}
}
{ "content_hash": "2af101bc405ad3a7e5dbd6d0b79118f9", "timestamp": "", "source": "github", "line_count": 852, "max_line_length": 268, "avg_line_length": 34.181924882629104, "alnum_prop": 0.5378223397314837, "repo_name": "aalfiann/reSlim-bookstore", "id": "2fc3c272d73b05d2bbc0bf2a84946e838e76cc72", "size": "29123", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/classes/Upload.php", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "3917" }, { "name": "CSS", "bytes": "339869" }, { "name": "JavaScript", "bytes": "1987310" }, { "name": "PHP", "bytes": "1218485" } ], "symlink_target": "" }
package com.github.polok.routedrawer.model; public class GeoPoint { public double lat; public double lng; }
{ "content_hash": "f85696eb876035bfada5f6386aa89276", "timestamp": "", "source": "github", "line_count": 8, "max_line_length": 43, "avg_line_length": 14.875, "alnum_prop": 0.7226890756302521, "repo_name": "polok/RouteDrawer", "id": "8fc61186888048eb61e8782e534dd7e88ecb9a4e", "size": "680", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "library/src/main/java/com/github/polok/routedrawer/model/GeoPoint.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "19634" } ], "symlink_target": "" }
using UnityEngine; using System.Collections; namespace Zeltex.Characters { /// <summary> /// Class that stores a game event. /// </summary> [System.Serializable] public class LogEvent { public float TimeHappened = 0f; public string EventType = ""; public LogEvent(string NewEvent, float TimeHappened_) { EventType = NewEvent; TimeHappened = TimeHappened_; } public string GetLabelText() { float MyTime = ((int)(TimeHappened * 100f)) / 100f; TimeHappened = TimeHappened % 60f; string MyTimeLabel = MyTime.ToString(); string MyTabs = "\t"; if (MyTimeLabel.Length == 1) MyTabs += "\t"; if (MyTimeLabel.Length >= 4) MyTabs = ""; return "[" + MyTimeLabel + "] : " + MyTabs + EventType; } } }
{ "content_hash": "b3b6961cd10f108c35aa9ccb820f2d41", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 67, "avg_line_length": 27.029411764705884, "alnum_prop": 0.5277475516866159, "repo_name": "Deus0/Zeltexium", "id": "475ee85dc2e2b0d48dcff3beb206479fd355bc58", "size": "921", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Assets/Scripts/Characters/LogEvent.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C#", "bytes": "4987172" }, { "name": "CSS", "bytes": "23201" }, { "name": "GLSL", "bytes": "9054" }, { "name": "HLSL", "bytes": "242510" }, { "name": "HTML", "bytes": "3998" }, { "name": "JavaScript", "bytes": "24275" }, { "name": "PHP", "bytes": "1236" }, { "name": "ShaderLab", "bytes": "323057" }, { "name": "Smalltalk", "bytes": "11543" } ], "symlink_target": "" }
<?xml version="1.0" encoding="utf-8"?> <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en"> <head> <title>CommitsController</title> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8" /> <link rel="stylesheet" href="../css/reset.css" type="text/css" media="screen" /> <link rel="stylesheet" href="../css/main.css" type="text/css" media="screen" /> <link rel="stylesheet" href="../css/github.css" type="text/css" media="screen" /> <script src="../js/jquery-1.3.2.min.js" type="text/javascript" charset="utf-8"></script> <script src="../js/jquery-effect.js" type="text/javascript" charset="utf-8"></script> <script src="../js/main.js" type="text/javascript" charset="utf-8"></script> <script src="../js/highlight.pack.js" type="text/javascript" charset="utf-8"></script> </head> <body> <div class="banner"> <h1> <span class="type">Class</span> CommitsController <span class="parent">&lt; <a href="ProjectResourceController.html">ProjectResourceController</a> </span> </h1> <ul class="files"> <li><a href="../files/app/controllers/commits_controller_rb.html">app/controllers/commits_controller.rb</a></li> </ul> </div> <div id="bodyContent"> <div id="content"> <!-- Method ref --> <div class="sectiontitle">Methods</div> <dl class="methods"> <dt>S</dt> <dd> <ul> <li> <a href="#method-i-show">show</a> </li> </ul> </dd> </dl> <!-- Includes --> <div class="sectiontitle">Included Modules</div> <ul> <li> <a href="ExtractsPath.html"> ExtractsPath </a> </li> </ul> <!-- Methods --> <div class="sectiontitle">Instance Public methods</div> <div class="method"> <div class="title method-title" id="method-i-show"> <b>show</b>() <a href="../classes/CommitsController.html#method-i-show" name="method-i-show" class="permalink">Link</a> </div> <div class="description"> </div> <div class="sourcecode"> <p class="source-link"> Source: <a 
href="javascript:toggleSource('method-i-show_source')" id="l_method-i-show_source">show</a> </p> <div id="method-i-show_source" class="dyn-source"> <pre><span class="ruby-comment"># File app/controllers/commits_controller.rb, line 11</span> <span class="ruby-keyword">def</span> <span class="ruby-keyword ruby-title">show</span> <span class="ruby-ivar">@repo</span> = <span class="ruby-ivar">@project</span>.<span class="ruby-identifier">repo</span> <span class="ruby-ivar">@limit</span>, <span class="ruby-ivar">@offset</span> = (<span class="ruby-identifier">params</span>[<span class="ruby-value">:limit</span>] <span class="ruby-operator">||</span> <span class="ruby-number">40</span>), (<span class="ruby-identifier">params</span>[<span class="ruby-value">:offset</span>] <span class="ruby-operator">||</span> <span class="ruby-number">0</span>) <span class="ruby-ivar">@commits</span> = <span class="ruby-ivar">@project</span>.<span class="ruby-identifier">commits</span>(<span class="ruby-ivar">@ref</span>, <span class="ruby-ivar">@path</span>, <span class="ruby-ivar">@limit</span>, <span class="ruby-ivar">@offset</span>) <span class="ruby-ivar">@commits</span> = <span class="ruby-constant">CommitDecorator</span>.<span class="ruby-identifier">decorate</span>(<span class="ruby-ivar">@commits</span>) <span class="ruby-identifier">respond_to</span> <span class="ruby-keyword">do</span> <span class="ruby-operator">|</span><span class="ruby-identifier">format</span><span class="ruby-operator">|</span> <span class="ruby-identifier">format</span>.<span class="ruby-identifier">html</span> <span class="ruby-comment"># index.html.erb</span> <span class="ruby-identifier">format</span>.<span class="ruby-identifier">js</span> <span class="ruby-identifier">format</span>.<span class="ruby-identifier">atom</span> { <span class="ruby-identifier">render</span> <span class="ruby-identifier">layout</span><span class="ruby-operator">:</span> <span class="ruby-keyword">false</span> } <span 
class="ruby-keyword">end</span> <span class="ruby-keyword">end</span></pre> </div> </div> </div> </div> </div> </body> </html>
{ "content_hash": "09892181dc22cd1f2041016e8c6ce712", "timestamp": "", "source": "github", "line_count": 155, "max_line_length": 402, "avg_line_length": 33.39354838709677, "alnum_prop": 0.5471406491499228, "repo_name": "riyad/gitlabhq", "id": "53d329673f80b29898aa15ab74629c2d8335e36a", "size": "5176", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "doc/code/classes/CommitsController.html", "mode": "33188", "license": "mit", "language": [ { "name": "CoffeeScript", "bytes": "11536" }, { "name": "JavaScript", "bytes": "39338" }, { "name": "Ruby", "bytes": "680528" }, { "name": "Shell", "bytes": "1657" } ], "symlink_target": "" }
function cg = set(cg, propName, val)
%SET Set CHEBGUI properties.
%
%   'type' - 'bvp','pde','eig'
%   'domain' - spatial domain of BVP/PDE
%   'timedomain' - time domain of PDE
%   'de' - the differential operator or RHS F in u_t = F(x,t,u)
%   'lbc' - left boundary conditions
%   'rbc' - right boundary conditions
%   'bc' - general boundary conditions
%   'tol' - tolerance
%   'init' - initial condition/guess for nonlinear BVPs/PDEs
%   'sigma' - desired eigenvalues: 'LM','SM','LA','SA','LR','SR','LI','SI'
%   'options' - a structure containing the below
%   'ivpsolver' - solver used for solving IVPs
%   'numeigs' - number of desired eigenvalues
%   'damping' - damping in newton iteration [true/false]
%   'plotting' - plotting in nonlinear solves/PDEs [true/false]
%   'grid' - display a grid on these plots [true/false]
%   'pdesolver' - solver used for solving PDEs
%   'pdeholdplot' - keep plots for each PDE timestep (i.e. hold on)
%   'fixn' - fixed spatial discretisation for PDEs (experimental)
%   'fixyaxislower' - fix y axis on plots (lower)
%   'fixyaxisupper' - fix y axis on plots (upper)
%   'discretization' - whether we want ultraS or colloc discretization for
%                      ODEs

% Copyright 2016 by The University of Oxford and The Chebfun Developers.
% See http://www.chebfun.org/ for Chebfun information.

% Avoid storing {''} in fields, rather store ''
if ( iscell(val) && isempty(val{1}) )
    val = '';
end

% Store strings, not numbers. After this point VAL is never numeric.
if ( isnumeric(val) )
    val = num2str(val);
end

switch ( lower(propName) )
    case 'type'
        if ( ~any(strcmpi(val, {'bvp', 'ivp', 'pde', 'eig'})) )
            error('CHEBFUN:CHEBGUI:set:type',...
                [val,' is not a valid type of problem.'])
        else
            cg.type = val;
        end
    case 'domain'
        cg.domain = val;
    case 'timedomain'
        cg.timedomain = val;
    case 'de'
        cg.DE = val;
    case 'lbc'
        cg.LBC = val;
    case 'rbc'
        cg.RBC = val;
    case 'bc'
        cg.BC = val;
    case 'tol'
        cg.tol = val;
    case 'init'
        cg.init = val;
    case 'sigma'
        cg.sigma = val;
    case 'options'
        cg.options = val;
    case 'damping'
        cg.options.damping = val;
    case 'plotting'
        % Fix: the original repeated the isnumeric/num2str conversion here,
        % but VAL was already converted to a string before the switch, so
        % that branch was dead code and has been removed.
        cg.options.plotting = val;
    case 'grid'
        % This really should be stored as a double, not a string...
        cg.options.grid = str2double(val);
    case 'pdeholdplot'
        cg.options.pdeholdplot = val;
    case 'fixn'
        cg.options.fixN = val;
    case 'fixyaxislower'
        cg.options.fixYaxisLower = val;
    case 'fixyaxisupper'
        cg.options.fixYaxisUpper = val;
    case 'numeigs'
        cg.options.numeigs = val;
    case 'discretization'
        cg.options.discretization = val;
    case 'ivpsolver'
        cg.options.ivpSolver = val;
    case 'pdesolver'
        cg.options.pdeSolver = val;
    otherwise
        error('CHEBFUN:CHEBGUI:set:propName',...
            [propName,' is not a valid chebgui property.'])
end

end
{ "content_hash": "82edbe5d91cec0394aa0bd6302038fc3", "timestamp": "", "source": "github", "line_count": 101, "max_line_length": 79, "avg_line_length": 31.336633663366335, "alnum_prop": 0.5867298578199052, "repo_name": "alshedivat/chebfun", "id": "0d165a0c7096d422a38d64cfc3e425acb049c7a8", "size": "3165", "binary": false, "copies": "1", "ref": "refs/heads/development", "path": "@chebgui/set.m", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "M", "bytes": "4938" }, { "name": "Matlab", "bytes": "6012627" }, { "name": "Objective-C", "bytes": "977" } ], "symlink_target": "" }
ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
{ "content_hash": "6361b764c4209ca4b3c4b8f63ed9b120", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 39, "avg_line_length": 10.307692307692308, "alnum_prop": 0.6940298507462687, "repo_name": "mdoering/backbone", "id": "70fac639ef0d638ff3e909fcb5a3afe4765dd0d7", "size": "196", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Gentianales/Rubiaceae/Psychotria/Psychotria clivorum/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
package com.github.czyzby.reinvent.inject.mockup; import javax.inject.Inject; import javax.inject.Named; import javax.inject.Singleton; import com.github.czyzby.reinvent.inject.annotation.Initiate; @Singleton public class RequestedMockedInstance { @Inject private MockDependency mockDependency; @Inject @Named(MockNamedDependency.NAME) private MockNamedDependency mockNamedDependency; private final MockDependency cachedConstructorParam; private boolean initiated; public RequestedMockedInstance(final MockDependency mockDependency) { assert mockDependency != null; cachedConstructorParam = mockDependency; } public MockDependency getMockDependency() { assert mockDependency == cachedConstructorParam; return mockDependency; } public MockNamedDependency getMockNamedDependency() { return mockNamedDependency; } @Initiate private void initiate() { initiated = true; } public boolean isInitiated() { return initiated; } }
{ "content_hash": "c151249a9124befa24f39142ef7d9393", "timestamp": "", "source": "github", "line_count": 41, "max_line_length": 93, "avg_line_length": 25.536585365853657, "alnum_prop": 0.7373447946513849, "repo_name": "czyzby/reinvent", "id": "727879c66c605e5effdd221a7e6f3e2b8f9587a5", "size": "1047", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/src/com/github/czyzby/reinvent/inject/mockup/RequestedMockedInstance.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "635624" } ], "symlink_target": "" }
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2015.07.07 at 05:42:14 PM CEST
//
// NOTE(review): JAXB-generated code — do not hand-edit the logic; regenerate
// from the schema instead. Only comments have been added in this review.


package generated_e57;

import java.math.BigDecimal;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;


/**
 * <p>Java class for anonymous complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType>
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;sequence>
 *         &lt;element ref="{}jpeg_image_size"/>
 *         &lt;element ref="{}png_image_size"/>
 *         &lt;element ref="{}image_mask_size"/>
 *         &lt;element ref="{}image_width"/>
 *         &lt;element ref="{}image_height"/>
 *         &lt;element ref="{}pixel_width"/>
 *         &lt;element ref="{}pixel_height"/>
 *       &lt;/sequence>
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 *
 *
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "jpegImageSize",
    "pngImageSize",
    "imageMaskSize",
    "imageWidth",
    "imageHeight",
    "pixelWidth",
    "pixelHeight"
})
@XmlRootElement(name = "spherical_representation")
public class SphericalRepresentation {

    @XmlElement(name = "jpeg_image_size")
    protected int jpegImageSize;
    @XmlElement(name = "png_image_size")
    protected int pngImageSize;
    @XmlElement(name = "image_mask_size")
    protected byte imageMaskSize;
    @XmlElement(name = "image_width")
    protected short imageWidth;
    @XmlElement(name = "image_height")
    protected short imageHeight;
    // pixel_width/pixel_height are the only required schema elements.
    @XmlElement(name = "pixel_width", required = true)
    protected BigDecimal pixelWidth;
    @XmlElement(name = "pixel_height", required = true)
    protected BigDecimal pixelHeight;

    /**
     * Gets the value of the jpegImageSize property.
     *
     */
    public int getJpegImageSize() {
        return jpegImageSize;
    }

    /**
     * Sets the value of the jpegImageSize property.
     *
     */
    public void setJpegImageSize(int value) {
        this.jpegImageSize = value;
    }

    /**
     * Gets the value of the pngImageSize property.
     *
     */
    public int getPngImageSize() {
        return pngImageSize;
    }

    /**
     * Sets the value of the pngImageSize property.
     *
     */
    public void setPngImageSize(int value) {
        this.pngImageSize = value;
    }

    /**
     * Gets the value of the imageMaskSize property.
     *
     */
    public byte getImageMaskSize() {
        return imageMaskSize;
    }

    /**
     * Sets the value of the imageMaskSize property.
     *
     */
    public void setImageMaskSize(byte value) {
        this.imageMaskSize = value;
    }

    /**
     * Gets the value of the imageWidth property.
     *
     */
    public short getImageWidth() {
        return imageWidth;
    }

    /**
     * Sets the value of the imageWidth property.
     *
     */
    public void setImageWidth(short value) {
        this.imageWidth = value;
    }

    /**
     * Gets the value of the imageHeight property.
     *
     */
    public short getImageHeight() {
        return imageHeight;
    }

    /**
     * Sets the value of the imageHeight property.
     *
     */
    public void setImageHeight(short value) {
        this.imageHeight = value;
    }

    /**
     * Gets the value of the pixelWidth property.
     *
     * @return
     *     possible object is
     *     {@link BigDecimal }
     *
     */
    public BigDecimal getPixelWidth() {
        return pixelWidth;
    }

    /**
     * Sets the value of the pixelWidth property.
     *
     * @param value
     *     allowed object is
     *     {@link BigDecimal }
     *
     */
    public void setPixelWidth(BigDecimal value) {
        this.pixelWidth = value;
    }

    /**
     * Gets the value of the pixelHeight property.
     *
     * @return
     *     possible object is
     *     {@link BigDecimal }
     *
     */
    public BigDecimal getPixelHeight() {
        return pixelHeight;
    }

    /**
     * Sets the value of the pixelHeight property.
     *
     * @param value
     *     allowed object is
     *     {@link BigDecimal }
     *
     */
    public void setPixelHeight(BigDecimal value) {
        this.pixelHeight = value;
    }

}
{ "content_hash": "89c8596de118c214a6dc259aa1eafaa8", "timestamp": "", "source": "github", "line_count": 200, "max_line_length": 111, "avg_line_length": 23.88, "alnum_prop": 0.5971524288107203, "repo_name": "DURAARK/sip-generator-tib", "id": "774f0610bc5b9d310cae36fd9ef1f8c536b1e25a", "size": "4776", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/generated_e57/SphericalRepresentation.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "626430" } ], "symlink_target": "" }
<?xml version="1.0" encoding="utf-8"?> <set xmlns:android="http://schemas.android.com/apk/res/android"> <translate android:duration="400" android:fromYDelta="0.0" android:toYDelta="-100.0%p" /> <alpha android:duration="300" android:fromAlpha="1.0" android:toAlpha="0.0" /> </set>
{ "content_hash": "43aef98ed04ed5100a19afcb33ce518d", "timestamp": "", "source": "github", "line_count": 6, "max_line_length": 93, "avg_line_length": 48.166666666666664, "alnum_prop": 0.6885813148788927, "repo_name": "quding0308/sxrk", "id": "a678a5f4e00c4c19f935b21242c1857291cf16a8", "size": "289", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Sxrk/res/anim/slide_out_up_page.xml", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "1724167" } ], "symlink_target": "" }
<!DOCTYPE frameset SYSTEM "frameset.dtd"> <frameset> <predicate lemma="farming"> <roleset framnet="agriculture" id="farming.01" name="to cultivate or produce a crop on" source="verb-farm.01" vncls=""> <roles> <role descr="farmer" f="" n="0"/> <role descr="land/crop" f="ppt" n="1"/> <note/> </roles> <example name="Plant farmed" src="" type=""> <text>kelp farming on the other side of the globe</text> <arg f="" n="1">kelp</arg> <rel f="">farming</rel> <arg f="loc" n="m">on the other side of the globe</arg> <note/> </example> <note>Based on sentences in nouns-00040. Comparison to farm.01. No VN class. FN class agriculture. Framed by Katie.</note> </roleset> <note/> </predicate> <note/> </frameset>
{ "content_hash": "8993e4c728d09f4d4cff6763bae7815d", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 134, "avg_line_length": 37.18518518518518, "alnum_prop": 0.47410358565737054, "repo_name": "TeamSPoon/logicmoo_workspace", "id": "9f407ae66c13c2963649f55cfaa086e8e5149cd3", "size": "1004", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "packs_sys/logicmoo_nlu/ext/pldata/propbank-frames-2.1.5/frames/farming-n.xml", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "342" }, { "name": "C", "bytes": "1" }, { "name": "C++", "bytes": "1" }, { "name": "CSS", "bytes": "126627" }, { "name": "HTML", "bytes": "839172" }, { "name": "Java", "bytes": "11116" }, { "name": "JavaScript", "bytes": "238700" }, { "name": "PHP", "bytes": "42253" }, { "name": "Perl 6", "bytes": "23" }, { "name": "Prolog", "bytes": "440882" }, { "name": "PureBasic", "bytes": "1334" }, { "name": "Rich Text Format", "bytes": "3436542" }, { "name": "Roff", "bytes": "42" }, { "name": "Shell", "bytes": "61603" }, { "name": "TeX", "bytes": "99504" } ], "symlink_target": "" }
package org.colorcoding.ibas.importexport.transformer.template; import java.io.File; import java.io.IOException; /** * 文件写入者 * * @author Niuren.Zhu * */ public abstract class FileWriter { private Template template; public final Template getTemplate() { return template; } public final void setTemplate(Template template) { this.template = template; } public abstract void write(File file) throws WriteFileException, IOException; }
{ "content_hash": "f8c6a75c64feb4e80560801f607972eb", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 78, "avg_line_length": 18.12, "alnum_prop": 0.7483443708609272, "repo_name": "color-coding/ibas.importexport", "id": "85501ac5696583948e690930ac92ad29452d0645", "size": "463", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ibas.importexport.excel/src/main/java/org/colorcoding/ibas/importexport/transformer/template/FileWriter.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "3882" }, { "name": "HTML", "bytes": "1492" }, { "name": "Java", "bytes": "372965" }, { "name": "Shell", "bytes": "4860" }, { "name": "TypeScript", "bytes": "384125" } ], "symlink_target": "" }
package org.apache.syncope.client.cli.commands.realm; import java.util.List; import java.util.Map; import org.apache.syncope.client.cli.commands.CommonsResultManager; import org.apache.syncope.common.lib.to.RealmTO; public class RealmResultManager extends CommonsResultManager { public void printRealms(final List<RealmTO> realmTOs) { System.out.println(""); for (final RealmTO realmTO : realmTOs) { printRealm(realmTO); } } private void printRealm(final RealmTO realmTO) { System.out.println(" > REALM KEY: " + realmTO.getKey()); System.out.println(" name: " + realmTO.getName()); System.out.println(" full path: " + realmTO.getFullPath()); System.out.println(" actions: " + realmTO.getActionsClassNames()); System.out.println(" templates: " + realmTO.getTemplates()); System.out.println(" parent key: " + realmTO.getParent()); System.out.println(" account policy key: " + realmTO.getAccountPolicy()); System.out.println(" password policy key: " + realmTO.getPasswordPolicy()); System.out.println(""); } public void printDetails(final Map<String, String> details) { printDetails("realms details", details); } }
{ "content_hash": "ff62ea6a0085efc408f4f31b99fcf2a0", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 86, "avg_line_length": 38.75757575757576, "alnum_prop": 0.656763096168882, "repo_name": "nscendoni/syncope", "id": "e9814f834b122c4b71404f23d9cc2b4ad8111bf9", "size": "2086", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "client/cli/src/main/java/org/apache/syncope/client/cli/commands/realm/RealmResultManager.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "2079" }, { "name": "CSS", "bytes": "254638" }, { "name": "Groovy", "bytes": "34664" }, { "name": "HTML", "bytes": "393467" }, { "name": "Java", "bytes": "7881406" }, { "name": "JavaScript", "bytes": "141855" }, { "name": "PLpgSQL", "bytes": "20311" }, { "name": "Shell", "bytes": "12870" }, { "name": "XSLT", "bytes": "25736" } ], "symlink_target": "" }
from django.db import migrations


class Migration(migrations.Migration):
    # Auto-generated migration: narrows the default permissions of
    # ModelWithChangedDefaultPermissions down to just "view".

    dependencies = [
        ('demo_app', '0002_auto_20211209_2305'),
    ]

    operations = [
        # Only the model's Meta options change; no database schema is touched.
        migrations.AlterModelOptions(
            name='modelwithchangeddefaultpermissions',
            options={'default_permissions': ('view',)},
        ),
    ]
{ "content_hash": "e6ac0c06c3aa968364454a718e7e9a67", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 55, "avg_line_length": 22.466666666666665, "alnum_prop": 0.6053412462908012, "repo_name": "RamezIssac/django-tabular-permissions", "id": "a906c1ab2c6e9fca9ac45343239df325c6fa7223", "size": "384", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "demo_proj/demo_app/migrations/0003_auto_20211209_2306.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "HTML", "bytes": "7041" }, { "name": "JavaScript", "bytes": "2404" }, { "name": "Python", "bytes": "36253" } ], "symlink_target": "" }
{{-- Renders the disciplinary process summary, then a panel for attaching new evidence ("pruebas") to it. --}}
{!!app('\App\Http\Controllers\disciplinarios\DSC_ProcesosController')->show($proceso->iddsc_procesos)!!}
<hr>
<div class='row'>
    <div class='col-sm-12'>
        <div class="panel panel-default"><!-- Panel Validacion Pruebas -->
            <div class="panel-heading">AGREGAR NUEVAS PRUEBAS AL PROCESO</div>
            <div class="panel-body">
                {{-- NOTE(review): presumably client-side JS appends input rows into
                     #contenedor_pruebas when #btn_agregarprueba is clicked and keeps
                     #numeropruebas in sync — confirm against the page's script. --}}
                <div class='col-sm-8' id='contenedor_pruebas'></div>
                <div class='col-sm-4' id='contenedor_boton_agregar_pruebas' style='padding:20px;'>
                    <p class="btn btn-primary" id="btn_agregarprueba"> + Agregar Prueba</p>
                </div>
            </div>
        </div>
        {{-- Hidden fields submitted with the form: evidence counter and the owning process id. --}}
        {!!Form::hidden('numeropruebas',0,['id'=>'numeropruebas'])!!}
        {!!Form::hidden('dsc_procesos_iddsc_procesos',$proceso->iddsc_procesos)!!}
    </div>
</div>
{ "content_hash": "091f6fc681ff5f870360ba3e41843b6a", "timestamp": "", "source": "github", "line_count": 24, "max_line_length": 104, "avg_line_length": 30.125, "alnum_prop": 0.6639004149377593, "repo_name": "adsofmelk/art", "id": "a40eee01f7d22f586eb7741ec5639a5aa4d62274", "size": "723", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "exportado/resources/views/disciplinarios/_form_ampliacion.blade.php", "mode": "33261", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "553" }, { "name": "HTML", "bytes": "363079" }, { "name": "PHP", "bytes": "245748" }, { "name": "Shell", "bytes": "159" }, { "name": "Vue", "bytes": "563" } ], "symlink_target": "" }
<?php
defined('_JEXEC') or die;

/**
 * Client model.
 *
 * @package     Joomla.Administrator
 * @subpackage  com_banners
 * @since       1.6
 */
class BannersModelClient extends JModelAdmin
{
	/**
	 * Method to test whether a record can be deleted.
	 *
	 * @param   object  $record  A record object.
	 *
	 * @return  boolean  True if allowed to delete the record. Defaults to the permission set in the component.
	 *
	 * @since   1.6
	 */
	protected function canDelete($record)
	{
		// Records without an id cannot be deleted.
		// (Was an implicit null return; made explicitly boolean.)
		if (empty($record->id)) {
			return false;
		}

		// Only trashed records (state -2) may be deleted.
		if ($record->state != -2) {
			return false;
		}

		$user = JFactory::getUser();

		// Prefer the category-level permission when the record belongs to a category.
		if (!empty($record->catid)) {
			return $user->authorise('core.delete', 'com_banners.category.'.(int) $record->catid);
		}

		return $user->authorise('core.delete', 'com_banners');
	}

	/**
	 * Method to test whether the state of a record can be changed.
	 *
	 * @param   object  $record  A record object.
	 *
	 * @return  boolean  True if allowed to change the state of the record. Defaults to the permission set in the component.
	 *
	 * @since   1.6
	 */
	protected function canEditState($record)
	{
		$user = JFactory::getUser();

		// Prefer the category-level permission when the record belongs to a category.
		if (!empty($record->catid)) {
			return $user->authorise('core.edit.state', 'com_banners.category.'.(int) $record->catid);
		}

		return $user->authorise('core.edit.state', 'com_banners');
	}

	/**
	 * Returns a reference to the a Table object, always creating it.
	 *
	 * @param   string  $type    The table type to instantiate.
	 * @param   string  $prefix  A prefix for the table class name. Optional.
	 * @param   array   $config  Configuration array for model. Optional.
	 *
	 * @return  JTable  A database object
	 *
	 * @since   1.6
	 */
	public function getTable($type = 'Client', $prefix = 'BannersTable', $config = array())
	{
		return JTable::getInstance($type, $prefix, $config);
	}

	/**
	 * Method to get the record form.
	 *
	 * @param   array    $data      Data for the form.
	 * @param   boolean  $loadData  True if the form is to load its own data (default case), false if not.
	 *
	 * @return  mixed  A JForm object on success, false on failure
	 *
	 * @since   1.6
	 */
	public function getForm($data = array(), $loadData = true)
	{
		// Get the form.
		$form = $this->loadForm('com_banners.client', 'client', array('control' => 'jform', 'load_data' => $loadData));

		if (empty($form)) {
			return false;
		}

		return $form;
	}

	/**
	 * Method to get the data that should be injected in the form.
	 *
	 * @return  mixed  The data for the form.
	 *
	 * @since   1.6
	 */
	protected function loadFormData()
	{
		// Check the session for previously entered form data.
		$data = JFactory::getApplication()->getUserState('com_banners.edit.client.data', array());

		if (empty($data)) {
			$data = $this->getItem();
		}

		$this->preprocessData('com_banners.client', $data);

		return $data;
	}

	/**
	 * Prepare and sanitise the table data prior to saving.
	 *
	 * @param   JTable  $table  A JTable object.
	 *
	 * @since   1.6
	 */
	protected function prepareTable($table)
	{
		// The name is stored HTML-encoded; decode it before saving.
		$table->name = htmlspecialchars_decode($table->name, ENT_QUOTES);
	}
}
{ "content_hash": "fe6f9fa2a9efb9b387c31235405af4b6", "timestamp": "", "source": "github", "line_count": 128, "max_line_length": 121, "avg_line_length": 23.6953125, "alnum_prop": 0.6333663039894494, "repo_name": "OscarMesa/ascolsa", "id": "af4092609e4edf9fa76880df28047553f8c5d281", "size": "3276", "binary": false, "copies": "82", "ref": "refs/heads/master", "path": "administrator/components/com_banners/models/client.php", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "2477904" }, { "name": "JavaScript", "bytes": "2038053" }, { "name": "PHP", "bytes": "14142843" }, { "name": "Perl", "bytes": "56400" }, { "name": "XSLT", "bytes": "21232" } ], "symlink_target": "" }
<?php

namespace Bundle\ChessBundle\Controller;

use Symfony\Bundle\FrameworkBundle\Controller\Controller;
use Symfony\Component\HttpFoundation\Response;
use Bundle\ChessBundle\Entity\Move;
use Bundle\ChessBundle\Entity\Game;
use Bundle\ChessBundle\Entity\Player;
use Bundle\ChessBundle\Entity\Friend;

/**
 * AJAX endpoint polled by the chess board page to find out whether the
 * turn has passed to the caller and, if so, what the opponent's last
 * draw was.
 */
class TurnController extends Controller
{
    /**
     * Answers an AJAX poll with the current turn state of a game.
     *
     * Expects POST fields:
     *   - sendid: id of the game to check
     *   - myturn: the turn value the client last saw ("w" or "b")
     *
     * Responds with JSON: {"turn":"0"} while nothing has changed,
     * otherwise {"turn","gameid","lastdraw"}.
     *
     * NOTE(review): reads raw $_POST instead of Symfony's Request object;
     * confirm callers always POST both fields, otherwise this notices out.
     */
    public function checkturnAction()
    {
        $gameidIn = $_POST['sendid'];
        $whosturn = $_POST['myturn'];

        $em = $this -> getDoctrine()-> getEntityManager();
        $game = $em -> getRepository('BundleChessBundle:Game') -> getGame($gameidIn);
        // NOTE(review): $gameboard is fetched but never used in this action.
        $gameboard = $game -> getGameboard();
        $turn = $game -> getTurn();
        $gameid = $game -> getGameid();

        // if it is not your turn (translated from Swedish; the client
        // apparently posts the turn value it last saw, so equality means
        // nothing has changed yet)
        if ($turn == $whosturn) {
            $game = array("turn" => "0");
        } else {
            if ($turn == "b") {
                // Black to move, so white made the last draw.
                if(!$whitedraws = $game -> getWhitedraws()) {
                    $lastdraw = "0";
                } else {
                    $lastdraw = end($whitedraws);
                }
            }
            if ($turn == "w") {
                // White to move, so black made the last draw.
                if(!$blackdraws = $game -> getBlackdraws()) {
                    $lastdraw = "0";
                } else {
                    $lastdraw = end($blackdraws);
                }
            }
            $game = array(
                "turn" => $turn ,
                "gameid" => $gameid ,
                "lastdraw" => $lastdraw
            );
        }

        // this is the JSON sent back as the reply to the ajax request
        // (translated from Swedish)
        $game = json_encode($game);

        $response = new Response();
        $response->setContent($game);
        $response->setStatusCode(200);
        $response->headers->set('Content-Type', 'text/javascript');
        // prints the javascript headers followed by the content
        return $response;
    }
}
?>
{ "content_hash": "df58706dfc7ad815955b04fbe51e90cf", "timestamp": "", "source": "github", "line_count": 69, "max_line_length": 62, "avg_line_length": 23.463768115942027, "alnum_prop": 0.5966646077825818, "repo_name": "palletorsson/symfony-of-chess", "id": "f07ac539447e2a34410a94b517a1b92d95021b16", "size": "1622", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Bundle/ChessBundle/Controller/TurnController.php", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "14914" }, { "name": "PHP", "bytes": "243403" } ], "symlink_target": "" }
"use strict";
var Promise = require('bluebird'),
    TestFixture = require('./support'),
    r = TestFixture.r,
    expect = require('chai').expect;

// Buffers are broken on node v0.10.x, so buffer-specific suites are skipped
// there. Dots are escaped so only a literal "v0.10." matches (the old
// /v0.10./ pattern also matched e.g. "v0a10x").
var conditionalDescribe = describe, conditionalIt = it;
if (process.version.match(/v0\.10\./)) {
  console.log('Buffers are not properly supported with v0.10.x at the moment, skipping...');
  conditionalDescribe = describe.skip;
  conditionalIt = it.skip;
}

var test = new TestFixture();
describe('Datum', function() {
  before(function() { return test.setup(); });
  after(function() { return test.teardown(); });

  it('all raw datum should be defined', function() {
    return Promise.all([
      r.expr(1), r.expr(null), r.expr(false), r.expr(true), r.expr('Hello'),
      r.expr([0, 1, 2]), r.expr({a: 0, b: 1})
    ])
    .spread(function(r1, r2, r3, r4, r5, r6, r7) {
      expect(r1).to.eql(1);
      expect(r2).to.eql(null);
      expect(r3).to.eql(false);
      expect(r4).to.eql(true);
      expect(r5).to.eql('Hello');
      expect(r6).to.eql([0, 1, 2]);
      expect(r7).to.eql({ a: 0, b: 1 });
    });
  });

  describe('#expr', function() {
    it('is not defined after a term', function() {
      var invalid = function() { return r.expr(1).expr('foo'); };
      expect(invalid).to.throw('`expr` is not defined after:\nr.expr(1)');
    });

    it('should take a nestingLevel value and throw if the nesting level is reached', function() {
      var invalid = function() { return r.expr({a: {b: {c: {d: 1}}}}, 2); };
      expect(invalid).to.throw('Nesting depth limit exceeded.\nYou probably have a circular reference somewhere.');
    });

    it('should throw when setNestingLevel is too small', function() {
      r.setNestingLevel(2);
      var invalid = function() { return r.expr({a: {b: {c: {d: 1}}}}); };
      expect(invalid).to.throw('Nesting depth limit exceeded.\nYou probably have a circular reference somewhere.');

      // reset nesting level
      r.setNestingLevel(r._nestingLevel);
    });

    it('should work when setNestingLevel set back to 100', function() {
      r.setNestingLevel(2);
      r.setNestingLevel(100);

      return r.expr({a: {b: {c: {d: 1}}}})
        .then(function(result) {
          expect(result).to.eql({a: {b: {c: {d: 1}}}});
        });
    });

    it('should throw when arrayLimit is too small', function() {
      var invalid = r.expr([0, 1, 2, 3, 4, 5, 6, 8, 9]).run({arrayLimit: 2});
      // Return the chai-as-promised assertion so mocha actually waits for it;
      // without the return the rejection was unobserved and the test passed
      // vacuously.
      return expect(invalid).to.eventually.be.rejectedWith(/Array over size limit `2` in/);
    });

    it('should throw when arrayLimit is too small - options in run take precedence', function() {
      r.setArrayLimit(100);
      var invalid = r.expr([0, 1, 2, 3, 4, 5, 6, 8, 9]).run({ arrayLimit: 2 });
      var expectation = expect(invalid).to.eventually.be.rejectedWith(/Array over size limit `2` in/);

      // reset array limit
      r.setArrayLimit(r._arrayLimit);
      return expectation;
    });

    it('should throw when setArrayLimit is too small', function() {
      r.setArrayLimit(1);
      var invalid = r.expr([0, 1, 2, 3, 4, 5, 6, 8, 9]);
      return expect(invalid).to.eventually.be.rejectedWith(/Array over size limit `1` in/);
    });

    it('should work when setArrayLimit set back to 100000', function() {
      r.setArrayLimit(1);
      r.setArrayLimit(100000);

      return r.expr([0, 1, 2, 3, 4, 5, 6, 8, 9])
        .then(function(result) {
          expect(result).to.eql([0, 1, 2, 3, 4, 5, 6, 8, 9]);
        });
    });

    it('should fail with NaN', function() {
      var invalid = r.expr(NaN);
      return expect(invalid).to.eventually.be.rejectedWith(/Cannot convert `NaN` to JSON/);
    });

    it('should not throw with NaN if not run', function() {
      r.expr(NaN);
    });

    it('should fail with Infinity', function() {
      var invalid = r.expr(Infinity);
      return expect(invalid).to.eventually.be.rejectedWith(/Cannot convert `Infinity` to JSON/);
    });

    it('should not throw with Infinity if not run', function() {
      r.expr(Infinity);
    });

    it('should work with high unicode character', function() {
      return r.expr('“').then(function(result) { expect(result).to.eql('“'); });
    });

    conditionalIt('should work with Buffers', function() {
      return r.expr(new Buffer([1, 2, 3, 4, 5, 6]))
        .then(function(result) {
          expect(result.toJSON().data).to.eql([1, 2, 3, 4, 5, 6]);
        });
    });
  });

  conditionalDescribe('#binary', function() {
    it('should work - with a buffer', function() {
      return r.binary(new Buffer([1, 2, 3, 4, 5, 6]))
        .then(function(result) {
          expect(result).to.be.an.instanceOf(Buffer);
          expect(result.toJSON().data).to.eql([1, 2, 3, 4, 5, 6]);
        });
    });

    it('should work - with a ReQL term', function() {
      return r.binary(r.expr('foo'))
        .then(function(result) {
          expect(result).to.be.an.instanceOf(Buffer);
          return r.expr(result).coerceTo('STRING');
        })
        .then(function(result) {
          expect(result).to.equal('foo');
        });
    });
  });
});
{ "content_hash": "59b3e030230843723269d97de9e0a5b4", "timestamp": "", "source": "github", "line_count": 148, "max_line_length": 115, "avg_line_length": 33.83783783783784, "alnum_prop": 0.5818690095846646, "repo_name": "mbroadst/rethunk", "id": "243cf997a4d419fa98f6cf3f4d06839925192c0c", "size": "5012", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/datum.test.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "691750" }, { "name": "Makefile", "bytes": "533" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?>
<!-- Maven module producing the JAXB bindings jar for the workflow service. -->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
    <parent>
        <artifactId>org.collectionspace.services.workflow</artifactId>
        <groupId>org.collectionspace.services</groupId>
        <version>4.3-SNAPSHOT</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <groupId>org.collectionspace.services</groupId>
    <artifactId>org.collectionspace.services.workflow.jaxb</artifactId>
    <name>services.workflow.jaxb</name>
    <dependencies>
        <dependency>
            <groupId>org.collectionspace.services</groupId>
            <artifactId>org.collectionspace.services.jaxb</artifactId>
            <version>${project.version}</version>
        </dependency>
    </dependencies>
    <build>
        <finalName>collectionspace-services-workflow-jaxb</finalName>
        <defaultGoal>install</defaultGoal>
        <plugins>
            <!-- Generates JAXB classes from the module's schemas. -->
            <plugin>
                <groupId>org.jvnet.jaxb2.maven2</groupId>
                <artifactId>maven-jaxb2-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>
{ "content_hash": "2b114e280fbcb786817998a640695787", "timestamp": "", "source": "github", "line_count": 35, "max_line_length": 100, "avg_line_length": 36.57142857142857, "alnum_prop": 0.63359375, "repo_name": "cherryhill/collectionspace-services", "id": "643335695d506d827c17fe9a71fed6f9cdee19f9", "size": "1280", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "services/workflow/jaxb/pom.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "1217" }, { "name": "Java", "bytes": "4782521" }, { "name": "JavaScript", "bytes": "834" }, { "name": "PLpgSQL", "bytes": "1794" } ], "symlink_target": "" }
using System;
using System.Collections.Generic;
using ICSharpCode.NRefactory.Ast;

namespace ICSharpCode.NRefactory.Visitors
{
	/// <summary>
	/// Prefixes the names of the specified fields with the prefix and replaces the use.
	/// Tracks local-variable/parameter scopes so that a field reference shadowed by a
	/// local of the same name is left untouched.
	/// </summary>
	public class PrefixFieldsVisitor : AbstractAstVisitor
	{
		// Field declarations to rename; their Name is rewritten in Run().
		List<VariableDeclaration> fields;
		// Names of locals/parameters declared in the innermost open scope.
		List<string> curBlock = new List<string>();
		// Enclosing scopes' name lists (innermost scope is curBlock, not on the stack).
		Stack<List<string>> blocks = new Stack<List<string>>();
		string prefix;

		public PrefixFieldsVisitor(List<VariableDeclaration> fields, string prefix)
		{
			this.fields = fields;
			this.prefix = prefix;
		}

		/// <summary>
		/// Rewrites all uses of the fields inside <paramref name="typeDeclaration"/>,
		/// then renames the field declarations themselves.
		/// </summary>
		public void Run(INode typeDeclaration)
		{
			typeDeclaration.AcceptVisitor(this, null);
			// Rename the declarations last so uses are matched by the original names above.
			foreach (VariableDeclaration decl in fields) {
				decl.Name = prefix + decl.Name;
			}
		}

		// Each of the following Visit overrides opens a fresh scope for the
		// duration of the node and restores the previous one afterwards.

		public override object VisitTypeDeclaration(TypeDeclaration typeDeclaration, object data)
		{
			Push();
			object result = base.VisitTypeDeclaration(typeDeclaration, data);
			Pop();
			return result;
		}

		public override object VisitBlockStatement(BlockStatement blockStatement, object data)
		{
			Push();
			object result = base.VisitBlockStatement(blockStatement, data);
			Pop();
			return result;
		}

		public override object VisitMethodDeclaration(MethodDeclaration md, object data)
		{
			Push();
			object result = base.VisitMethodDeclaration(md, data);
			Pop();
			return result;
		}

		public override object VisitPropertyDeclaration(PropertyDeclaration propertyDeclaration, object data)
		{
			Push();
			object result = base.VisitPropertyDeclaration(propertyDeclaration, data);
			Pop();
			return result;
		}

		public override object VisitConstructorDeclaration(ConstructorDeclaration constructorDeclaration, object data)
		{
			Push();
			object result = base.VisitConstructorDeclaration(constructorDeclaration, data);
			Pop();
			return result;
		}

		// Open a new (empty) scope, saving the current one.
		private void Push()
		{
			blocks.Push(curBlock);
			curBlock = new List<string>();
		}

		// Close the innermost scope, restoring the enclosing one.
		private void Pop()
		{
			curBlock = blocks.Pop();
		}

		public override object VisitVariableDeclaration(VariableDeclaration vd, object data)
		{
			// process local variables only
			if (fields.Contains(vd)) {
				return null;
			}
			curBlock.Add(vd.Name);
			return base.VisitVariableDeclaration(vd, data);
		}

		public override object VisitParameterDeclarationExpression(ParameterDeclarationExpression parameterDeclarationExpression, object data)
		{
			// Parameters shadow fields just like locals do.
			curBlock.Add(parameterDeclarationExpression.ParameterName);
			return base.VisitParameterDeclarationExpression(parameterDeclarationExpression, data);
		}

		public override object VisitForeachStatement(ForeachStatement foreachStatement, object data)
		{
			// The foreach iteration variable shadows fields within the statement.
			curBlock.Add(foreachStatement.VariableName);
			return base.VisitForeachStatement(foreachStatement, data);
		}

		public override object VisitIdentifierExpression(IdentifierExpression identifierExpression, object data)
		{
			// Plain identifier: rename it when it names one of the fields and no
			// local/parameter of the same name is in scope.
			string name = identifierExpression.Identifier;
			foreach (VariableDeclaration var in fields) {
				if (var.Name == name && !IsLocal(name)) {
					identifierExpression.Identifier = prefix + name;
					break;
				}
			}
			return base.VisitIdentifierExpression(identifierExpression, data);
		}

		public override object VisitMemberReferenceExpression(MemberReferenceExpression fieldReferenceExpression, object data)
		{
			// "this.name" always refers to the field, so no shadowing check is needed.
			if (fieldReferenceExpression.TargetObject is ThisReferenceExpression) {
				string name = fieldReferenceExpression.MemberName;
				foreach (VariableDeclaration var in fields) {
					if (var.Name == name) {
						fieldReferenceExpression.MemberName = prefix + name;
						break;
					}
				}
			}
			return base.VisitMemberReferenceExpression(fieldReferenceExpression, data);
		}

		// True when a local/parameter named <paramref name="name"/> is declared in
		// the current scope or any enclosing one.
		bool IsLocal(string name)
		{
			foreach (List<string> block in blocks) {
				if (block.Contains(name))
					return true;
			}
			return curBlock.Contains(name);
		}
	}
}
{ "content_hash": "25d888ef5aee9ec8cb07ef53abd70c38", "timestamp": "", "source": "github", "line_count": 141, "max_line_length": 136, "avg_line_length": 29.26950354609929, "alnum_prop": 0.7099588078507391, "repo_name": "jbuedel/AgentRalphPlugin", "id": "6c4580f1fea40e75778d5c6ec383571e4c06d5de", "size": "4368", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "NRefactory/Project/Src/Visitors/PrefixFieldsVisitor.cs", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C#", "bytes": "3197275" }, { "name": "Perl", "bytes": "9287" }, { "name": "PowerShell", "bytes": "2507" }, { "name": "Shell", "bytes": "373" }, { "name": "XSLT", "bytes": "12790" } ], "symlink_target": "" }
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="UnixUtils.cs" company="SoftChains">
//  Copyright 2016 Dan Gershony
//
//  Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
//  THIS CODE AND INFORMATION ARE PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND,
//  EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED WARRANTIES
//  OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR PURPOSE.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Nako.Extensions
{
    #region Using Directives

    using System;
    using System.Globalization;

    #endregion

    /// <summary>
    /// Internal class providing certain utility functions to other classes.
    /// </summary>
    internal sealed class UnixUtils
    {
        #region Static Fields

        /// <summary>
        /// The Unix epoch (1970-01-01T00:00:00Z). Explicitly marked as UTC — the
        /// epoch is defined in UTC, and this makes dates produced by
        /// <see cref="UnixTimestampToDate(long)"/> carry <see cref="DateTimeKind.Utc"/>
        /// instead of <see cref="DateTimeKind.Unspecified"/>.
        /// </summary>
        private static readonly DateTime UnixStartDate = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);

        #endregion

        #region Methods

        /// <summary>
        /// Converts a <see cref="DateTime"/> object into a unix timestamp number.
        /// </summary>
        /// <param name="date">
        /// The date to convert; callers are expected to pass a UTC date.
        /// </param>
        /// <returns>
        /// A long for the number of seconds since 1st January 1970, as per unix specification.
        /// </returns>
        internal static long DateToUnixTimestamp(DateTime date)
        {
            var ts = date - UnixStartDate;
            return (long)ts.TotalSeconds;
        }

        /// <summary>
        /// Converts a string, representing a unix timestamp number into a <see cref="DateTime"/> object.
        /// </summary>
        /// <param name="timestamp">
        /// The timestamp, as a string.
        /// </param>
        /// <returns>
        /// The UTC <see cref="DateTime"/> the time represents, or
        /// <see cref="DateTime.MinValue"/> when the input is null or empty.
        /// </returns>
        internal static DateTime UnixTimestampToDate(string timestamp)
        {
            if (string.IsNullOrEmpty(timestamp))
            {
                return DateTime.MinValue;
            }

            // Unix timestamps are machine data, not user-facing text, so parse
            // with the invariant culture.
            return UnixTimestampToDate(long.Parse(timestamp, CultureInfo.InvariantCulture));
        }

        /// <summary>
        /// Converts a <see cref="long"/>, representing a unix timestamp number into a <see cref="DateTime"/> object.
        /// </summary>
        /// <param name="timestamp">
        /// The unix timestamp.
        /// </param>
        /// <returns>
        /// The UTC <see cref="DateTime"/> the time represents.
        /// </returns>
        internal static DateTime UnixTimestampToDate(long timestamp)
        {
            return UnixStartDate.AddSeconds(timestamp);
        }

        #endregion
    }
}
{ "content_hash": "99dcaa2f0ca71a6eaad25c42dfc5d347", "timestamp": "", "source": "github", "line_count": 85, "max_line_length": 120, "avg_line_length": 34.07058823529412, "alnum_prop": 0.5293508287292817, "repo_name": "CoinVault/Nako", "id": "8539448302e42ca7d2fba24126826c0fad832c2a", "size": "2898", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "core/nako.indexer/Extensions/UnixUtils.cs", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "57" }, { "name": "C#", "bytes": "321538" }, { "name": "CSS", "bytes": "305457" }, { "name": "Dockerfile", "bytes": "26240" }, { "name": "HTML", "bytes": "56318" }, { "name": "JavaScript", "bytes": "18666" }, { "name": "Makefile", "bytes": "7786" }, { "name": "Shell", "bytes": "1378" } ], "symlink_target": "" }
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import 'chrome://resources/cr_elements/cr_button/cr_button.m.js';
import 'chrome://resources/cr_elements/shared_vars_css.m.js';
import './ui_trigger_list_object.js';
import './shared_style.js';

import {WebUIListenerBehavior} from 'chrome://resources/js/web_ui_listener_behavior.m.js';
import {html, Polymer} from 'chrome://resources/polymer/v3_0/polymer/polymer_bundled.min.js';

import {NearbyPrefsBrowserProxy} from './nearby_prefs_browser_proxy.js';
import {NearbyUiTriggerBrowserProxy} from './nearby_ui_trigger_browser_proxy.js';
import {NearbyShareStates, ShareTarget, ShareTargetDiscoveryChange, ShareTargetSelectOption, StatusCode, TimestampedMessage, TransferMetadataStatus} from './types.js';

/**
 * "UI Trigger" tab of chrome://nearby-internals: buttons that fire Nearby
 * Share / Fast Pair events in the browser process and a log of the
 * timestamped results reported back over WebUI listeners.
 */
Polymer({
  is: 'ui-trigger-tab',

  _template: html`{__html_template__}`,

  behaviors: [
    WebUIListenerBehavior,
  ],

  properties: {
    /**
     * Newest-first log of events shown in the tab.
     * @private {!Array<!TimestampedMessage>}
     */
    uiTriggerObjectList_: {
      type: Array,
      value: [],
    },

    /**
     * Options for the share-target <select> element.
     * @private {!Array<!ShareTargetSelectOption>}
     */
    shareTargetSelectOptionList_: {
      type: Array,
      value: [],
    },

    /** @private {string} ID of the selected ShareTarget or ''*/
    selectedShareTargetId_: String,
  },

  /** @private {?NearbyUiTriggerBrowserProxy}*/
  browserProxy_: null,

  /** @private {?NearbyPrefsBrowserProxy}*/
  prefsBrowserProxy_: null,

  /**
   * Initialize |browserProxy_|,|selectedShareTargetId_|, and
   * |shareTargetSelectOptionList_|.
   * @override
   */
  created() {
    this.browserProxy_ = NearbyUiTriggerBrowserProxy.getInstance();
    this.prefsBrowserProxy_ = NearbyPrefsBrowserProxy.getInstance();
  },

  /**
   * When the page is initialized, notify the C++ layer to allow JavaScript and
   * initialize WebUI Listeners.
   * @override
   */
  attached() {
    this.addWebUIListener(
        'transfer-updated',
        transferUpdate => this.onTransferUpdateAdded_(transferUpdate));
    this.addWebUIListener(
        'share-target-discovered',
        shareTarget => this.onShareTargetDiscovered_(shareTarget));
    this.addWebUIListener(
        'share-target-lost',
        shareTarget => this.onShareTargetLost_(shareTarget));
    this.addWebUIListener(
        'on-status-code-returned',
        statusCode => this.onStatusCodeReturned_(statusCode));
    this.addWebUIListener(
        'share-target-map-updated',
        shareTargetMapUpdate =>
            this.onShareTargetMapChanged_(shareTargetMapUpdate));
    this.browserProxy_.initialize();
  },

  /**
   * Triggers RegisterSendSurface with Foreground as Send state.
   * @private
   */
  onRegisterSendSurfaceForegroundClicked_() {
    this.browserProxy_.registerSendSurfaceForeground().then(
        statusCode => this.onStatusCodeReturned_(statusCode));
  },

  /**
   * Triggers RegisterSendSurface with Background as Send state.
   * @private
   */
  onRegisterSendSurfaceBackgroundClicked_() {
    this.browserProxy_.registerSendSurfaceBackground().then(
        statusCode => this.onStatusCodeReturned_(statusCode));
  },

  /**
   * Triggers UnregisterSendSurface.
   * @private
   */
  onUnregisterSendSurfaceClicked_() {
    this.browserProxy_.unregisterSendSurface().then(
        statusCode => this.onStatusCodeReturned_(statusCode));
  },

  /**
   * Clears Nearby Share Prefs.
   * @private
   */
  onClearPrefsButtonClicked_() {
    this.prefsBrowserProxy_.clearNearbyPrefs();
  },

  /**
   * Shows the Fast Pair error notification.
   * @private
   */
  onFastPairErrorNotificationClicked_() {
    this.browserProxy_.notifyFastPairError();
  },

  /**
   * Shows the Fast Pair discovery notification.
   * @private
   */
  onFastPairDiscoveryNotificationClicked_() {
    this.browserProxy_.notifyFastPairDiscovery();
  },

  /**
   * Shows the Fast Pair pairing notification.
   * @private
   */
  onFastPairPairingNotificationClicked_() {
    this.browserProxy_.notifyFastPairPairing();
  },

  /**
   * Shows the Fast Pair associate-account notification.
   * @private
   */
  onFastPairAssociateAccountNotificationClicked_() {
    this.browserProxy_.notifyFastPairAssociateAccount();
  },

  /**
   * Triggers RegisterReceiveSurface with Foreground as Receive state.
   * @private
   */
  onRegisterReceiveSurfaceForegroundClicked_() {
    this.browserProxy_.registerReceiveSurfaceForeground().then(
        statusCode => this.onStatusCodeReturned_(statusCode));
  },

  /**
   * Triggers RegisterReceiveSurface with Background as Receive state.
   * @private
   */
  onRegisterReceiveSurfaceBackgroundClicked_() {
    this.browserProxy_.registerReceiveSurfaceBackground().then(
        statusCode => this.onStatusCodeReturned_(statusCode));
  },

  /**
   * Triggers UnregisterReceiveSurface.
   * @private
   */
  onUnregisterReceiveSurfaceClicked_() {
    this.browserProxy_.unregisterReceiveSurface().then(
        statusCode => this.onStatusCodeReturned_(statusCode));
  },

  /**
   * Logs status code returned by triggered events.
   * @param {!StatusCode} statusCode
   * @private
   */
  onStatusCodeReturned_(statusCode) {
    const message =
        statusCode.triggerEvent + ' Result: ' + statusCode.statusCode;
    const time = statusCode.time;
    // unshift: newest entries appear at the top of the log.
    this.unshift('uiTriggerObjectList_', {'message': message, 'time': time});
  },

  /**
   * Updates state variables based on the dictionary returned once triggered
   * by |GetState|.
   * @param {!NearbyShareStates} currentStates
   * @private
   */
  onCurrentStatesReturned_(currentStates) {
    const time = currentStates.time;
    const message =
        `Is Scanning? : ${currentStates.isScanning}\nIs Transferring? : ${
            currentStates.isTransferring}\nIs Receiving? : ${
            currentStates.isReceiving}\nIs Sending? : ${
            currentStates.isSending}\nIs Connecting? : ${
            currentStates.isConnecting}\nIs In High Visibility? : ${
            currentStates.isInHighVisibility}`;
    this.unshift('uiTriggerObjectList_', {'message': message, 'time': time});
  },

  /**
   * Triggers sendText with selected |shareTargetId|.
   * @private
   */
  onSendTextClicked_() {
    this.browserProxy_.sendText(this.selectedShareTargetId_)
        .then(statusCode => this.onStatusCodeReturned_(statusCode));
  },

  /**
   * Triggers Accept with selected |shareTargetId|.
   * @private
   */
  onAcceptClicked_() {
    this.browserProxy_.accept(this.selectedShareTargetId_);
  },

  /**
   * Triggers Reject with selected |shareTargetId|.
   * @private
   */
  onRejectClicked_() {
    this.browserProxy_.reject(this.selectedShareTargetId_);
  },

  /**
   * Triggers Cancel with selected |shareTargetId|.
   * @private
   */
  onCancelClicked_() {
    this.browserProxy_.cancel(this.selectedShareTargetId_);
  },

  /**
   * Triggers Open with selected |shareTargetId|.
   * @private
   */
  onOpenClicked_() {
    this.browserProxy_.open(this.selectedShareTargetId_);
  },

  /**
   * Triggers GetState to retrieve current states and update display
   * accordingly.
   * @private
   */
  onGetStatesClicked_() {
    this.browserProxy_.getState().then(
        currentStates => this.onCurrentStatesReturned_(currentStates));
  },

  /**
   * Updates |selectedShareTargetId_| with the new selected option.
   * @private
   */
  onSelectChange_() {
    this.selectedShareTargetId_ =
        this.shadowRoot.querySelector('#share-select').selectedOptions[0].value;
  },

  /**
   * Parses an array of ShareTargets and adds to the JavaScript list
   * |shareTargetSelectOptionList_| to be displayed in select list.
   * @param {!Array<!ShareTarget>} shareTargetMapUpdate
   * @private
   */
  onShareTargetMapChanged_(shareTargetMapUpdate) {
    // Rebuild the option list from scratch, keeping the current selection.
    this.shareTargetSelectOptionList_ = [];
    shareTargetMapUpdate.forEach((shareTarget) => {
      const name = `${shareTarget.deviceName} (${shareTarget.shareTargetId})`;
      const value = shareTarget.shareTargetId;
      const selected = value === this.selectedShareTargetId_;
      this.push(
          'shareTargetSelectOptionList_',
          {'name': name, 'selected': selected, 'value': value});
    });
  },

  /**
   * Handles ShareTargets when they are discovered in the C++.
   * @param {!ShareTarget} shareTarget
   * @private
   */
  onShareTargetDiscovered_(shareTarget) {
    this.convertShareTargetToTimestampedMessageAndAppendToList_(
        shareTarget, ShareTargetDiscoveryChange.DISCOVERED);
  },

  /**
   * Handles ShareTargets when they are lost in the C++.
   * @param {!ShareTarget} shareTarget
   * @private
   */
  onShareTargetLost_(shareTarget) {
    this.convertShareTargetToTimestampedMessageAndAppendToList_(
        shareTarget, ShareTargetDiscoveryChange.LOST);
  },

  /**
   * Adds |transferUpdate| sent in from WebUI listener to the displayed list.
   * @param {!TransferMetadataStatus} transferUpdate
   * @private
   */
  onTransferUpdateAdded_(transferUpdate) {
    this.convertTransferUpdateTimestampedMessageAndAppendToList_(
        transferUpdate);
  },

  /**
   * Converts |transferUpdate| sent in to a generic object to be displayed.
   * @param {!TransferMetadataStatus} transferUpdate
   * @private
   */
  convertTransferUpdateTimestampedMessageAndAppendToList_(transferUpdate) {
    const time = transferUpdate.time;
    const message =
        `${transferUpdate.deviceName} (${transferUpdate.shareTargetId}): ${
            transferUpdate.transferMetadataStatus}`;
    this.unshift('uiTriggerObjectList_', {'message': message, 'time': time});
  },

  /**
   * Converts |statusCode| sent in to a generic object to be displayed.
   * NOTE(review): appears unused within this file — possibly kept for
   * callers/tests elsewhere; confirm before removing.
   * @param {!StatusCode} statusCode
   * @private
   */
  convertStatusCodeToTimestampedMessageAndAppendToList_(statusCode) {
    const time = statusCode.time;
    const message = `${statusCode.triggerEvent} ${statusCode.statusCode}`;
    this.unshift('uiTriggerObjectList_', {'message': message, 'time': time});
  },

  /**
   * Converts |shareTarget| sent in to when discovered/lost a generic object to
   * be displayed.
   * @private
   * @param {!ShareTarget} shareTarget
   * @param {!ShareTargetDiscoveryChange} discoveryChange
   */
  convertShareTargetToTimestampedMessageAndAppendToList_(
      shareTarget, discoveryChange) {
    const time = shareTarget.time;
    const message = `${shareTarget.deviceName} (${shareTarget.shareTargetId}) ${
        this.shareTargetDirectionToString_(discoveryChange)}`;
    this.unshift('uiTriggerObjectList_', {'message': message, 'time': time});
  },

  /**
   * Sets the string representation of ShareTargetDiscoveryChange
   * |discoveryChange|.
   * @private
   * @param {!ShareTargetDiscoveryChange} discoveryChange
   * @return {string|undefined} 'discovered'/'lost', or undefined for an
   *     unrecognized value.
   */
  shareTargetDirectionToString_(discoveryChange) {
    switch (discoveryChange) {
      case ShareTargetDiscoveryChange.DISCOVERED:
        return 'discovered';
        break;
      case ShareTargetDiscoveryChange.LOST:
        return 'lost';
        break;
      default:
        break;
    }
  },
});
{ "content_hash": "a326a945249954d148a9adb4d05fd25e", "timestamp": "", "source": "github", "line_count": 360, "max_line_length": 167, "avg_line_length": 29.98611111111111, "alnum_prop": 0.686706808707735, "repo_name": "ric2b/Vivaldi-browser", "id": "8091f15fc826b4c3f55d92dd0d140f77707f21d0", "size": "10795", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "chromium/chrome/browser/resources/nearby_internals/ui_trigger_tab.js", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
package pl.themolka.arcade.filter;

import pl.themolka.arcade.config.Ref;
import pl.themolka.arcade.game.Game;
import pl.themolka.arcade.game.GameModule;
import pl.themolka.arcade.game.IGameConfig;
import pl.themolka.arcade.game.IGameModuleConfig;

import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

/**
 * Game module that holds the match's named filters, keyed by id.
 * Insertion order is preserved (backed by a {@link LinkedHashMap}).
 */
public class FiltersGame extends GameModule {
    // Registered filters, keyed by id, in definition order.
    private final Map<String, UniqueFilter> filters = new LinkedHashMap<>();

    protected FiltersGame(Game game, IGameConfig.Library library, Config config) {
        // Materialize every configured filter set through the library.
        config.filterSets().get().forEach(filterConfig ->
                this.filters.put(filterConfig.id(), library.getOrDefine(game, filterConfig)));
    }

    /** Registers {@code filter} under its own id, replacing any previous entry. */
    public void addFilter(FilterSet filter) {
        filters.put(filter.getId(), filter);
    }

    /**
     * Looks up the filter registered under the trimmed {@code id}, falling
     * back to {@code def} when {@code id} is null or unknown.
     */
    public Filter filterOrDefault(String id, Filter def) {
        if (id == null) {
            return def;
        }

        UniqueFilter match = getFilter(id.trim());
        return match != null ? match : def;
    }

    /** Returns the filter registered under {@code id}, or null if absent. */
    public UniqueFilter getFilter(String id) {
        return getFilter(id, null);
    }

    /** Returns the filter registered under {@code id}, or {@code def} if absent. */
    public UniqueFilter getFilter(String id, UniqueFilter def) {
        return filters.getOrDefault(id, def);
    }

    /** Returns a live view of all registered filter ids. */
    public Set<String> getFilterIds() {
        return filters.keySet();
    }

    /** Returns a live view of all registered filters. */
    public Collection<UniqueFilter> getFilters() {
        return filters.values();
    }

    /** Unregisters {@code filter} by its id. */
    public void removeFilter(UniqueFilter filter) {
        removeFilter(filter.getId());
    }

    /** Unregisters whatever filter is registered under {@code id}, if any. */
    public void removeFilter(String id) {
        filters.remove(id);
    }

    public interface Config extends IGameModuleConfig<FiltersGame> {
        Ref<Set<FilterSet.Config>> filterSets();

        @Override
        default FiltersGame create(Game game, Library library) {
            return new FiltersGame(game, library, this);
        }
    }
}
{ "content_hash": "9734cb1bb681039f486c5d9f3547772f", "timestamp": "", "source": "github", "line_count": 72, "max_line_length": 82, "avg_line_length": 27.125, "alnum_prop": 0.6543778801843319, "repo_name": "ShootGame/Arcade2", "id": "d75aae2146b2cc3a57031a40c5ab397f108b4260", "size": "2557", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/pl/themolka/arcade/filter/FiltersGame.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "1961019" } ], "symlink_target": "" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc --> <title>nl.esciencecenter.xenon.schedulers Class Hierarchy (xenon-2.3.0 2.6.0 API)</title> <link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="nl.esciencecenter.xenon.schedulers Class Hierarchy (xenon-2.3.0 2.6.0 API)"; } } catch(err) { } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../overview-summary.html">Overview</a></li> <li><a href="package-summary.html">Package</a></li> <li>Class</li> <li class="navBarCell1Rev">Tree</li> <li><a href="../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../index-all.html">Index</a></li> <li><a href="../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../nl/esciencecenter/xenon/filesystems/package-tree.html">Prev</a></li> <li><a href="../../../../nl/esciencecenter/xenon/utils/package-tree.html">Next</a></li> </ul> <ul class="navList"> <li><a href="../../../../index.html?nl/esciencecenter/xenon/schedulers/package-tree.html" target="_top">Frames</a></li> <li><a href="package-tree.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script 
type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h1 class="title">Hierarchy For Package nl.esciencecenter.xenon.schedulers</h1> <span class="packageHierarchyLabel">Package Hierarchies:</span> <ul class="horizontal"> <li><a href="../../../../overview-tree.html">All Packages</a></li> </ul> </div> <div class="contentContainer"> <h2 title="Class Hierarchy">Class Hierarchy</h2> <ul> <li type="circle">java.lang.Object <ul> <li type="circle">nl.esciencecenter.xenon.schedulers.<a href="../../../../nl/esciencecenter/xenon/schedulers/JobDescription.html" title="class in nl.esciencecenter.xenon.schedulers"><span class="typeNameLink">JobDescription</span></a></li> <li type="circle">nl.esciencecenter.xenon.schedulers.<a href="../../../../nl/esciencecenter/xenon/schedulers/Scheduler.html" title="class in nl.esciencecenter.xenon.schedulers"><span class="typeNameLink">Scheduler</span></a> (implements java.lang.AutoCloseable)</li> <li type="circle">java.lang.Throwable (implements java.io.Serializable) <ul> <li type="circle">java.lang.Exception <ul> <li type="circle">nl.esciencecenter.xenon.<a href="../../../../nl/esciencecenter/xenon/XenonException.html" title="class in nl.esciencecenter.xenon"><span class="typeNameLink">XenonException</span></a> <ul> <li type="circle">nl.esciencecenter.xenon.schedulers.<a href="../../../../nl/esciencecenter/xenon/schedulers/IncompleteJobDescriptionException.html" title="class in nl.esciencecenter.xenon.schedulers"><span class="typeNameLink">IncompleteJobDescriptionException</span></a></li> <li type="circle">nl.esciencecenter.xenon.schedulers.<a href="../../../../nl/esciencecenter/xenon/schedulers/InvalidJobDescriptionException.html" title="class in 
nl.esciencecenter.xenon.schedulers"><span class="typeNameLink">InvalidJobDescriptionException</span></a></li> <li type="circle">nl.esciencecenter.xenon.schedulers.<a href="../../../../nl/esciencecenter/xenon/schedulers/NoSuchJobException.html" title="class in nl.esciencecenter.xenon.schedulers"><span class="typeNameLink">NoSuchJobException</span></a></li> <li type="circle">nl.esciencecenter.xenon.schedulers.<a href="../../../../nl/esciencecenter/xenon/schedulers/NoSuchQueueException.html" title="class in nl.esciencecenter.xenon.schedulers"><span class="typeNameLink">NoSuchQueueException</span></a></li> <li type="circle">nl.esciencecenter.xenon.schedulers.<a href="../../../../nl/esciencecenter/xenon/schedulers/UnsupportedJobDescriptionException.html" title="class in nl.esciencecenter.xenon.schedulers"><span class="typeNameLink">UnsupportedJobDescriptionException</span></a></li> </ul> </li> </ul> </li> </ul> </li> </ul> </li> </ul> <h2 title="Interface Hierarchy">Interface Hierarchy</h2> <ul> <li type="circle">nl.esciencecenter.xenon.<a href="../../../../nl/esciencecenter/xenon/AdaptorDescription.html" title="interface in nl.esciencecenter.xenon"><span class="typeNameLink">AdaptorDescription</span></a> <ul> <li type="circle">nl.esciencecenter.xenon.schedulers.<a href="../../../../nl/esciencecenter/xenon/schedulers/SchedulerAdaptorDescription.html" title="interface in nl.esciencecenter.xenon.schedulers"><span class="typeNameLink">SchedulerAdaptorDescription</span></a></li> </ul> </li> <li type="circle">nl.esciencecenter.xenon.schedulers.<a href="../../../../nl/esciencecenter/xenon/schedulers/JobStatus.html" title="interface in nl.esciencecenter.xenon.schedulers"><span class="typeNameLink">JobStatus</span></a></li> <li type="circle">nl.esciencecenter.xenon.schedulers.<a href="../../../../nl/esciencecenter/xenon/schedulers/QueueStatus.html" title="interface in nl.esciencecenter.xenon.schedulers"><span class="typeNameLink">QueueStatus</span></a></li> <li 
type="circle">nl.esciencecenter.xenon.schedulers.<a href="../../../../nl/esciencecenter/xenon/schedulers/Streams.html" title="interface in nl.esciencecenter.xenon.schedulers"><span class="typeNameLink">Streams</span></a></li> </ul> </div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.bottom.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../overview-summary.html">Overview</a></li> <li><a href="package-summary.html">Package</a></li> <li>Class</li> <li class="navBarCell1Rev">Tree</li> <li><a href="../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../index-all.html">Index</a></li> <li><a href="../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../nl/esciencecenter/xenon/filesystems/package-tree.html">Prev</a></li> <li><a href="../../../../nl/esciencecenter/xenon/utils/package-tree.html">Next</a></li> </ul> <ul class="navList"> <li><a href="../../../../index.html?nl/esciencecenter/xenon/schedulers/package-tree.html" target="_top">Frames</a></li> <li><a href="package-tree.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> </body> </html>
{ "content_hash": "ed4d99ba72ce977e6fd343b9776c4630", "timestamp": "", "source": "github", "line_count": 163, "max_line_length": 279, "avg_line_length": 48.484662576687114, "alnum_prop": 0.6780969252182716, "repo_name": "NLeSC/Xenon", "id": "4db20bb62bd53b668a281c745fe55c9aace3bf5c", "size": "7903", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "docs/versions/2.6.0/javadoc/nl/esciencecenter/xenon/schedulers/package-tree.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "762" }, { "name": "Java", "bytes": "1547681" }, { "name": "Shell", "bytes": "4009" } ], "symlink_target": "" }
package com.hockeyhurd.hcorelib.mod.client.gui; import com.hockeyhurd.hcorelib.api.math.expressions.Expression; import com.hockeyhurd.hcorelib.api.math.expressions.GlobalConstants; import com.hockeyhurd.hcorelib.api.math.expressions.Interpreter; import com.hockeyhurd.hcorelib.api.math.expressions.InterpreterResult; import com.hockeyhurd.hcorelib.mod.ClientProxy; import com.hockeyhurd.hcorelib.mod.HCoreLibMain; import com.hockeyhurd.hcorelib.mod.common.ModRegistry; import net.minecraft.client.Minecraft; import net.minecraft.client.gui.GuiButton; import net.minecraft.client.gui.GuiScreen; import net.minecraft.client.renderer.GlStateManager; import net.minecraft.client.renderer.OpenGlHelper; import net.minecraft.item.ItemStack; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.util.EnumHand; import net.minecraft.util.ResourceLocation; import net.minecraftforge.fml.relauncher.Side; import net.minecraftforge.fml.relauncher.SideOnly; import org.lwjgl.opengl.GL11; import java.util.HashMap; import java.util.Map; /** * Gui interface for ItemCalculator. 
 *
 * @author hockeyhurd
 * @version 11/10/16
 */
@SideOnly(Side.CLIENT)
public final class GuiCalculator extends GuiScreen {

    // Background texture for the calculator GUI.
    private static final ResourceLocation texture = new ResourceLocation(HCoreLibMain.assetDir, "textures/gui/GuiCalculator.png");

    // Pixel dimensions of the GUI window.
    private int xSize, ySize;
    // Top-left corner of the GUI; computed from the screen size in initGui().
    private int guiLeft, guiTop;
    // Current input expression exactly as shown on the display.
    private String drawString;
    // Fixed-size character buffer backing drawString (0x20 = 32 chars max input).
    private char[] drawBuffer;
    // Number of characters currently used in drawBuffer.
    private int charIndex;
    // Digit buttons 1-9 plus the bottom row '(', 0, ')' (12 entries total).
    private GuiButton[] numPad;
    private GuiButton deleteButton, clearButton;
    // Operator/memory/constant buttons, keyed by their display string.
    private Map<String, GuiButton> buttonMap;
    // Calculator memory register (M+/M-/M*/M//MC/MR/MS keys).
    private MemoryBuffer memoryBuffer;
    // Result of the last evaluated expression, if any.
    private InterpreterResult lastResult;

    public GuiCalculator() {
        this.xSize = 248;
        this.ySize = 166;
        this.drawBuffer = new char[0x20];
        this.charIndex = 0;
        this.drawString = "";
        buttonMap = new HashMap<String, GuiButton>();
        memoryBuffer = new MemoryBuffer();
    }

    @Override
    public void initGui() {
        super.initGui();

        // Restore persisted calculator state from the held calculator item's NBT.
        final ItemStack calcStack = mc.player.getHeldItem(EnumHand.MAIN_HAND);
        if (calcStack != null && calcStack.getCount() > 0 && calcStack.getItem() == ModRegistry.ModItems.itemCalculator.getItem().getItem()) {
            NBTTagCompound comp = calcStack.getTagCompound();
            if (comp != null) {
                drawString = comp.getString("CalculatorInput");
                charIndex = comp.getInteger("CalculatorInputCharCount");
                memoryBuffer.store(comp.getDouble("CalculatorMemoryBuffer"));

                if (lastResult == null) lastResult = new InterpreterResult();
                lastResult.updateResult(comp.getString("CalculatorLastResultString"), comp.getDouble("CalculatorLastResult"));

                // comp.setDouble("CalculatorMemoryBuffer", memoryBuffer.read());
                // comp.setDouble("CalculatorLastResult", lastResult.getResult());
                // comp.setString("CalculatorLastResultString", lastResult.getExpressionString());

                // Rebuild the character buffer from the restored string.
                for (int i = 0; i < charIndex; i++)
                    drawBuffer[i] = drawString.charAt(i);
            }
        }

        // Center the GUI on screen.
        this.guiLeft = (this.width - this.xSize) >> 1;
        this.guiTop = (this.height - this.ySize) >> 1;

        // if (numPad == null) numPad = new GuiButton[9];
        if (numPad == null) numPad = new GuiButton[12];

        // final int bw = xSize / 10;
        // final int bh = ySize / 10;
        // Button width/height in pixels (0x14 = 20).
        final int bw = 0x14;
        final int bh = 0x14;

        // Lay out digits 1-9 as a 3x3 grid; button id == digit - 1.
        for (int y = 0; y < 3; y++) {
            for (int x = 0; x < 3; x++) {
                GuiButton button = new GuiButton(x + y * 3, guiLeft + ((x + 1) * (bw + 4)) + (bw >> 1), height - guiTop - ((4 - y) * (bh + 4)) - (bh >> 2), bw, bh, Integer.toString(1 + x + y * 3));
                numPad[x + y * 3] = button;
                buttonList.add(button);
            }
        }

        // Bottom row beneath the digit grid: '(', 0, ')'.
        numPad[9] = new GuiButton(9, numPad[6].x, numPad[6].y + bh + 4, bw, bh, "(");
        numPad[10] = new GuiButton(10, numPad[7].x, numPad[7].y + bh + 4, bw, bh, Integer.toString(0));
        numPad[11] = new GuiButton(11, numPad[8].x, numPad[8].y + bh + 4, bw, bh, ")");
        buttonList.add(numPad[9]);
        buttonList.add(numPad[10]);
        buttonList.add(numPad[11]);

        // Clear and backspace sit in a column left of the digit grid.
        clearButton = new GuiButton(buttonList.size(), numPad[0].x - bw - 4, numPad[4].y, bw, bh, "C");
        buttonList.add(clearButton);

        deleteButton = new GuiButton(buttonList.size(), clearButton.x, numPad[1].y, bw, bh, "<-");
        buttonList.add(deleteButton);

        // Re-used to chain-position buttons relative to the previous one.
        GuiButton bufferButton;

        // equalsButtons = new GuiButton(buttonList.size(), numPad[11].xPosition + bw + 4, numPad[11].yPosition, bw, bh, "=");
        // buttonList.add(equalsButtons);

        // Operators/memory keys are positioned relative to the num pad columns.
        buttonMap.put("=", new GuiButton(buttonList.size(), clearButton.x, numPad[7].y, bw, bh, "="));
        buttonMap.put(".", new GuiButton(buttonList.size(), clearButton.x, numPad[11].y, bw, bh, "."));
        buttonMap.put("+", new GuiButton(buttonList.size(), numPad[11].x + bw + 4, numPad[11].y, bw, bh, "+"));
        buttonMap.put("-", new GuiButton(buttonList.size(), numPad[8].x + bw + 4, numPad[8].y, bw, bh, "-"));
        buttonMap.put("*", new GuiButton(buttonList.size(), numPad[5].x + bw + 4, numPad[5].y, bw, bh, "*"));
        buttonMap.put("/", new GuiButton(buttonList.size(), numPad[2].x + bw + 4, numPad[2].y, bw, bh, "/"));
        buttonMap.put("M+", bufferButton = new GuiButton(buttonList.size(), numPad[11].x + (bw + 4 << 1), numPad[11].y, bw, bh, "M+"));
        buttonMap.put("M-", new GuiButton(buttonList.size(), numPad[8].x + (bw + 4 << 1), numPad[8].y, bw, bh, "M-"));
        buttonMap.put("M*", new GuiButton(buttonList.size(), numPad[5].x + (bw + 4 << 1), numPad[5].y, bw, bh, "M*"));
        buttonMap.put("M/", new GuiButton(buttonList.size(), numPad[2].x + (bw + 4 << 1), numPad[2].y, bw, bh, "M/"));
        buttonMap.put("MC", new GuiButton(buttonList.size(), numPad[0].x, numPad[0].y - bh - 4, bw, bh, "MC"));
        buttonMap.put("MR", new GuiButton(buttonList.size(), numPad[1].x, numPad[1].y - bh - 4, bw, bh, "MR"));
        buttonMap.put("MS", new GuiButton(buttonList.size(), numPad[2].x, numPad[2].y - bh - 4, bw, bh, "MS"));
        buttonMap.put("^", new GuiButton(buttonList.size(), numPad[2].x + bw + 4, numPad[2].y - bh - 4, bw, bh, "^"));
        buttonMap.put("" + GlobalConstants.SQ_ROOT_CHAR, new GuiButton(buttonList.size(), numPad[2].x + (bw + 4 << 1), numPad[2].y - bh - 4, bw, bh, "" + GlobalConstants.SQ_ROOT_CHAR));
        buttonMap.put("e", bufferButton = new GuiButton(buttonList.size(), bufferButton.x + bw + 4, numPad[11].y, bw, bh, "e"));
        buttonMap.put("" + GlobalConstants.PI_CHAR, new GuiButton(buttonList.size(), bufferButton.x, numPad[8].y, bw, bh, "" + GlobalConstants.PI_CHAR));

        // NOTE(review): ids are reassigned here to the list size at insertion
        // time, i.e. sequentially after the num pad/clear/delete buttons.
        for (GuiButton button : buttonMap.values()) {
            button.id = buttonList.size();
            buttonList.add(button);
        }
    }

    @Override
    public void onGuiClosed() {
        super.onGuiClosed();

        // Persist calculator state back onto the held calculator item's NBT
        // so it survives reopening the GUI.
        final ItemStack calcStack = mc.player.getHeldItem(EnumHand.MAIN_HAND);
        if (calcStack != null && calcStack.getCount() > 0 && calcStack.getItem() == ModRegistry.ModItems.itemCalculator.getItem().getItem()) {
            NBTTagCompound comp = calcStack.getTagCompound();
            if (comp == null) calcStack.setTagCompound((comp = new NBTTagCompound()));

            comp.setString("CalculatorInput", drawString);
            comp.setInteger("CalculatorInputCharCount", charIndex);
            comp.setDouble("CalculatorMemoryBuffer", memoryBuffer.read());

            if (lastResult != null) {
                comp.setDouble("CalculatorLastResult", lastResult.getResult());
                comp.setString("CalculatorLastResultString", lastResult.getExpressionString());
            }
            else {
                comp.setDouble("CalculatorLastResult", 0.0d);
                comp.setString("CalculatorLastResultString", "0.0");
            }
        }
    }

    /**
     * Draws the current input string onto the display area.
     *
     * @param x mouse x (unused here).
     * @param y mouse y (unused here).
     */
    public void
drawGuiContainerForegroundLayer(int x, int y) {
        // fontRendererObj.drawString(drawString, xSize - (width >> 1) - 8, guiTop, 0xffffffff);
        // Draw the current input string in white onto the display area.
        fontRenderer.drawString(drawString, (xSize >> 3) - (fontRenderer.getStringWidth("00") >> 1), (ySize >> 3), 0xffffffff);
    }

    /**
     * Draws the calculator background texture, centered at (guiLeft, guiTop).
     *
     * @param f partial render tick.
     * @param x mouse x (unused here).
     * @param y mouse y (unused here).
     */
    public void drawGuiContainerBackgroundLayer(float f, int x, int y) {
        GL11.glColor4f(1f, 1f, 1f, 1f);
        Minecraft.getMinecraft().getTextureManager().bindTexture(texture);
        drawTexturedModalRect(guiLeft, guiTop, 0, 0, xSize, ySize);

        /*final int size = fontRendererObj.getCharWidth('0');
        drawRect(guiLeft + xSize - (width >> 1) - (size >> 1) - 8, (guiTop << 1) - (size >> 1), size * 53 - 8, ((guiTop << 1) + (size << 1)) - (size >> 1), 0xff000000);*/
    }

    /**
     * Renders background, buttons (via super), then the foreground text,
     * saving/restoring GL state around each phase.
     */
    @Override
    public void drawScreen(int mouseX, int mouseY, float partialTicks) {
        this.drawDefaultBackground();

        int i = this.guiLeft;
        int j = this.guiTop;

        this.drawGuiContainerBackgroundLayer(partialTicks, mouseX, mouseY);

        // Disable lighting/depth while the buttons are drawn by super, then
        // translate to the GUI origin and restore state for the foreground.
        GlStateManager.disableRescaleNormal();
        GlStateManager.disableLighting();
        GlStateManager.disableDepth();
        super.drawScreen(mouseX, mouseY, partialTicks);
        GlStateManager.pushMatrix();
        GlStateManager.translate((float) i, (float) j, 0.0F);
        GlStateManager.color(1.0F, 1.0F, 1.0F, 1.0F);
        GlStateManager.enableRescaleNormal();
        OpenGlHelper.setLightmapTextureCoords(OpenGlHelper.lightmapTexUnit, 240.0F, 240.0F);
        GlStateManager.color(1.0F, 1.0F, 1.0F, 1.0F);
        this.drawGuiContainerForegroundLayer(mouseX, mouseY);
        GlStateManager.popMatrix();
        GlStateManager.enableLighting();
        GlStateManager.enableDepth();
    }

    /**
     * Dispatches a pressed button: digits/operators append their display
     * character, while clear/delete/equals/sqrt/memory keys get special
     * handling below.
     */
    @Override
    public void actionPerformed(GuiButton button) {
        // Is num key:
        // (also matches operator/constant keys registered in buttonMap that
        // are not '=', memory keys, or the sqrt key)
        if (button.id < numPad.length || (!button.displayString.equals("=") && !button.displayString.startsWith("M") && !button.displayString.startsWith("\u221A") && buttonMap.containsKey(button.displayString))) {
            // Append the button's first display character, if there is room.
            if (charIndex < drawBuffer.length) {
                // drawBuffer[charIndex++] = (char) ('0' + button.id + 1);
                drawBuffer[charIndex++] = button.displayString.charAt(0);
                drawString = new String(drawBuffer);
            }
        }
else if (button.id == clearButton.id) { for (int i = 0; i < charIndex; i++) drawBuffer[i] = 0; charIndex = 0; drawString = ""; } else if (button.id == deleteButton.id) { if (charIndex > 0) { drawBuffer[--charIndex] = 0; drawString = new String(drawBuffer); } } // else if (button.id == equalsButtons.id) { else if (button.id == buttonMap.get("=").id) { // if (drawString.contains("=")) return; Interpreter interpreter = new Interpreter(); lastResult = interpreter.processExpressionString(new Expression(drawString.substring(0, charIndex)), ClientProxy.getPlayer().getUniqueID().hashCode()); if (!lastResult.isEmpty()) { // drawString = lastResult.getExpressionString(); drawString = Double.toString(lastResult.getResult()); charIndex = drawString.length(); syncStringBuffer(); } } else if (button.displayString.startsWith("\u221A")) { drawBuffer[charIndex++] = '^'; drawBuffer[charIndex++] = '('; drawBuffer[charIndex++] = '1'; drawBuffer[charIndex++] = '/'; drawBuffer[charIndex++] = '2'; drawBuffer[charIndex++] = ')'; drawString = new String(drawBuffer); } else if (button.displayString.startsWith("M")) { final char secondChar = button.displayString.charAt(1); final double lastValue = lastResult != null ? lastResult.getResult() : 0.0d; switch (secondChar) { case '+': memoryBuffer.add(lastValue); break; case '-': memoryBuffer.subtract(lastValue); break; case '*': memoryBuffer.multiply(lastValue); break; case '/': if (lastValue != 0.0d) memoryBuffer.divide(lastValue); break; case 'C': memoryBuffer.clear(); break; case 'R': drawString = Double.toString(memoryBuffer.read()); charIndex = drawString.length(); syncStringBuffer(); break; case 'S': memoryBuffer.store(lastValue); break; default: } } } /** * Synchronizes the drawBuffer with the drawString. 
*/ private void syncStringBuffer() { if (drawString == null || drawString.isEmpty()) return; int i; for (i = 0; i < drawString.length(); i++) drawBuffer[i] = drawString.charAt(i); for ( ; i < drawBuffer.length; i++) drawBuffer[i] = '\0'; } /** * Static class for storing values and emulating the * memory buffer function of calculators. * * @author hockeyhurd * @version 12/19/16 */ private static class MemoryBuffer { private double value; MemoryBuffer() { this(0.0d); } MemoryBuffer(double value) { if (value != Double.NaN) this.value = value; } void add(double value) { if (value != Double.NaN) this.value += value; } void subtract(double value) { if (value != Double.NaN) this.value -= value; } void multiply(double value) { if (value != Double.NaN) this.value *= value; } void divide(double value) { if (value != Double.NaN) this.value /= value; } void store(double value) { if (value != Double.NaN) this.value = value; } double read() { return value; } void clear() { value = 0.0d; } } }
{ "content_hash": "b72eb92b31774059a4ac6a261158a410", "timestamp": "", "source": "github", "line_count": 378, "max_line_length": 185, "avg_line_length": 38.857142857142854, "alnum_prop": 0.5745506535947712, "repo_name": "hockeyhurd/HCoreLib", "id": "57c485752e9a9c72acbd430ee63a5a436e315266", "size": "14688", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "com/hockeyhurd/hcorelib/mod/client/gui/GuiCalculator.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "649175" } ], "symlink_target": "" }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> <title>coalgebras: Not compatible 👼</title> <link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" /> <link href="../../../../../bootstrap.min.css" rel="stylesheet"> <link href="../../../../../bootstrap-custom.css" rel="stylesheet"> <link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet"> <script src="../../../../../moment.min.js"></script> <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries --> <!-- WARNING: Respond.js doesn't work if you view the page via file:// --> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script> <script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script> <![endif]--> </head> <body> <div class="container"> <div class="navbar navbar-default" role="navigation"> <div class="container-fluid"> <div class="navbar-header"> <a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a> </div> <div id="navbar" class="collapse navbar-collapse"> <ul class="nav navbar-nav"> <li><a href="../..">clean / released</a></li> <li class="active"><a href="">8.13.1 / coalgebras - 8.7.0</a></li> </ul> </div> </div> </div> <div class="article"> <div class="row"> <div class="col-md-12"> <a href="../..">« Up</a> <h1> coalgebras <small> 8.7.0 <span class="label label-info">Not compatible 👼</span> </small> </h1> <p>📅 <em><script>document.write(moment("2022-10-04 03:59:34 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-10-04 03:59:34 UTC)</em><p> <h2>Context</h2> <pre># Packages matching: installed # Name # Installed # Synopsis base-bigarray base base-threads base base-unix base conf-findutils 1 Virtual package relying on findutils conf-gmp 4 Virtual package relying on a GMP lib system installation coq 8.13.1 Formal proof management 
system num 1.4 The legacy Num library for arbitrary-precision integer and rational arithmetic ocaml 4.13.1 The OCaml compiler (virtual package) ocaml-base-compiler 4.13.1 Official release 4.13.1 ocaml-config 2 OCaml Switch Configuration ocaml-options-vanilla 1 Ensure that OCaml is compiled with no special options enabled ocamlfind 1.9.5 A library manager for OCaml zarith 1.12 Implements arithmetic and logical operations over arbitrary-precision integers # opam file: opam-version: &quot;2.0&quot; maintainer: &quot;Hugo.Herbelin@inria.fr&quot; homepage: &quot;https://github.com/coq-contribs/coalgebras&quot; license: &quot;LGPL&quot; build: [make &quot;-j%{jobs}%&quot;] install: [make &quot;install&quot;] remove: [&quot;rm&quot; &quot;-R&quot; &quot;%{lib}%/coq/user-contrib/Coalgebras&quot;] depends: [ &quot;ocaml&quot; &quot;coq&quot; {&gt;= &quot;8.7&quot; &amp; &lt; &quot;8.8~&quot;} ] tags: [ &quot;keyword: coalgebra&quot; &quot;keyword: bisimulation&quot; &quot;keyword: weakly final&quot; &quot;keyword: coiteration&quot; &quot;keyword: co-inductive types&quot; &quot;category: Mathematics/Category Theory&quot; &quot;date: 2008-10&quot; ] authors: [ &quot;Milad Niqui &lt;M.Niqui@cwi.nl&gt; [http://www.cwi.nl/~milad]&quot; ] bug-reports: &quot;https://github.com/coq-contribs/coalgebras/issues&quot; dev-repo: &quot;git+https://github.com/coq-contribs/coalgebras.git&quot; synopsis: &quot;Coalgebras, bisimulation and lambda-coiteration&quot; description: &quot;This contribution contains a formalisation of coalgebras, bisimulation on coalgebras, weakly final coalgebras, lambda-coiteration definition scheme (including primitive corecursion) and a version of lambda-bisimulation. The formalisation is modular. 
The implementation of the module types for streams and potentially infinite Peano numbers are provided using the coinductive types.&quot; flags: light-uninstall url { src: &quot;https://github.com/coq-contribs/coalgebras/archive/v8.7.0.tar.gz&quot; checksum: &quot;md5=73818481b949ccbb1d4579b4da27c951&quot; } </pre> <h2>Lint</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Dry install 🏜️</h2> <p>Dry install with the current Coq version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam install -y --show-action coq-coalgebras.8.7.0 coq.8.13.1</code></dd> <dt>Return code</dt> <dd>5120</dd> <dt>Output</dt> <dd><pre>[NOTE] Package coq is already installed (current version is 8.13.1). The following dependencies couldn&#39;t be met: - coq-coalgebras -&gt; coq &lt; 8.8~ -&gt; ocaml &lt; 4.10 base of this switch (use `--unlock-base&#39; to force) No solution found, exiting </pre></dd> </dl> <p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-coalgebras.8.7.0</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Install dependencies</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Install 🚀</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Installation size</h2> <p>No files were installed.</p> <h2>Uninstall 🧹</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Missing removes</dt> <dd> none </dd> <dt>Wrong removes</dt> <dd> none </dd> </dl> </div> </div> </div> <hr/> <div class="footer"> <p class="text-center"> Sources are on <a 
href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣 </p> </div> </div> <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script> <script src="../../../../../bootstrap.min.js"></script> </body> </html>
{ "content_hash": "c26fde6b4c2f2951e46029da065c262c", "timestamp": "", "source": "github", "line_count": 172, "max_line_length": 395, "avg_line_length": 43.348837209302324, "alnum_prop": 0.5583422746781116, "repo_name": "coq-bench/coq-bench.github.io", "id": "7e903082aeb91c78b765f572be72de1141a543aa", "size": "7481", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.13.1-2.0.10/released/8.13.1/coalgebras/8.7.0.html", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
{ "content_hash": "dfca2768dccf1baadbbdd8454cf56288", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 31, "avg_line_length": 9.692307692307692, "alnum_prop": 0.7063492063492064, "repo_name": "mdoering/backbone", "id": "1d94278f4848afb7e3bdb1e03df133aab2ca237c", "size": "176", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Lamiales/Lamiaceae/Origanum/Origanum floribundum/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
<?xml version="1.0" encoding="utf-8"?> <desktop> <title>Haberdashery Management</title> <icon>icon/icon.png</icon> <laf>com.sun.java.swing.plaf.windows.WindowsLookAndFeel</laf> <bgcolor>230</bgcolor> <login>system.tasks.Login</login> <exit>system.tasks.Exit</exit> <blocker_time>10</blocker_time> <!-- in minutes --> <messages> <information>Information</information> <confirm>Confirmation</confirm> <warning>Warning</warning> <error>Error</error> <no_permission>The user does not have permissions to perform this operation</no_permission> <communication_error>Communication error</communication_error> <wait>Please, wait...</wait> <true_string>Yes</true_string> <false_string>No</false_string> </messages> </desktop>
{ "content_hash": "237a53352712729e41cce955af4b51e5", "timestamp": "", "source": "github", "line_count": 24, "max_line_length": 93, "avg_line_length": 32.291666666666664, "alnum_prop": 0.7006451612903226, "repo_name": "vndly/saas", "id": "bf9118e10a0f4851c7a6a9c4333374841cc9b48e", "size": "775", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "client/src/client/app/system/conf/desktop.xml", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "594322" } ], "symlink_target": "" }
// -----------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// -----------------------------------------------------------------------------
//------------------------------------------------------------------------------
// <auto-generated>
//     This code was generated by a tool.
//     Runtime Version:4.0.30319.42000
//
//     Changes to this file may cause incorrect behavior and will be lost if
//     the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------

namespace Microsoft.Azure.Commands.Batch.Models
{
    /// <summary>
    /// PowerShell-facing wrapper for <see cref="Microsoft.Azure.Batch.PoolUsageMetrics"/>.
    /// Every property forwards directly to the wrapped object-model instance.
    /// </summary>
    public class PSPoolUsageMetrics
    {
        // Wrapped Batch object-model instance; guaranteed non-null after construction.
        internal Microsoft.Azure.Batch.PoolUsageMetrics omObject;

        /// <summary>
        /// Wraps the supplied object-model instance.
        /// </summary>
        /// <param name="omObject">The object-model instance to wrap; must not be null.</param>
        internal PSPoolUsageMetrics(Microsoft.Azure.Batch.PoolUsageMetrics omObject)
        {
            this.omObject = omObject ?? throw new System.ArgumentNullException("omObject");
        }

        /// <summary>Forwards <c>DataEgressGiB</c> from the wrapped instance.</summary>
        public double DataEgressGiB => this.omObject.DataEgressGiB;

        /// <summary>Forwards <c>DataIngressGiB</c> from the wrapped instance.</summary>
        public double DataIngressGiB => this.omObject.DataIngressGiB;

        /// <summary>Forwards <c>EndTime</c> from the wrapped instance.</summary>
        public System.DateTime EndTime => this.omObject.EndTime;

        /// <summary>Forwards <c>PoolId</c> from the wrapped instance.</summary>
        public string PoolId => this.omObject.PoolId;

        /// <summary>Forwards <c>StartTime</c> from the wrapped instance.</summary>
        public System.DateTime StartTime => this.omObject.StartTime;

        /// <summary>Forwards <c>TotalCoreHours</c> from the wrapped instance.</summary>
        public double TotalCoreHours => this.omObject.TotalCoreHours;

        /// <summary>Forwards <c>VirtualMachineSize</c> from the wrapped instance.</summary>
        public string VirtualMachineSize => this.omObject.VirtualMachineSize;
    }
}
{ "content_hash": "19bc1be1c19d6976445438f5c4d6de15", "timestamp": "", "source": "github", "line_count": 98, "max_line_length": 84, "avg_line_length": 27.112244897959183, "alnum_prop": 0.48099360180654877, "repo_name": "akurmi/azure-powershell", "id": "b12388434c492f1ad38cf7d5b158ab8b19852ba3", "size": "2659", "binary": false, "copies": "3", "ref": "refs/heads/dev", "path": "src/ResourceManager/AzureBatch/Commands.Batch/Models.Generated/PSPoolUsageMetrics.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "15822" }, { "name": "C#", "bytes": "29663399" }, { "name": "HTML", "bytes": "209" }, { "name": "JavaScript", "bytes": "4979" }, { "name": "PHP", "bytes": "41" }, { "name": "PowerShell", "bytes": "3086042" }, { "name": "Shell", "bytes": "50" } ], "symlink_target": "" }
#include "bgp/routing-instance/routing_instance.h"

#include <boost/foreach.hpp>
#include <boost/assign/list_of.hpp>

#include "base/lifetime.h"
#include "base/task_annotations.h"
#include "bgp/bgp_config.h"
#include "bgp/bgp_factory.h"
#include "bgp/bgp_log.h"
#include "bgp/bgp_server.h"
#include "bgp/routing-instance/peer_manager.h"
#include "bgp/routing-instance/routepath_replicator.h"
#include "bgp/routing-instance/routing_instance_log.h"
#include "bgp/routing-instance/rtarget_group_mgr.h"
#include "bgp/routing-instance/rtarget_group.h"
#include "bgp/routing-instance/service_chaining.h"
#include "bgp/routing-instance/static_route.h"
#include "db/db_table.h"

using boost::assign::list_of;
using boost::system::error_code;
using std::make_pair;
using std::set;
using std::string;
using std::vector;

// 1000-entry Sandesh trace buffer used for routing-instance trace messages.
SandeshTraceBufferPtr RoutingInstanceTraceBuf(
    SandeshTraceBufferCreate(RTINSTANCE_TRACE_BUF, 1000));

// LifetimeActor for the RoutingInstanceMgr.  Deletion is unconditionally
// permitted (MayDelete returns true) and Shutdown is a no-op; all real
// teardown happens elsewhere.
class RoutingInstanceMgr::DeleteActor : public LifetimeActor {
  public:
    explicit DeleteActor(RoutingInstanceMgr *manager)
        : LifetimeActor(manager->server_->lifetime_manager()),
          manager_(manager) {
    }
    virtual bool MayDelete() const {
        return true;
    }
    virtual void Shutdown() {
    }
    virtual void Destroy() {
        // memory is deallocated by BgpServer scoped_ptr; only the delete
        // reference on the owning server is released here.
        manager_->server_delete_ref_.Reset(NULL);
    }

  private:
    RoutingInstanceMgr *manager_;
};

RoutingInstanceMgr::RoutingInstanceMgr(BgpServer *server) :
        server_(server),
        deleter_(new DeleteActor(this)),
        server_delete_ref_(this, server->deleter()) {
}

RoutingInstanceMgr::~RoutingInstanceMgr() {
}

// Initiate deletion of this manager via its lifetime actor.
void RoutingInstanceMgr::ManagedDelete() {
    deleter_->Delete();
}

LifetimeActor *RoutingInstanceMgr::deleter() {
    return deleter_.get();
}

// True once deletion of the manager has been triggered.
bool RoutingInstanceMgr::deleted() {
    return deleter()->IsDeleted();
}

//
// Go through all export targets for the RoutingInstance and add an entry for
// each one to the InstanceTargetMap.
// void RoutingInstanceMgr::InstanceTargetAdd(RoutingInstance *rti) { for (RoutingInstance::RouteTargetList::const_iterator it = rti->GetExportList().begin(); it != rti->GetExportList().end(); ++it) { target_map_.insert(make_pair(*it, rti)); } } // // Go through all export targets for the RoutingInstance and remove the entry // for each one from the InstanceTargetMap. Note that there may be multiple // entries in the InstanceTargetMap for a given export target. Hence we need // to make sure that we only remove the entry that matches the RoutingInstance. // void RoutingInstanceMgr::InstanceTargetRemove(const RoutingInstance *rti) { for (RoutingInstance::RouteTargetList::const_iterator it = rti->GetExportList().begin(); it != rti->GetExportList().end(); ++it) { for (InstanceTargetMap::iterator loc = target_map_.find(*it); loc != target_map_.end() && loc->first == *it; ++loc) { if (loc->second == rti) { target_map_.erase(loc); break; } } } } // // Lookup the RoutingInstance for the given RouteTarget. // const RoutingInstance *RoutingInstanceMgr::GetInstanceByTarget( const RouteTarget &rtarget) const { InstanceTargetMap::const_iterator loc = target_map_.find(rtarget); if (loc == target_map_.end()) { return NULL; } return loc->second; } // // Add an entry for the vn index to the VnIndexMap. // void RoutingInstanceMgr::InstanceVnIndexAdd(RoutingInstance *rti) { if (rti->virtual_network_index()) vn_index_map_.insert(make_pair(rti->virtual_network_index(), rti)); } // // Remove the entry for the vn index from the VnIndexMap. Note that there may // be multiple entries in the VnIndexMap for a given vn index target. Hence we // need to make sure that we remove the entry that matches the RoutingInstance. 
// void RoutingInstanceMgr::InstanceVnIndexRemove(const RoutingInstance *rti) { if (!rti->virtual_network_index()) return; int vn_index = rti->virtual_network_index(); for (VnIndexMap::iterator loc = vn_index_map_.find(vn_index); loc != vn_index_map_.end() && loc->first == vn_index; ++loc) { if (loc->second == rti) { vn_index_map_.erase(loc); break; } } } // // Lookup the RoutingInstance for the given vn index. // const RoutingInstance *RoutingInstanceMgr::GetInstanceByVnIndex( int vn_index) const { VnIndexMap::const_iterator loc = vn_index_map_.find(vn_index); if (loc == vn_index_map_.end()) return NULL; return loc->second; } // // Lookup the VN name for the given vn index. // string RoutingInstanceMgr::GetVirtualNetworkByVnIndex( int vn_index) const { const RoutingInstance *rti = GetInstanceByVnIndex(vn_index); return rti ? rti->virtual_network() : "unresolved"; } // // Lookup the vn index for the given RouteTarget. // // Return 0 if the RouteTarget does not map to a RoutingInstance. // Return -1 if the RouteTarget maps to multiple RoutingInstances // that belong to different VNs. // int RoutingInstanceMgr::GetVnIndexByRouteTarget( const RouteTarget &rtarget) const { int vn_index = 0; for (InstanceTargetMap::const_iterator loc = target_map_.find(rtarget); loc != target_map_.end() && loc->first == rtarget; ++loc) { int ri_vn_index = loc->second->virtual_network_index(); if (vn_index && ri_vn_index && ri_vn_index != vn_index) return -1; vn_index = ri_vn_index; } return vn_index; } // // Derive the vn index from the route targets in the ExtCommunity. // // If the result is ambiguous i.e. we have a RouteTarget that maps // to multiple vn indexes or we have multiple RouteTargets that map // to different vn indexes, return 0. 
// int RoutingInstanceMgr::GetVnIndexByExtCommunity( const ExtCommunity *ext_community) const { int vn_index = 0; BOOST_FOREACH(const ExtCommunity::ExtCommunityValue &comm, ext_community->communities()) { if (!ExtCommunity::is_route_target(comm)) continue; RouteTarget rtarget(comm); int rtgt_vn_index = GetVnIndexByRouteTarget(rtarget); if (rtgt_vn_index < 0 || (vn_index && rtgt_vn_index && rtgt_vn_index != vn_index)) { vn_index = 0; break; } else if (rtgt_vn_index) { vn_index = rtgt_vn_index; } } return vn_index; } int RoutingInstanceMgr::RegisterInstanceOpCallback(RoutingInstanceCb callback) { tbb::spin_rw_mutex::scoped_lock write_lock(rw_mutex_, true); size_t i = bmap_.find_first(); if (i == bmap_.npos) { i = callbacks_.size(); callbacks_.push_back(callback); } else { bmap_.reset(i); if (bmap_.none()) { bmap_.clear(); } callbacks_[i] = callback; } return i; } void RoutingInstanceMgr::UnregisterInstanceOpCallback(int listener) { tbb::spin_rw_mutex::scoped_lock write_lock(rw_mutex_, true); callbacks_[listener] = NULL; if ((size_t) listener == callbacks_.size() - 1) { while (!callbacks_.empty() && callbacks_.back() == NULL) { callbacks_.pop_back(); } if (bmap_.size() > callbacks_.size()) { bmap_.resize(callbacks_.size()); } } else { if ((size_t) listener >= bmap_.size()) { bmap_.resize(listener + 1); } bmap_.set(listener); } } void RoutingInstanceMgr::NotifyInstanceOp(string name, Operation op) { tbb::spin_rw_mutex::scoped_lock read_lock(rw_mutex_, false); for (InstanceOpListenersList::iterator iter = callbacks_.begin(); iter != callbacks_.end(); ++iter) { if (*iter != NULL) { RoutingInstanceCb cb = *iter; (cb)(name, op); } } } RoutingInstance *RoutingInstanceMgr::CreateRoutingInstance( const BgpInstanceConfig *config) { RoutingInstance *rtinstance = GetRoutingInstance(config->name()); if (rtinstance) { if (rtinstance->deleted()) { RTINSTANCE_LOG_MESSAGE(server_, SandeshLevel::SYS_WARN, RTINSTANCE_LOG_FLAG_ALL, config->name(), "Instance is recreated before pending 
deletion is complete"); return NULL; } else { // Duplicate instance creation request can be safely ignored RTINSTANCE_LOG_MESSAGE(server_, SandeshLevel::SYS_WARN, RTINSTANCE_LOG_FLAG_ALL, config->name(), "Instance already found during creation"); } return rtinstance; } rtinstance = BgpObjectFactory::Create<RoutingInstance>( config->name(), server_, this, config); int index = instances_.Insert(config->name(), rtinstance); rtinstance->ProcessConfig(server_); rtinstance->set_index(server_, index); InstanceTargetAdd(rtinstance); InstanceVnIndexAdd(rtinstance); // Notify clients about routing instance create NotifyInstanceOp(config->name(), INSTANCE_ADD); vector<string> import_rt(config->import_list().begin(), config->import_list().end()); vector<string> export_rt(config->export_list().begin(), config->export_list().end()); RTINSTANCE_LOG(Create, rtinstance, SandeshLevel::SYS_DEBUG, RTINSTANCE_LOG_FLAG_ALL, import_rt, export_rt, rtinstance->virtual_network(), rtinstance->virtual_network_index()); return rtinstance; } void RoutingInstanceMgr::UpdateRoutingInstance( const BgpInstanceConfig *config) { CHECK_CONCURRENCY("bgp::Config"); RoutingInstance *rtinstance = GetRoutingInstance(config->name()); if (rtinstance && rtinstance->deleted()) { RTINSTANCE_LOG_MESSAGE(server_, SandeshLevel::SYS_WARN, RTINSTANCE_LOG_FLAG_ALL, config->name(), "Instance is updated before pending deletion is complete"); return; } else if (!rtinstance) { RTINSTANCE_LOG_MESSAGE(server_, SandeshLevel::SYS_WARN, RTINSTANCE_LOG_FLAG_ALL, config->name(), "Instance not found during update"); assert(rtinstance != NULL); } InstanceTargetRemove(rtinstance); InstanceVnIndexRemove(rtinstance); rtinstance->UpdateConfig(server_, config); InstanceTargetAdd(rtinstance); InstanceVnIndexAdd(rtinstance); // Notify clients about routing instance create NotifyInstanceOp(config->name(), INSTANCE_UPDATE); vector<string> import_rt(config->import_list().begin(), config->import_list().end()); vector<string> 
export_rt(config->export_list().begin(), config->export_list().end()); RTINSTANCE_LOG(Update, rtinstance, SandeshLevel::SYS_DEBUG, RTINSTANCE_LOG_FLAG_ALL, import_rt, export_rt, rtinstance->virtual_network(), rtinstance->virtual_network_index()); } // // Concurrency: BGP Config task // // Trigger deletion of a particular routing-instance // // This involves several asynchronous steps such as // // 1. Close all peers (RibIn and RibOut) from every IPeerRib in the instance // 2. Close all tables (Flush all notifications, registrations and user data) // 3. etc. // void RoutingInstanceMgr::DeleteRoutingInstance(const string &name) { CHECK_CONCURRENCY("bgp::Config"); RoutingInstance *rtinstance = GetRoutingInstance(name); // Ignore if instance is not found as it might already have been deleted. if (rtinstance && rtinstance->deleted()) { RTINSTANCE_LOG_MESSAGE(server_, SandeshLevel::SYS_WARN, RTINSTANCE_LOG_FLAG_ALL, name, "Duplicate instance delete while pending deletion"); return; } else if (!rtinstance) { RTINSTANCE_LOG_MESSAGE(server_, SandeshLevel::SYS_WARN, RTINSTANCE_LOG_FLAG_ALL, name, "Instance not found during delete"); assert(rtinstance != NULL); } InstanceVnIndexRemove(rtinstance); InstanceTargetRemove(rtinstance); rtinstance->ClearConfig(); RTINSTANCE_LOG(Delete, rtinstance, SandeshLevel::SYS_DEBUG, RTINSTANCE_LOG_FLAG_ALL); rtinstance->ClearRouteTarget(); server()->service_chain_mgr()->StopServiceChain(rtinstance); // Remove Static Route config if (rtinstance->static_route_mgr()) rtinstance->static_route_mgr()->FlushStaticRouteConfig(); NotifyInstanceOp(name, INSTANCE_DELETE); rtinstance->ManagedDelete(); } // // Concurrency: Called from BGP config task manager // // Destroy a routing instance from the data structures // void RoutingInstanceMgr::DestroyRoutingInstance(RoutingInstance *rtinstance) { CHECK_CONCURRENCY("bgp::Config"); RTINSTANCE_LOG(Destroy, rtinstance, SandeshLevel::SYS_DEBUG, RTINSTANCE_LOG_FLAG_ALL); // Remove call here also deletes the 
instance. const string name = rtinstance->name(); instances_.Remove(rtinstance->name(), rtinstance->index()); if (deleted()) return; if (name == BgpConfigManager::kMasterInstance) return; const BgpInstanceConfig *config = server()->config_manager()->FindInstance(name); if (config) { CreateRoutingInstance(config); return; } } class RoutingInstance::DeleteActor : public LifetimeActor { public: DeleteActor(BgpServer *server, RoutingInstance *parent) : LifetimeActor(server->lifetime_manager()), parent_(parent) { } virtual bool MayDelete() const { return parent_->MayDelete(); } virtual void Shutdown() { parent_->mgr_->NotifyInstanceOp(parent_->name(), RoutingInstanceMgr::INSTANCE_DELETE); parent_->Shutdown(); } virtual void Destroy() { parent_->mgr_->DestroyRoutingInstance(parent_); } private: RoutingInstance *parent_; }; RoutingInstance::RoutingInstance(string name, BgpServer *server, RoutingInstanceMgr *mgr, const BgpInstanceConfig *config) : name_(name), index_(-1), mgr_(mgr), config_(config), is_default_(false), virtual_network_index_(0), virtual_network_allow_transit_(false), vxlan_id_(0), deleter_(new DeleteActor(server, this)), manager_delete_ref_(this, mgr->deleter()) { peer_manager_.reset(BgpObjectFactory::Create<PeerManager>(this)); } RoutingInstance::~RoutingInstance() { } void RoutingInstance::ProcessConfig(BgpServer *server) { RoutingInstanceInfo info = GetDataCollection(""); // Initialize virtual network info. 
virtual_network_ = config_->virtual_network(); virtual_network_index_ = config_->virtual_network_index(); virtual_network_allow_transit_ = config_->virtual_network_allow_transit(); vxlan_id_ = config_->vxlan_id(); vector<string> import_rt, export_rt; BOOST_FOREACH(string irt, config_->import_list()) { import_.insert(RouteTarget::FromString(irt)); import_rt.push_back(irt); } BOOST_FOREACH(string ert, config_->export_list()) { export_.insert(RouteTarget::FromString(ert)); export_rt.push_back(ert); } if (import_rt.size()) info.set_add_import_rt(import_rt); if (export_rt.size()) info.set_add_export_rt(export_rt); if (import_rt.size() || export_rt.size()) ROUTING_INSTANCE_COLLECTOR_INFO(info); // Create BGP Table if (name_ == BgpConfigManager::kMasterInstance) { is_default_ = true; VpnTableCreate(server, Address::INETVPN); VpnTableCreate(server, Address::INET6VPN); VpnTableCreate(server, Address::ERMVPN); VpnTableCreate(server, Address::EVPN); RTargetTableCreate(server); BgpTable *table_inet = static_cast<BgpTable *>( server->database()->CreateTable("inet.0")); if (table_inet != NULL) { AddTable(table_inet); } } else { // Create foo.inet.0. VrfTableCreate(server, Address::INET, Address::INETVPN); // Create foo.inet6.0. VrfTableCreate(server, Address::INET6, Address::INET6VPN); // Create foo.ermvpn.0. VrfTableCreate(server, Address::ERMVPN, Address::ERMVPN); // Create foo.evpn.0. VrfTableCreate(server, Address::EVPN, Address::EVPN); } // Service Chain if (!config_->service_chain_list().empty()) { const ServiceChainConfig &cfg = config_->service_chain_list().front(); if (cfg.routing_instance != "") { server->service_chain_mgr()->LocateServiceChain(this, cfg); } } if (static_route_mgr()) static_route_mgr()->ProcessStaticRouteConfig(); } void RoutingInstance::UpdateConfig(BgpServer *server, const BgpInstanceConfig *cfg) { CHECK_CONCURRENCY("bgp::Config"); // This is a noop in production code. However unit tests may pass a // new object. 
config_ = cfg; // Figure out if there's a significant configuration change that requires // notifying routes to all listeners. bool notify_routes = false; if (virtual_network_allow_transit_ != cfg->virtual_network_allow_transit()) notify_routes = true; if (virtual_network_ != cfg->virtual_network()) notify_routes = true; if (virtual_network_index_ != cfg->virtual_network_index()) notify_routes = true; // Trigger notification of all routes in each table. if (notify_routes) { BOOST_FOREACH(RouteTableList::value_type &entry, vrf_tables_) { BgpTable *table = entry.second; table->NotifyAllEntries(); } } // Update virtual network info. virtual_network_ = cfg->virtual_network(); virtual_network_index_ = cfg->virtual_network_index(); virtual_network_allow_transit_ = cfg->virtual_network_allow_transit(); vxlan_id_ = cfg->vxlan_id(); // Master routing instance doesn't have import & export list // Master instance imports and exports all RT if (IsDefaultRoutingInstance()) return; // Do a diff walk of Routing Instance config and Routing Instance. 
BgpInstanceConfig::RouteTargetList::const_iterator cfg_it = cfg->import_list().begin(); RoutingInstance::RouteTargetList::const_iterator rt_it = import_.begin(); RoutingInstance::RouteTargetList::const_iterator rt_next_it = rt_it; BgpTable *inet_table = GetTable(Address::INET); RoutePathReplicator *inetvpn_replicator = server->replicator(Address::INETVPN); BgpTable *inet6_table = GetTable(Address::INET6); RoutePathReplicator *inet6vpn_replicator = server->replicator(Address::INET6VPN); BgpTable *evpn_table = GetTable(Address::EVPN); RoutePathReplicator *evpn_replicator = server->replicator(Address::EVPN); BgpTable *ermvpn_table = GetTable(Address::ERMVPN); RoutePathReplicator *ermvpn_replicator = server->replicator(Address::ERMVPN); RoutingInstanceInfo info = GetDataCollection(""); vector<string> add_import_rt, remove_import_rt; vector<string> add_export_rt, remove_export_rt; while ((cfg_it != cfg->import_list().end()) && (rt_it != import_.end())) { RouteTarget cfg_rtarget(RouteTarget::FromString(*cfg_it)); if (cfg_rtarget.GetExtCommunity() < rt_it->GetExtCommunity()) { // If present in config and not in Routing Instance, // a. Add to import list // b. Add the table to import from the RouteTarget import_.insert(cfg_rtarget); add_import_rt.push_back(*cfg_it); inetvpn_replicator->Join(inet_table, cfg_rtarget, true); ermvpn_replicator->Join(ermvpn_table, cfg_rtarget, true); evpn_replicator->Join(evpn_table, cfg_rtarget, true); inet6vpn_replicator->Join(inet6_table, cfg_rtarget, true); cfg_it++; } else if (cfg_rtarget.GetExtCommunity() > rt_it->GetExtCommunity()) { // If not present in config but present in Routing Instance, // a. Remove to import list // b. 
Leave the Import RtGroup rt_next_it++; remove_import_rt.push_back(rt_it->ToString()); inetvpn_replicator->Leave(inet_table, *rt_it, true); ermvpn_replicator->Leave(ermvpn_table, *rt_it, true); evpn_replicator->Leave(evpn_table, *rt_it, true); inet6vpn_replicator->Leave(inet6_table, *rt_it, true); import_.erase(rt_it); rt_it = rt_next_it; } else { // Present in both, Nop rt_it++; cfg_it++; } rt_next_it = rt_it; } // Walk through the entire left over config list and add to import list for (; cfg_it != cfg->import_list().end(); ++cfg_it) { RouteTarget cfg_rtarget(RouteTarget::FromString(*cfg_it)); import_.insert(cfg_rtarget); add_import_rt.push_back(*cfg_it); inetvpn_replicator->Join(inet_table, cfg_rtarget, true); ermvpn_replicator->Join(ermvpn_table, cfg_rtarget, true); evpn_replicator->Join(evpn_table, cfg_rtarget, true); inet6vpn_replicator->Join(inet6_table, cfg_rtarget, true); } // Walk through the entire left over RoutingInstance import list and purge for (rt_next_it = rt_it; rt_it != import_.end(); rt_it = rt_next_it) { rt_next_it++; remove_import_rt.push_back(rt_it->ToString()); inetvpn_replicator->Leave(inet_table, *rt_it, true); ermvpn_replicator->Leave(ermvpn_table, *rt_it, true); evpn_replicator->Leave(evpn_table, *rt_it, true); inet6vpn_replicator->Leave(inet6_table, *rt_it, true); import_.erase(rt_it); } // Same step for Export_rt config cfg_it = cfg->export_list().begin(); rt_next_it = rt_it = export_.begin(); while ((cfg_it != cfg->export_list().end()) && (rt_it != export_.end())) { RouteTarget cfg_rtarget(RouteTarget::FromString(*cfg_it)); if (cfg_rtarget.GetExtCommunity() < rt_it->GetExtCommunity()) { export_.insert(cfg_rtarget); add_export_rt.push_back(*cfg_it); inetvpn_replicator->Join(inet_table, cfg_rtarget, false); ermvpn_replicator->Join(ermvpn_table, cfg_rtarget, false); inet6vpn_replicator->Join(inet6_table, cfg_rtarget, false); evpn_replicator->Join(evpn_table, cfg_rtarget, false); cfg_it++; } else if (cfg_rtarget.GetExtCommunity() > 
rt_it->GetExtCommunity()) { rt_next_it++; remove_export_rt.push_back(rt_it->ToString()); inetvpn_replicator->Leave(inet_table, *rt_it, false); ermvpn_replicator->Leave(ermvpn_table, *rt_it, false); evpn_replicator->Leave(evpn_table, *rt_it, false); inet6vpn_replicator->Leave(inet6_table, *rt_it, false); export_.erase(rt_it); rt_it = rt_next_it; } else { rt_it++; cfg_it++; } rt_next_it = rt_it; } for (; cfg_it != cfg->export_list().end(); ++cfg_it) { RouteTarget cfg_rtarget(RouteTarget::FromString(*cfg_it)); export_.insert(cfg_rtarget); add_export_rt.push_back(*cfg_it); inetvpn_replicator->Join(inet_table, cfg_rtarget, false); ermvpn_replicator->Join(ermvpn_table, cfg_rtarget, false); evpn_replicator->Join(evpn_table, cfg_rtarget, false); inet6vpn_replicator->Join(inet6_table, cfg_rtarget, false); } for (rt_next_it = rt_it; rt_it != export_.end(); rt_it = rt_next_it) { rt_next_it++; remove_export_rt.push_back(rt_it->ToString()); inetvpn_replicator->Leave(inet_table, *rt_it, false); ermvpn_replicator->Leave(ermvpn_table, *rt_it, false); evpn_replicator->Leave(evpn_table, *rt_it, false); inet6vpn_replicator->Leave(inet6_table, *rt_it, false); export_.erase(rt_it); } if (add_import_rt.size()) info.set_add_import_rt(add_import_rt); if (remove_import_rt.size()) info.set_remove_import_rt(remove_import_rt); if (add_export_rt.size()) info.set_add_export_rt(add_export_rt); if (remove_export_rt.size()) info.set_remove_export_rt(remove_export_rt); if (add_import_rt.size() || remove_import_rt.size() || add_export_rt.size() || remove_export_rt.size()) ROUTING_INSTANCE_COLLECTOR_INFO(info); // // Service Chain update // if (!config_->service_chain_list().empty()) { const ServiceChainConfig &cfg = config_->service_chain_list().front(); server->service_chain_mgr()->LocateServiceChain(this, cfg); } else { server->service_chain_mgr()->StopServiceChain(this); } if (static_route_mgr()) static_route_mgr()->UpdateStaticRouteConfig(); } void RoutingInstance::ClearConfig() { 
CHECK_CONCURRENCY("bgp::Config"); config_ = NULL; } void RoutingInstance::ManagedDelete() { // RoutingInstanceMgr logs the delete for non-default instances. if (IsDefaultRoutingInstance()) { RTINSTANCE_LOG(Delete, this, SandeshLevel::SYS_DEBUG, RTINSTANCE_LOG_FLAG_ALL); } deleter_->Delete(); } void RoutingInstance::Shutdown() { CHECK_CONCURRENCY("bgp::Config"); RTINSTANCE_LOG(Shutdown, this, SandeshLevel::SYS_DEBUG, RTINSTANCE_LOG_FLAG_ALL); ClearRouteTarget(); server()->service_chain_mgr()->StopServiceChain(this); if (static_route_mgr()) static_route_mgr()->FlushStaticRouteConfig(); } bool RoutingInstance::MayDelete() const { return true; } LifetimeActor *RoutingInstance::deleter() { return deleter_.get(); } const LifetimeActor *RoutingInstance::deleter() const { return deleter_.get(); } bool RoutingInstance::deleted() const { return deleter()->IsDeleted(); } const string RoutingInstance::GetVirtualNetworkName() const { if (!virtual_network_.empty()) return virtual_network_; size_t pos = name_.rfind(':'); if (pos == string::npos) { return name_; } else { return name_.substr(0, pos); } } const string RoutingInstance::virtual_network() const { return virtual_network_.empty() ? 
"unresolved" : virtual_network_; } int RoutingInstance::virtual_network_index() const { return virtual_network_index_; } bool RoutingInstance::virtual_network_allow_transit() const { return virtual_network_allow_transit_; } int RoutingInstance::vxlan_id() const { return vxlan_id_; } BgpServer *RoutingInstance::server() { return mgr_->server(); } const BgpServer *RoutingInstance::server() const { return mgr_->server(); } void RoutingInstance::ClearFamilyRouteTarget(Address::Family vrf_family, Address::Family vpn_family) { BgpTable *table = GetTable(vrf_family); if (table) { RoutePathReplicator *replicator = server()->replicator(vpn_family); BOOST_FOREACH(RouteTarget rt, import_) { replicator->Leave(table, rt, true); } BOOST_FOREACH(RouteTarget rt, export_) { replicator->Leave(table, rt, false); } } } void RoutingInstance::ClearRouteTarget() { CHECK_CONCURRENCY("bgp::Config"); if (IsDefaultRoutingInstance()) { return; } ClearFamilyRouteTarget(Address::INET, Address::INETVPN); ClearFamilyRouteTarget(Address::INET6, Address::INET6VPN); ClearFamilyRouteTarget(Address::ERMVPN, Address::ERMVPN); ClearFamilyRouteTarget(Address::EVPN, Address::EVPN); import_.clear(); export_.clear(); } BgpTable *RoutingInstance::RTargetTableCreate(BgpServer *server) { BgpTable *rtargettbl = static_cast<BgpTable *>( server->database()->CreateTable("bgp.rtarget.0")); RTINSTANCE_LOG_TABLE(Create, this, rtargettbl, SandeshLevel::SYS_DEBUG, RTINSTANCE_LOG_FLAG_ALL); AddTable(rtargettbl); return rtargettbl; } BgpTable *RoutingInstance::VpnTableCreate(BgpServer *server, Address::Family vpn_family) { string table_name = GetTableName(name(), vpn_family); BgpTable *table = static_cast<BgpTable *> (server->database()->CreateTable(table_name)); if (table) { AddTable(table); RTINSTANCE_LOG_TABLE(Create, this, table, SandeshLevel::SYS_DEBUG, RTINSTANCE_LOG_FLAG_ALL); assert(server->rtarget_group_mgr()->GetRtGroupMap().empty()); RoutePathReplicator *replicator = server->replicator(vpn_family); 
replicator->Initialize(); } return table; } BgpTable *RoutingInstance::VrfTableCreate(BgpServer *server, Address::Family vrf_family, Address::Family vpn_family) { // Create foo.table_name_suffix string table_name = GetTableName(name(), vrf_family); BgpTable *table = static_cast<BgpTable *> (server->database()->CreateTable(table_name)); if (table) { AddTable(table); RTINSTANCE_LOG_TABLE(Create, this, table, SandeshLevel::SYS_DEBUG, RTINSTANCE_LOG_FLAG_ALL); RoutePathReplicator *replicator = server->replicator(vpn_family); BOOST_FOREACH(RouteTarget rt, import_) { replicator->Join(table, rt, true); } BOOST_FOREACH(RouteTarget rt, export_) { replicator->Join(table, rt, false); } } return table; } void RoutingInstance::AddTable(BgpTable *tbl) { vrf_tables_.insert(make_pair(tbl->name(), tbl)); tbl->set_routing_instance(this); RoutingInstanceInfo info = GetDataCollection("Add"); info.set_family(Address::FamilyToString(tbl->family())); ROUTING_INSTANCE_COLLECTOR_INFO(info); } void RoutingInstance::RemoveTable(BgpTable *tbl) { RoutingInstanceInfo info = GetDataCollection("Remove"); info.set_family(Address::FamilyToString(tbl->family())); vrf_tables_.erase(tbl->name()); ROUTING_INSTANCE_COLLECTOR_INFO(info); } // // Concurrency: BGP Config task // // Remove the table from the map and delete the table data structure // void RoutingInstance::DestroyDBTable(DBTable *dbtable) { CHECK_CONCURRENCY("bgp::Config"); BgpTable *table = static_cast<BgpTable *>(dbtable); RTINSTANCE_LOG_TABLE(Destroy, this, table, SandeshLevel::SYS_DEBUG, RTINSTANCE_LOG_FLAG_ALL); // Remove this table from various data structures server()->database()->RemoveTable(table); RemoveTable(table); // Make sure that there are no routes left in this table assert(table->Size() == 0); delete table; } string RoutingInstance::GetTableName(string instance_name, Address::Family fmly) { string table_name; if (instance_name == BgpConfigManager::kMasterInstance) { if ((fmly == Address::INET) || (fmly == Address::INET6)) { 
table_name = Address::FamilyToTableString(fmly) + ".0"; } else { table_name = "bgp." + Address::FamilyToTableString(fmly) + ".0"; } } else { table_name = instance_name + "." + Address::FamilyToTableString(fmly) + ".0"; } return table_name; } BgpTable *RoutingInstance::GetTable(Address::Family fmly) { string table_name = RoutingInstance::GetTableName(name_, fmly); RouteTableList::const_iterator loc = GetTables().find(table_name); if (loc != GetTables().end()) { return loc->second; } return NULL; } string RoutingInstance::GetVrfFromTableName(const string table) { static set<string> master_tables = list_of("inet.0"); static set<string> vpn_tables = list_of("bgp.l3vpn.0")("bgp.ermvpn.0")("bgp.evpn.0")("bgp.rtarget.0") ("bgp.l3vpn-inet6.0"); if (master_tables.find(table) != master_tables.end()) return BgpConfigManager::kMasterInstance; if (vpn_tables.find(table) != vpn_tables.end()) return BgpConfigManager::kMasterInstance; size_t pos1 = table.rfind('.'); if (pos1 == string::npos) return "__unknown__"; size_t pos2 = table.rfind('.', pos1 - 1); if (pos2 == string::npos) return "__unknown__"; return table.substr(0, pos2); } void RoutingInstance::set_index(BgpServer *server, int index) { index_ = index; if (!is_default_) { rd_.reset(new RouteDistinguisher(server->bgp_identifier(), index)); static_route_mgr_.reset(new StaticRouteMgr(this)); } } RoutingInstanceInfo RoutingInstance::GetDataCollection(const char *operation) { RoutingInstanceInfo info; info.set_name(name_); info.set_hostname(mgr_->server()->localname()); if (rd_.get()) info.set_route_distinguisher(rd_->ToString()); if (operation) info.set_operation(operation); return info; } // // Return true if one of the route targets in the ExtCommunity is in the // set of export RouteTargets for this RoutingInstance. 
// bool RoutingInstance::HasExportTarget(const ExtCommunity *extcomm) const { if (!extcomm) return false; BOOST_FOREACH(const ExtCommunity::ExtCommunityValue &value, extcomm->communities()) { if (!ExtCommunity::is_route_target(value)) continue; RouteTarget rtarget(value); if (export_.find(rtarget) != export_.end()) return true; } return false; }
{ "content_hash": "c4032a2f70ad019cb83b376795245133", "timestamp": "", "source": "github", "line_count": 982, "max_line_length": 80, "avg_line_length": 34.210794297352344, "alnum_prop": 0.6350945081113261, "repo_name": "cloudwatt/contrail-controller", "id": "df0a873a5920ea23657124dedbed83c653f1a9cf", "size": "33667", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/bgp/routing-instance/routing_instance.cc", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ApacheConf", "bytes": "80579" }, { "name": "C", "bytes": "44989" }, { "name": "C++", "bytes": "14908777" }, { "name": "CSS", "bytes": "531" }, { "name": "Java", "bytes": "171966" }, { "name": "Lua", "bytes": "8164" }, { "name": "Makefile", "bytes": "12449" }, { "name": "Objective-C", "bytes": "720" }, { "name": "Protocol Buffer", "bytes": "1120" }, { "name": "Python", "bytes": "3057429" }, { "name": "Shell", "bytes": "54611" }, { "name": "Thrift", "bytes": "40763" } ], "symlink_target": "" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <html><head> <title>CURLOPT_PROXY_SERVICE_NAME man page</title> <meta name="generator" content="roffit"> <STYLE type="text/css"> pre { overflow: auto; margin: 0; } P.level0, pre.level0 { padding-left: 2em; } P.level1, pre.level1 { padding-left: 4em; } P.level2, pre.level2 { padding-left: 6em; } span.emphasis { font-style: italic; } span.bold { font-weight: bold; } span.manpage { font-weight: bold; } h2.nroffsh { background-color: #e0e0e0; } span.nroffip { font-weight: bold; font-size: 120%; font-family: monospace; } p.roffit { text-align: center; font-size: 80%; } </STYLE> </head><body> <p class="level0"><a name="NAME"></a><h2 class="nroffsh">NAME</h2> <p class="level0">CURLOPT_PROXY_SERVICE_NAME - proxy service name <a name="SYNOPSIS"></a><h2 class="nroffsh">SYNOPSIS</h2> <p class="level0">&#35;include &lt;curl/curl.h&gt; <p class="level0">CURLcode curl_easy_setopt(CURL *handle, CURLOPT_PROXY_SERVICE_NAME, char *name); <a name="DESCRIPTION"></a><h2 class="nroffsh">DESCRIPTION</h2> <p class="level0">Pass a char * as parameter to a string holding the <span Class="emphasis">name</span> of the service. The default service name is "HTTP". This option allows you to change it. <p class="level0">See above <a name="PROTOCOLS"></a><h2 class="nroffsh">PROTOCOLS</h2> <p class="level0">Most <a name="EXAMPLE"></a><h2 class="nroffsh">EXAMPLE</h2> <p class="level0">TODO <a name="AVAILABILITY"></a><h2 class="nroffsh">AVAILABILITY</h2> <p class="level0">Added in 7.43.0 <a name="RETURN"></a><h2 class="nroffsh">RETURN VALUE</h2> <p class="level0">Returns CURLE_OK if the option is supported, CURLE_UNKNOWN_OPTION if not, or CURLE_OUT_OF_MEMORY if there was insufficient heap space. 
<a name="SEE"></a><h2 class="nroffsh">SEE ALSO</h2> <p class="level0"><a Class="manpage" href="./CURLOPT_PROXY.html">CURLOPT_PROXY</a>, <a Class="manpage" href="./CURLOPT_PROXYTYPE.html">CURLOPT_PROXYTYPE</a><p class="roffit"> This HTML page was made with <a href="http://daniel.haxx.se/projects/roffit/">roffit</a>. </body></html>
{ "content_hash": "46b34f0882c69b0bea1c58539c4f7ef8", "timestamp": "", "source": "github", "line_count": 65, "max_line_length": 204, "avg_line_length": 33.13846153846154, "alnum_prop": 0.6935933147632312, "repo_name": "phr34k/serpent", "id": "4bf0537c476f5ace0c0f75023b9ccec233b1741d", "size": "2154", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "thirdparty/curl/docs/libcurl/opts/CURLOPT_PROXY_SERVICE_NAME.html", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "503" }, { "name": "C++", "bytes": "69304" }, { "name": "Makefile", "bytes": "1096" }, { "name": "Python", "bytes": "25175" } ], "symlink_target": "" }
using System.Web.Mvc; namespace TestWebEngine.Web { public class FilterConfig { public static void RegisterFilters(GlobalFilterCollection filters) { filters.Add(new HandleErrorAttribute()); } } }
{ "content_hash": "baf953e57adbfe19578e7bb85deeb877", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 74, "avg_line_length": 20.416666666666668, "alnum_prop": 0.6448979591836734, "repo_name": "SKorolchuk/test-web-engine", "id": "78260d031a36e5f46ed1c6abf19a053add315587", "size": "247", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "TestWebEngine/TestWebEngine.Web/App_Start/FilterConfig.cs", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "100" }, { "name": "C#", "bytes": "74405" }, { "name": "CSS", "bytes": "4789" }, { "name": "HTML", "bytes": "19621" }, { "name": "JavaScript", "bytes": "812332" }, { "name": "Pascal", "bytes": "1094" }, { "name": "PowerShell", "bytes": "106195" } ], "symlink_target": "" }
<?php use kartik\money\MaskMoney; use yii\helpers\Html; use yii\bootstrap\ActiveForm; /* @var $this yii\web\View */ /* @var $model app\models\Cuenta */ /* @var $form yii\widgets\ActiveForm */ $inputTemplate = '<div class="input-group">' . '<span class="input-group-addon">$</span>' . '{input}' . "</div>"; ?> <div class="cuenta-form"> <?php $form = ActiveForm::begin(); ?> <?= $form->field($model, 'MontoCaja', ['inputTemplate' => $inputTemplate ])->widget(MaskMoney::classname()) ?> <?= $form->field($model, 'MontoSobre', ['inputTemplate' => $inputTemplate ])->widget(MaskMoney::classname()) ?> <div class="form-group"> <?= Html::submitButton(!$model->iniciado() ? 'Crear' : 'Actualizar', ['class' => !$model->iniciado() ? 'btn btn-success' : 'btn btn-primary']) ?> </div> <?php ActiveForm::end(); ?> </div>
{ "content_hash": "53ebf4392888fa61587e204e46013082", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 153, "avg_line_length": 25.647058823529413, "alnum_prop": 0.591743119266055, "repo_name": "Santiago-j-s/cefi-stock", "id": "cb412cb25802a483119e5a7a2371d0ccfde96d3b", "size": "872", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "views/cuenta/_form.php", "mode": "33261", "license": "bsd-3-clause", "language": [ { "name": "Batchfile", "bytes": "1030" }, { "name": "CSS", "bytes": "2442" }, { "name": "PHP", "bytes": "112042" }, { "name": "Shell", "bytes": "242" } ], "symlink_target": "" }
package runtime import ( "unsafe" ) // The constant is known to the compiler. // There is no fundamental theory behind this number. const tmpStringBufSize = 32 type tmpBuf [tmpStringBufSize]byte // concatstrings implements a Go string concatenation x+y+z+... // The operands are passed in the slice a. // If buf != nil, the compiler has determined that the result does not // escape the calling function, so the string data can be stored in buf // if small enough. func concatstrings(buf *tmpBuf, a []string) string { idx := 0 l := 0 count := 0 for i, x := range a { n := len(x) if n == 0 { continue } if l+n < l { throw("string concatenation too long") } l += n count++ idx = i } if count == 0 { return "" } // If there is just one string and either it is not on the stack // or our result does not escape the calling frame (buf != nil), // then we can return that string directly. if count == 1 && (buf != nil || !stringDataOnStack(a[idx])) { return a[idx] } s, b := rawstringtmp(buf, l) l = 0 for _, x := range a { copy(b[l:], x) l += len(x) } return s } func concatstring2(buf *tmpBuf, a [2]string) string { return concatstrings(buf, a[:]) } func concatstring3(buf *tmpBuf, a [3]string) string { return concatstrings(buf, a[:]) } func concatstring4(buf *tmpBuf, a [4]string) string { return concatstrings(buf, a[:]) } func concatstring5(buf *tmpBuf, a [5]string) string { return concatstrings(buf, a[:]) } // Buf is a fixed-size buffer for the result, // it is not nil if the result does not escape. func slicebytetostring(buf *tmpBuf, b []byte) string { l := len(b) if l == 0 { // Turns out to be a relatively common case. // Consider that you want to parse out data between parens in "foo()bar", // you find the indices and convert the subslice to string. 
return "" } if raceenabled && l > 0 { racereadrangepc(unsafe.Pointer(&b[0]), uintptr(l), getcallerpc(unsafe.Pointer(&b)), funcPC(slicebytetostring)) } s, c := rawstringtmp(buf, l) copy(c, b) return s } // stringDataOnStack reports whether the string's data is // stored on the current goroutine's stack. func stringDataOnStack(s string) bool { ptr := uintptr(stringStructOf(&s).str) stk := getg().stack return stk.lo <= ptr && ptr < stk.hi } func rawstringtmp(buf *tmpBuf, l int) (s string, b []byte) { if buf != nil && l <= len(buf) { b = buf[:l] s = slicebytetostringtmp(b) } else { s, b = rawstring(l) } return } func slicebytetostringtmp(b []byte) string { // Return a "string" referring to the actual []byte bytes. // This is only for use by internal compiler optimizations // that know that the string form will be discarded before // the calling goroutine could possibly modify the original // slice or synchronize with another goroutine. // First such case is a m[string(k)] lookup where // m is a string-keyed map and k is a []byte. // Second such case is "<"+string(b)+">" concatenation where b is []byte. // Third such case is string(b)=="foo" comparison where b is []byte. if raceenabled && len(b) > 0 { racereadrangepc(unsafe.Pointer(&b[0]), uintptr(len(b)), getcallerpc(unsafe.Pointer(&b)), funcPC(slicebytetostringtmp)) } return *(*string)(unsafe.Pointer(&b)) } func stringtoslicebyte(buf *tmpBuf, s string) []byte { var b []byte if buf != nil && len(s) <= len(buf) { b = buf[:len(s)] } else { b = rawbyteslice(len(s)) } copy(b, s) return b } func stringtoslicebytetmp(s string) []byte { // Return a slice referring to the actual string bytes. // This is only for use by internal compiler optimizations // that know that the slice won't be mutated. 
// The only such case today is: // for i, c := range []byte(str) str := stringStructOf(&s) ret := slice{array: unsafe.Pointer(str.str), len: str.len, cap: str.len} return *(*[]byte)(unsafe.Pointer(&ret)) } func stringtoslicerune(buf *[tmpStringBufSize]rune, s string) []rune { // two passes. // unlike slicerunetostring, no race because strings are immutable. n := 0 t := s for len(s) > 0 { _, k := charntorune(s) s = s[k:] n++ } var a []rune if buf != nil && n <= len(buf) { a = buf[:n] } else { a = rawruneslice(n) } n = 0 for len(t) > 0 { r, k := charntorune(t) t = t[k:] a[n] = r n++ } return a } func slicerunetostring(buf *tmpBuf, a []rune) string { if raceenabled && len(a) > 0 { racereadrangepc(unsafe.Pointer(&a[0]), uintptr(len(a))*unsafe.Sizeof(a[0]), getcallerpc(unsafe.Pointer(&a)), funcPC(slicerunetostring)) } var dum [4]byte size1 := 0 for _, r := range a { size1 += runetochar(dum[:], r) } s, b := rawstringtmp(buf, size1+3) size2 := 0 for _, r := range a { // check for race if size2 >= size1 { break } size2 += runetochar(b[size2:], r) } return s[:size2] } type stringStruct struct { str unsafe.Pointer len int } // Variant with *byte pointer type for DWARF debugging. type stringStructDWARF struct { str *byte len int } func stringStructOf(sp *string) *stringStruct { return (*stringStruct)(unsafe.Pointer(sp)) } func intstring(buf *[4]byte, v int64) string { var s string var b []byte if buf != nil { b = buf[:] s = slicebytetostringtmp(b) } else { s, b = rawstring(4) } n := runetochar(b, rune(v)) return s[:n] } // stringiter returns the index of the next // rune after the rune that starts at s[k]. func stringiter(s string, k int) int { if k >= len(s) { // 0 is end of iteration return 0 } c := s[k] if c < runeself { return k + 1 } // multi-char rune _, n := charntorune(s[k:]) return k + n } // stringiter2 returns the rune that starts at s[k] // and the index where the next rune starts. 
func stringiter2(s string, k int) (int, rune) { if k >= len(s) { // 0 is end of iteration return 0, 0 } c := s[k] if c < runeself { return k + 1, rune(c) } // multi-char rune r, n := charntorune(s[k:]) return k + n, r } // rawstring allocates storage for a new string. The returned // string and byte slice both refer to the same storage. // The storage is not zeroed. Callers should use // b to set the string contents and then drop b. func rawstring(size int) (s string, b []byte) { p := mallocgc(uintptr(size), nil, flagNoScan|flagNoZero) stringStructOf(&s).str = p stringStructOf(&s).len = size *(*slice)(unsafe.Pointer(&b)) = slice{p, size, size} for { ms := maxstring if uintptr(size) <= uintptr(ms) || casuintptr((*uintptr)(unsafe.Pointer(&maxstring)), uintptr(ms), uintptr(size)) { return } } } // rawbyteslice allocates a new byte slice. The byte slice is not zeroed. func rawbyteslice(size int) (b []byte) { cap := roundupsize(uintptr(size)) p := mallocgc(cap, nil, flagNoScan|flagNoZero) if cap != uintptr(size) { memclr(add(p, uintptr(size)), cap-uintptr(size)) } *(*slice)(unsafe.Pointer(&b)) = slice{p, size, int(cap)} return } // rawruneslice allocates a new rune slice. The rune slice is not zeroed. 
func rawruneslice(size int) (b []rune) { if uintptr(size) > _MaxMem/4 { throw("out of memory") } mem := roundupsize(uintptr(size) * 4) p := mallocgc(mem, nil, flagNoScan|flagNoZero) if mem != uintptr(size)*4 { memclr(add(p, uintptr(size)*4), mem-uintptr(size)*4) } *(*slice)(unsafe.Pointer(&b)) = slice{p, size, int(mem / 4)} return } // used by cmd/cgo func gobytes(p *byte, n int) []byte { if n == 0 { return make([]byte, 0) } x := make([]byte, n) memmove(unsafe.Pointer(&x[0]), unsafe.Pointer(p), uintptr(n)) return x } func gostring(p *byte) string { l := findnull(p) if l == 0 { return "" } s, b := rawstring(l) memmove(unsafe.Pointer(&b[0]), unsafe.Pointer(p), uintptr(l)) return s } func gostringn(p *byte, l int) string { if l == 0 { return "" } s, b := rawstring(l) memmove(unsafe.Pointer(&b[0]), unsafe.Pointer(p), uintptr(l)) return s } func index(s, t string) int { if len(t) == 0 { return 0 } for i := 0; i < len(s); i++ { if s[i] == t[0] && hasprefix(s[i:], t) { return i } } return -1 } func contains(s, t string) bool { return index(s, t) >= 0 } func hasprefix(s, t string) bool { return len(s) >= len(t) && s[:len(t)] == t } func atoi(s string) int { n := 0 for len(s) > 0 && '0' <= s[0] && s[0] <= '9' { n = n*10 + int(s[0]) - '0' s = s[1:] } return n } //go:nosplit func findnull(s *byte) int { if s == nil { return 0 } p := (*[_MaxMem/2 - 1]byte)(unsafe.Pointer(s)) l := 0 for p[l] != 0 { l++ } return l } func findnullw(s *uint16) int { if s == nil { return 0 } p := (*[_MaxMem/2/2 - 1]uint16)(unsafe.Pointer(s)) l := 0 for p[l] != 0 { l++ } return l } var maxstring uintptr = 256 // a hint for print //go:nosplit func gostringnocopy(str *byte) string { ss := stringStruct{str: unsafe.Pointer(str), len: findnull(str)} s := *(*string)(unsafe.Pointer(&ss)) for { ms := maxstring if uintptr(len(s)) <= ms || casuintptr(&maxstring, ms, uintptr(len(s))) { break } } return s } func gostringw(strw *uint16) string { var buf [8]byte str := (*[_MaxMem/2/2 - 
1]uint16)(unsafe.Pointer(strw)) n1 := 0 for i := 0; str[i] != 0; i++ { n1 += runetochar(buf[:], rune(str[i])) } s, b := rawstring(n1 + 4) n2 := 0 for i := 0; str[i] != 0; i++ { // check for race if n2 >= n1 { break } n2 += runetochar(b[n2:], rune(str[i])) } b[n2] = 0 // for luck return s[:n2] }
{ "content_hash": "932b173a29e3e29853702a3913416db6", "timestamp": "", "source": "github", "line_count": 428, "max_line_length": 117, "avg_line_length": 21.873831775700936, "alnum_prop": 0.6286049989318522, "repo_name": "dterei/go", "id": "680001d8df10296da52ba587ada9ed3044c19bb2", "size": "9522", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/runtime/string.go", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Assembly", "bytes": "1054066" }, { "name": "Awk", "bytes": "450" }, { "name": "Batchfile", "bytes": "7207" }, { "name": "C", "bytes": "216345" }, { "name": "C++", "bytes": "1373" }, { "name": "CSS", "bytes": "8" }, { "name": "Go", "bytes": "25436465" }, { "name": "HTML", "bytes": "841569" }, { "name": "JavaScript", "bytes": "2550" }, { "name": "Logos", "bytes": "1248" }, { "name": "Makefile", "bytes": "1014" }, { "name": "Perl", "bytes": "34469" }, { "name": "Protocol Buffer", "bytes": "1569" }, { "name": "Python", "bytes": "12446" }, { "name": "Shell", "bytes": "63964" }, { "name": "Yacc", "bytes": "42457" } ], "symlink_target": "" }
/* Reports periodic system (virtual memory) swap statistics: cumulative pages * swapped in/out and pages swapped in/out for a user-defineable time period. * Only known to work with Linux v2.6. */ #ifndef __SWAP_MONITOR_H #define __SWAP_MONITOR_H #include <fstream> #include "Scheduler.h" /** * \file SwapMonitor.h * \brief declares SwapMonitor */ /** * \addtogroup Monitors * \brief A set of system resource monitors which can be used during execution of your application. * @{ */ /// A class which monitors and logs the swap utilization of a program class SwapMonitor : public TimerHandler { public: SwapMonitor(); void expire(); static void runSwapMonitor(); ///< start the swap monitor static void stopSwapMonitor(); ///< stop the swap monitor static const uint64_t DEFAULT_FREQUENCY = 5*1000*1000; ///< the default swap monitor frequency (in microseconds) private: void fillLast(); static SwapMonitor* instance; bool halt; uint64_t frequency; /* microseconds */ std::ifstream vmstat_file; uint32_t cumulative_in; uint32_t cumulative_out; uint32_t last_in; uint32_t last_out; double peak_in; double peak_out; }; /** @} */ #endif
{ "content_hash": "72153f414e16df1c350022e1d4cce2db", "timestamp": "", "source": "github", "line_count": 52, "max_line_length": 118, "avg_line_length": 24.25, "alnum_prop": 0.6653449643140364, "repo_name": "jojochuang/eventwave", "id": "695d38ccbbf70ddfa9e8808cf9e293acb9a70400", "size": "3155", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/SwapMonitor.h", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "10066" }, { "name": "C++", "bytes": "2738579" }, { "name": "CMake", "bytes": "51866" }, { "name": "Makefile", "bytes": "133" }, { "name": "Perl", "bytes": "1416743" }, { "name": "Python", "bytes": "7053" }, { "name": "Shell", "bytes": "2205" } ], "symlink_target": "" }
package cz.muni.fi.xtovarn.heimdall.dispatcher; import cz.muni.fi.xtovarn.heimdall.commons.entity.Event; import java.util.Set; public class Subscription { private final Set<String> recipients; private final Event event; public Subscription(Set<String> recipients, Event event) { this.recipients = recipients; this.event = event; } public Set<String> getRecipients() { return recipients; } public Event getEvent() { return event; } }
{ "content_hash": "53fba5896adf6497dbf8bccace7d4d24", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 59, "avg_line_length": 20.782608695652176, "alnum_prop": 0.7092050209205021, "repo_name": "ngmon/ngmon", "id": "7206c6e055253c02dd857010072b97466c3a46d8", "size": "478", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "core/src/main/java/cz/muni/fi/xtovarn/heimdall/dispatcher/Subscription.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "9985" }, { "name": "Java", "bytes": "59210" }, { "name": "Python", "bytes": "1221" }, { "name": "Shell", "bytes": "1274" } ], "symlink_target": "" }
namespace { sf::Mutex mutex; GLint checkMaxTextureUnits() { GLint maxUnits = 0; glCheck(glGetIntegerv(GL_MAX_TEXTURE_COORDS_ARB, &maxUnits)); return maxUnits; } // Retrieve the maximum number of texture units available GLint getMaxTextureUnits() { // TODO: Remove this lock when it becomes unnecessary in C++11 sf::Lock lock(mutex); static GLint maxUnits = checkMaxTextureUnits(); return maxUnits; } // Read the contents of a file into an array of char bool getFileContents(const std::string& filename, std::vector<char>& buffer) { std::ifstream file(filename.c_str(), std::ios_base::binary); if (file) { file.seekg(0, std::ios_base::end); std::streamsize size = file.tellg(); if (size > 0) { file.seekg(0, std::ios_base::beg); buffer.resize(static_cast<std::size_t>(size)); file.read(&buffer[0], size); } buffer.push_back('\0'); return true; } else { return false; } } // Read the contents of a stream into an array of char bool getStreamContents(sf::InputStream& stream, std::vector<char>& buffer) { bool success = true; sf::Int64 size = stream.getSize(); if (size > 0) { buffer.resize(static_cast<std::size_t>(size)); stream.seek(0); sf::Int64 read = stream.read(&buffer[0], size); success = (read == size); } buffer.push_back('\0'); return success; } bool checkShadersAvailable() { // Create a temporary context in case the user checks // before a GlResource is created, thus initializing // the shared context sf::Context context; // Make sure that extensions are initialized sf::priv::ensureExtensionsInit(); bool available = GLEW_ARB_shading_language_100 && GLEW_ARB_shader_objects && GLEW_ARB_vertex_shader && GLEW_ARB_fragment_shader; return available; } } namespace sf { //////////////////////////////////////////////////////////// Shader::CurrentTextureType Shader::CurrentTexture; //////////////////////////////////////////////////////////// Shader::Shader() : m_shaderProgram (0), m_currentTexture(-1), m_textures (), m_params () { } 
//////////////////////////////////////////////////////////// Shader::~Shader() { ensureGlContext(); // Destroy effect program if (m_shaderProgram) glCheck(glDeleteObjectARB(m_shaderProgram)); } //////////////////////////////////////////////////////////// bool Shader::loadFromFile(const std::string& filename, Type type) { // Read the file std::vector<char> shader; if (!getFileContents(filename, shader)) { err() << "Failed to open shader file \"" << filename << "\"" << std::endl; return false; } // Compile the shader program if (type == Vertex) return compile(&shader[0], NULL); else return compile(NULL, &shader[0]); } //////////////////////////////////////////////////////////// bool Shader::loadFromFile(const std::string& vertexShaderFilename, const std::string& fragmentShaderFilename) { // Read the vertex shader file std::vector<char> vertexShader; if (!getFileContents(vertexShaderFilename, vertexShader)) { err() << "Failed to open vertex shader file \"" << vertexShaderFilename << "\"" << std::endl; return false; } // Read the fragment shader file std::vector<char> fragmentShader; if (!getFileContents(fragmentShaderFilename, fragmentShader)) { err() << "Failed to open fragment shader file \"" << fragmentShaderFilename << "\"" << std::endl; return false; } // Compile the shader program return compile(&vertexShader[0], &fragmentShader[0]); } //////////////////////////////////////////////////////////// bool Shader::loadFromMemory(const std::string& shader, Type type) { // Compile the shader program if (type == Vertex) return compile(shader.c_str(), NULL); else return compile(NULL, shader.c_str()); } //////////////////////////////////////////////////////////// bool Shader::loadFromMemory(const std::string& vertexShader, const std::string& fragmentShader) { // Compile the shader program return compile(vertexShader.c_str(), fragmentShader.c_str()); } //////////////////////////////////////////////////////////// bool Shader::loadFromStream(InputStream& stream, Type type) { 
// Read the shader code from the stream std::vector<char> shader; if (!getStreamContents(stream, shader)) { err() << "Failed to read shader from stream" << std::endl; return false; } // Compile the shader program if (type == Vertex) return compile(&shader[0], NULL); else return compile(NULL, &shader[0]); } //////////////////////////////////////////////////////////// bool Shader::loadFromStream(InputStream& vertexShaderStream, InputStream& fragmentShaderStream) { // Read the vertex shader code from the stream std::vector<char> vertexShader; if (!getStreamContents(vertexShaderStream, vertexShader)) { err() << "Failed to read vertex shader from stream" << std::endl; return false; } // Read the fragment shader code from the stream std::vector<char> fragmentShader; if (!getStreamContents(fragmentShaderStream, fragmentShader)) { err() << "Failed to read fragment shader from stream" << std::endl; return false; } // Compile the shader program return compile(&vertexShader[0], &fragmentShader[0]); } //////////////////////////////////////////////////////////// void Shader::setParameter(const std::string& name, float x) { if (m_shaderProgram) { ensureGlContext(); // Enable program GLhandleARB program = glCheck(glGetHandleARB(GL_PROGRAM_OBJECT_ARB)); glCheck(glUseProgramObjectARB(m_shaderProgram)); // Get parameter location and assign it new values GLint location = getParamLocation(name); if (location != -1) { glCheck(glUniform1fARB(location, x)); } // Disable program glCheck(glUseProgramObjectARB(program)); } } //////////////////////////////////////////////////////////// void Shader::setParameter(const std::string& name, float x, float y) { if (m_shaderProgram) { ensureGlContext(); // Enable program GLhandleARB program = glCheck(glGetHandleARB(GL_PROGRAM_OBJECT_ARB)); glCheck(glUseProgramObjectARB(m_shaderProgram)); // Get parameter location and assign it new values GLint location = getParamLocation(name); if (location != -1) { glCheck(glUniform2fARB(location, x, y)); } // 
Disable program glCheck(glUseProgramObjectARB(program)); } } //////////////////////////////////////////////////////////// void Shader::setParameter(const std::string& name, float x, float y, float z) { if (m_shaderProgram) { ensureGlContext(); // Enable program GLhandleARB program = glCheck(glGetHandleARB(GL_PROGRAM_OBJECT_ARB)); glCheck(glUseProgramObjectARB(m_shaderProgram)); // Get parameter location and assign it new values GLint location = getParamLocation(name); if (location != -1) { glCheck(glUniform3fARB(location, x, y, z)); } // Disable program glCheck(glUseProgramObjectARB(program)); } } //////////////////////////////////////////////////////////// void Shader::setParameter(const std::string& name, float x, float y, float z, float w) { if (m_shaderProgram) { ensureGlContext(); // Enable program GLhandleARB program = glCheck(glGetHandleARB(GL_PROGRAM_OBJECT_ARB)); glCheck(glUseProgramObjectARB(m_shaderProgram)); // Get parameter location and assign it new values GLint location = getParamLocation(name); if (location != -1) { glCheck(glUniform4fARB(location, x, y, z, w)); } // Disable program glCheck(glUseProgramObjectARB(program)); } } //////////////////////////////////////////////////////////// void Shader::setParameter(const std::string& name, const Vector2f& v) { setParameter(name, v.x, v.y); } //////////////////////////////////////////////////////////// void Shader::setParameter(const std::string& name, const Vector3f& v) { setParameter(name, v.x, v.y, v.z); } //////////////////////////////////////////////////////////// void Shader::setParameter(const std::string& name, const Color& color) { setParameter(name, color.r / 255.f, color.g / 255.f, color.b / 255.f, color.a / 255.f); } //////////////////////////////////////////////////////////// void Shader::setParameter(const std::string& name, const sf::Transform& transform) { if (m_shaderProgram) { ensureGlContext(); // Enable program GLhandleARB program = glCheck(glGetHandleARB(GL_PROGRAM_OBJECT_ARB)); 
glCheck(glUseProgramObjectARB(m_shaderProgram)); // Get parameter location and assign it new values GLint location = getParamLocation(name); if (location != -1) { glCheck(glUniformMatrix4fvARB(location, 1, GL_FALSE, transform.getMatrix())); } // Disable program glCheck(glUseProgramObjectARB(program)); } } //////////////////////////////////////////////////////////// void Shader::setParameter(const std::string& name, const Texture& texture) { if (m_shaderProgram) { ensureGlContext(); // Find the location of the variable in the shader int location = getParamLocation(name); if (location != -1) { // Store the location -> texture mapping TextureTable::iterator it = m_textures.find(location); if (it == m_textures.end()) { // New entry, make sure there are enough texture units GLint maxUnits = getMaxTextureUnits(); if (m_textures.size() + 1 >= static_cast<std::size_t>(maxUnits)) { err() << "Impossible to use texture \"" << name << "\" for shader: all available texture units are used" << std::endl; return; } m_textures[location] = &texture; } else { // Location already used, just replace the texture it->second = &texture; } } } } //////////////////////////////////////////////////////////// void Shader::setParameter(const std::string& name, CurrentTextureType) { if (m_shaderProgram) { ensureGlContext(); // Find the location of the variable in the shader m_currentTexture = getParamLocation(name); } } //////////////////////////////////////////////////////////// void Shader::bind(const Shader* shader) { ensureGlContext(); if (shader && shader->m_shaderProgram) { // Enable the program glCheck(glUseProgramObjectARB(shader->m_shaderProgram)); // Bind the textures shader->bindTextures(); // Bind the current texture if (shader->m_currentTexture != -1) glCheck(glUniform1iARB(shader->m_currentTexture, 0)); } else { // Bind no shader glCheck(glUseProgramObjectARB(0)); } } //////////////////////////////////////////////////////////// bool Shader::isAvailable() { // TODO: Remove this lock 
when it becomes unnecessary in C++11
    Lock lock(mutex);

    static bool available = checkShadersAvailable();

    return available;
}


////////////////////////////////////////////////////////////
// Builds the internal GLSL program from the given source strings.
// Either pointer may be NULL to skip that shader stage; returns false
// (and logs to err()) on any compile or link failure, releasing the
// partially built program first.
bool Shader::compile(const char* vertexShaderCode, const char* fragmentShaderCode)
{
    ensureGlContext();

    // First make sure that we can use shaders
    if (!isAvailable())
    {
        err() << "Failed to create a shader: your system doesn't support shaders "
              << "(you should test Shader::isAvailable() before trying to use the Shader class)" << std::endl;
        return false;
    }

    // Destroy the shader if it was already created
    if (m_shaderProgram)
        glCheck(glDeleteObjectARB(m_shaderProgram));

    // Reset the internal state
    m_currentTexture = -1;
    m_textures.clear();
    m_params.clear();

    // Create the program
    m_shaderProgram = glCheck(glCreateProgramObjectARB());

    // Create the vertex shader if needed
    if (vertexShaderCode)
    {
        // Create and compile the shader
        GLhandleARB vertexShader = glCheck(glCreateShaderObjectARB(GL_VERTEX_SHADER_ARB));
        glCheck(glShaderSourceARB(vertexShader, 1, &vertexShaderCode, NULL));
        glCheck(glCompileShaderARB(vertexShader));

        // Check the compile log
        GLint success;
        glCheck(glGetObjectParameterivARB(vertexShader, GL_OBJECT_COMPILE_STATUS_ARB, &success));
        if (success == GL_FALSE)
        {
            char log[1024];
            glCheck(glGetInfoLogARB(vertexShader, sizeof(log), 0, log));
            err() << "Failed to compile vertex shader:" << std::endl
                  << log << std::endl;
            glCheck(glDeleteObjectARB(vertexShader));
            glCheck(glDeleteObjectARB(m_shaderProgram));
            m_shaderProgram = 0;
            return false;
        }

        // Attach the shader to the program, and delete it (not needed anymore)
        glCheck(glAttachObjectARB(m_shaderProgram, vertexShader));
        glCheck(glDeleteObjectARB(vertexShader));
    }

    // Create the fragment shader if needed
    if (fragmentShaderCode)
    {
        // Create and compile the shader
        GLhandleARB fragmentShader = glCheck(glCreateShaderObjectARB(GL_FRAGMENT_SHADER_ARB));
        glCheck(glShaderSourceARB(fragmentShader, 1, &fragmentShaderCode, NULL));
        glCheck(glCompileShaderARB(fragmentShader));

        // Check the compile log
        GLint success;
        glCheck(glGetObjectParameterivARB(fragmentShader, GL_OBJECT_COMPILE_STATUS_ARB, &success));
        if (success == GL_FALSE)
        {
            char log[1024];
            glCheck(glGetInfoLogARB(fragmentShader, sizeof(log), 0, log));
            err() << "Failed to compile fragment shader:" << std::endl
                  << log << std::endl;
            glCheck(glDeleteObjectARB(fragmentShader));
            glCheck(glDeleteObjectARB(m_shaderProgram));
            m_shaderProgram = 0;
            return false;
        }

        // Attach the shader to the program, and delete it (not needed anymore)
        glCheck(glAttachObjectARB(m_shaderProgram, fragmentShader));
        glCheck(glDeleteObjectARB(fragmentShader));
    }

    // Link the program
    glCheck(glLinkProgramARB(m_shaderProgram));

    // Check the link log
    GLint success;
    glCheck(glGetObjectParameterivARB(m_shaderProgram, GL_OBJECT_LINK_STATUS_ARB, &success));
    if (success == GL_FALSE)
    {
        char log[1024];
        glCheck(glGetInfoLogARB(m_shaderProgram, sizeof(log), 0, log));
        err() << "Failed to link shader:" << std::endl
              << log << std::endl;
        glCheck(glDeleteObjectARB(m_shaderProgram));
        m_shaderProgram = 0;
        return false;
    }

    // Force an OpenGL flush, so that the shader will appear updated
    // in all contexts immediately (solves problems in multi-threaded apps)
    glCheck(glFlush());

    return true;
}


////////////////////////////////////////////////////////////
// Assigns each stored texture to its own texture unit and points the
// matching sampler uniform (it->first) at that unit. Units are handed
// out starting at 1; unit 0 is restored as the active unit at the end.
void Shader::bindTextures() const
{
    TextureTable::const_iterator it = m_textures.begin();
    for (std::size_t i = 0; i < m_textures.size(); ++i)
    {
        // NOTE(review): the cast targets GLsizei but the result is stored
        // in a GLint -- both are int-sized here, but the cast type looks
        // mismatched; confirm intent.
        GLint index = static_cast<GLsizei>(i + 1);
        glCheck(glUniform1iARB(it->first, index));
        glCheck(glActiveTextureARB(GL_TEXTURE0_ARB + index));
        Texture::bind(it->second);
        ++it;
    }

    // Make sure that the texture unit which is left active is the number 0
    glCheck(glActiveTextureARB(GL_TEXTURE0_ARB));
}


////////////////////////////////////////////////////////////
// Returns the uniform location for `name`, memoizing the answer in
// m_params. A miss is cached as -1 too, so the warning below is emitted
// only once per unknown parameter name.
int Shader::getParamLocation(const std::string& name)
{
    // Check the cache
    ParamTable::const_iterator it = m_params.find(name);
    if (it != m_params.end())
    {
        // Already in cache, return it
        return it->second;
    }
    else
    {
        // Not in cache, request the location from OpenGL
        int location = glGetUniformLocationARB(m_shaderProgram, name.c_str());
        m_params.insert(std::make_pair(name, location));

        if (location == -1)
            err() << "Parameter \"" << name << "\" not found in shader" << std::endl;

        return location;
    }
}

} // namespace sf

#else // SFML_OPENGL_ES

// OpenGL ES 1 doesn't support GLSL shaders at all, we have to provide an empty implementation

namespace sf
{
////////////////////////////////////////////////////////////
Shader::CurrentTextureType Shader::CurrentTexture;


////////////////////////////////////////////////////////////
Shader::Shader() :
m_shaderProgram (0),
m_currentTexture(-1)
{
}


////////////////////////////////////////////////////////////
Shader::~Shader()
{
}


////////////////////////////////////////////////////////////
// All load overloads report failure on this platform.
bool Shader::loadFromFile(const std::string& filename, Type type)
{
    return false;
}


////////////////////////////////////////////////////////////
bool Shader::loadFromFile(const std::string& vertexShaderFilename, const std::string& fragmentShaderFilename)
{
    return false;
}


////////////////////////////////////////////////////////////
bool Shader::loadFromMemory(const std::string& shader, Type type)
{
    return false;
}


////////////////////////////////////////////////////////////
bool Shader::loadFromMemory(const std::string& vertexShader, const std::string& fragmentShader)
{
    return false;
}


////////////////////////////////////////////////////////////
bool Shader::loadFromStream(InputStream& stream, Type type)
{
    return false;
}


////////////////////////////////////////////////////////////
bool Shader::loadFromStream(InputStream& vertexShaderStream, InputStream& fragmentShaderStream)
{
    return false;
}


////////////////////////////////////////////////////////////
// All setParameter overloads are deliberate no-ops on this platform.
void Shader::setParameter(const std::string& name, float x)
{
}


////////////////////////////////////////////////////////////
void Shader::setParameter(const std::string& name, float x, float y)
{
}


////////////////////////////////////////////////////////////
void Shader::setParameter(const std::string& name, float x, float y, float z)
{
}


////////////////////////////////////////////////////////////
void Shader::setParameter(const std::string& name, float x, float y, float z, float w)
{
}


////////////////////////////////////////////////////////////
void Shader::setParameter(const std::string& name, const Vector2f& v)
{
}


////////////////////////////////////////////////////////////
void Shader::setParameter(const std::string& name, const Vector3f& v)
{
}


////////////////////////////////////////////////////////////
void Shader::setParameter(const std::string& name, const Color& color)
{
}


////////////////////////////////////////////////////////////
void Shader::setParameter(const std::string& name, const sf::Transform& transform)
{
}


////////////////////////////////////////////////////////////
void Shader::setParameter(const std::string& name, const Texture& texture)
{
}


////////////////////////////////////////////////////////////
void Shader::setParameter(const std::string& name, CurrentTextureType)
{
}


////////////////////////////////////////////////////////////
void Shader::bind(const Shader* shader)
{
}


////////////////////////////////////////////////////////////
// Shaders are never available without GLSL support.
bool Shader::isAvailable()
{
    return false;
}


////////////////////////////////////////////////////////////
bool Shader::compile(const char* vertexShaderCode, const char* fragmentShaderCode)
{
    return false;
}


////////////////////////////////////////////////////////////
void Shader::bindTextures() const
{
}

} // namespace sf

#endif // SFML_OPENGL_ES
{ "content_hash": "98f9797ad8aae6a9af70b1d0b4d48801", "timestamp": "", "source": "github", "line_count": 744, "max_line_length": 138, "avg_line_length": 27.693548387096776, "alnum_prop": 0.5315958066394875, "repo_name": "fmenozzi/games", "id": "1cf7648088610f00e9f7fbd5acb6428b7e396f74", "size": "22188", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "libs/SFML-2.2/src/SFML/Graphics/Shader.cpp", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "6087340" }, { "name": "C++", "bytes": "2287870" }, { "name": "CMake", "bytes": "137153" }, { "name": "CSS", "bytes": "20399" }, { "name": "GLSL", "bytes": "3875" }, { "name": "HTML", "bytes": "1072" }, { "name": "Makefile", "bytes": "6708" }, { "name": "Objective-C", "bytes": "49292" }, { "name": "Objective-C++", "bytes": "193710" }, { "name": "Shell", "bytes": "6739" } ], "symlink_target": "" }
The Google Chrome Content Analysis Connector provides an official mechanism allowing Data Loss Prevention (DLP) agents to more deeply integrate their services with Google Chrome.

DLP agents are background processes on managed computers that allow enterprises to monitor locally running applications for data exfiltration events. They can allow/block these activities based on customer defined DLP policies.

This repository contains the SDK that DLP agents may use to become service providers for the Google Chrome Content Analysis Connector.

The code that must be compiled and linked into the content analysis agent is located in the `agent` subdirectory. A demo implementation of a service provider is located in the `demo` subdirectory.

The code that must be compiled and linked into Google Chrome is located in the `browser` subdirectory.

The Protocol Buffer serialization format is used to serialize messages between the browser and the agent. The protobuf definitions used can be found in the `proto` subdirectory.

## Google Protocol Buffers

This SDK depends on Google Protocol Buffers version 3.18 or later. It may be installed from Google's [download page](https://developers.google.com/protocol-buffers/docs/downloads#release-packages) for your development platform.

It may also be installed using a package manager. The included prepare_build scripts use the Microsoft [vcpkg](https://github.com/microsoft/vcpkg) package manager to install protobuf. vcpkg is available on all supported platforms.

## Build

### Pre-requisites

The following must be installed on the computer before building the demo:

- [git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git) version 2.33 or later.
- [cmake](https://cmake.org/install/) version 3.23 or later.
- A C++ compiler toolchain for your platform.
- On linux, the `realpath` shell tool, available in the `coreutils` package. In Debian-based distributions use `sudo apt install coreutils`.
In Red Hat distributions use `sudo yum install coreutils`.
- On Mac, use `brew install cmake coreutils pkg-config` or an equivalent setup

### Running prepare_build

First get things ready by installing required dependencies:

```
$SDK_DIR/prepare_build <build-dir>
```

where `<build-dir>` is the path to a directory where the demo will be built.

By default, if no argument is provided, a directory named `build` will be created in the project root. Any output within the `build/` directory will be ignored by version control.

`prepare_build` performs the following steps:

1. Downloads the vcpkg package manager.
2. Downloads and builds the Google Protocol Buffers library.
3. Creates build files for your specific platform.

### CMake Targets

To build the demo run the command `cmake --build <build-dir>`.

To build the protocol buffer targets run the command `cmake --build <build-dir> --target proto`.
{ "content_hash": "2302d928947eb15af61f9c15807fc2b5", "timestamp": "", "source": "github", "line_count": 68, "max_line_length": 119, "avg_line_length": 42.294117647058826, "alnum_prop": 0.7882475660639777, "repo_name": "chromium/content_analysis_sdk", "id": "8d505b530af61cf09f981b3c7c8e3090f5ed8f9c", "size": "2930", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "README.md", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Batchfile", "bytes": "1980" }, { "name": "C++", "bytes": "96204" }, { "name": "CMake", "bytes": "6216" }, { "name": "Shell", "bytes": "1568" } ], "symlink_target": "" }
title: "ADDMONTHS"
layout: "function"
isPage: "true"
link: "/warpscript/functions"
desc: "Adds a certain number of months to a timestamp."
categoryTree: ["reference","functions"]
category: "reference"
---

The `ADDMONTHS` function modifies a timestamp or tselements list by adding a specified number of months to it.

When modifying a timestamp, an optional timezone can be specified so the computation is performed using it instead of `UTC`. Timezone names are the ones defined in [Joda Time](http://joda-time.sourceforge.net/timezones.html).

This function will take into account leap years and, when working with timestamps, possible daylight saving time change.

## Example ##

{% raw %}
<warp10-warpscript-widget backend="{{backend}}" exec-endpoint="{{execEndpoint}}">'2016-10-30T12:00:00Z' TOTIMESTAMP
'Europe/Paris'
1 ADDMONTHS
'Europe/Paris' ISO8601

'2016-10-30T12:00:00Z' TOTIMESTAMP
->TSELEMENTS
1 ADDMONTHS
</warp10-warpscript-widget>
{% endraw %}
{ "content_hash": "9c539124784e2130c957664cc8e22894", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 235, "avg_line_length": 34.535714285714285, "alnum_prop": 0.7518097207859359, "repo_name": "slambour/www.warp10.io", "id": "e9c5f3fa6ad3c9ad480f088bcb8be9278cb6a4e5", "size": "971", "binary": false, "copies": "1", "ref": "refs/heads/gh-pages", "path": "reference/functions/function_ADDMONTHS.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "23519" }, { "name": "HTML", "bytes": "20139" }, { "name": "JavaScript", "bytes": "63025" } ], "symlink_target": "" }
// Unit tests for the @RequiredProperty mapping decorator: verifies that
// decorating a field records its mapping metadata (path, exclusion flags,
// readOnly) retrievable via getRequiredProperties(), and that an empty
// path makes the decorator throw.
import { getRequiredProperties } from '../helpers';
import { RequiredProperty } from '../../mapper/required-property.decorator';

// Fixture class exercising each supported decorator option combination.
class RequiredPropertyTest {
  @RequiredProperty( { path: 'boom' })
  pathOnly;

  @RequiredProperty( { path: 'excludeNull', excludeIfNull: true })
  excludeNull;

  @RequiredProperty( { path: 'excludeUndefined', excludeIfUndefined: true })
  excludeUndefined;

  @RequiredProperty( { path: 'important', readOnly: true })
  important;
}

// Fixture used only to apply the decorator manually with an empty path.
class WillThrow {
  shouldThrow;
}

describe( 'RequiredProperty', () => {

  it( 'should return a decorator function', () => {
    const decorator = RequiredProperty( { path: 'foo' });

    expect( decorator ).toBeDefined();
  });

  it( 'should throw if path is empty', () => {
    // Apply the decorator by hand so the throw happens inside the expect().
    expect(() => {
      const decorator = RequiredProperty( { path: '' });
      decorator( WillThrow, 'shouldThrow' );
    }).toThrow( 'RequiredProperty mapping decorator requires path to be defined' );
  });

  describe( 'decorator', () => {

    it( 'should add mapping info with default options when only path is provided', () => {
      const instance = new RequiredPropertyTest();

      const requiredProperties = getRequiredProperties( instance );

      // Flags not passed to the decorator stay undefined; readOnly defaults to false.
      expect( requiredProperties ).toBeDefined();
      expect( requiredProperties[ 'pathOnly' ].path ).toBe( 'boom' );
      expect( requiredProperties[ 'pathOnly' ].excludeIfNull ).toBeUndefined();
      expect( requiredProperties[ 'pathOnly' ].excludeIfUndefined ).toBeUndefined();
      expect( requiredProperties[ 'pathOnly' ].readOnly ).toBe(false);
    });

    it( 'should add mapping info with excludeIfNull', () => {
      const instance = new RequiredPropertyTest();

      const requiredProperties = getRequiredProperties( instance );

      expect( requiredProperties ).toBeDefined();
      expect( requiredProperties[ 'excludeNull' ].path ).toBe( 'excludeNull' );
      expect( requiredProperties[ 'excludeNull' ].excludeIfNull ).toBe( true );
      expect( requiredProperties[ 'excludeNull' ].readOnly ).toBe(false);
    });

    it( 'should add mapping info with excludeIfUndefined', () => {
      const instance = new RequiredPropertyTest();

      const requiredProperties = getRequiredProperties( instance );

      expect( requiredProperties ).toBeDefined();
      expect( requiredProperties[ 'excludeUndefined' ].path ).toBe( 'excludeUndefined' );
      expect( requiredProperties[ 'excludeUndefined' ].excludeIfUndefined ).toBe( true );
      expect( requiredProperties[ 'excludeUndefined' ].readOnly ).toBe(false);
    });

    it( 'should add mapping info with readOnly', () => {
      const instance = new RequiredPropertyTest();

      const requiredProperties = getRequiredProperties( instance );

      expect( requiredProperties ).toBeDefined();
      expect( requiredProperties[ 'important' ].path ).toBe( 'important' );
      expect( requiredProperties[ 'important' ].readOnly ).toBe(true);
    });
  });
});
{ "content_hash": "2fb23b84be6747a210331d69d14a851d", "timestamp": "", "source": "github", "line_count": 84, "max_line_length": 90, "avg_line_length": 35.166666666666664, "alnum_prop": 0.6723087339201084, "repo_name": "SimonNodel-AI/typescript-object-mapper", "id": "e3a02fcd4ac75a4442316f1e362e155f07b35812", "size": "2954", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/tests/specs/required-property.decorator.spec.ts", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "376" }, { "name": "TypeScript", "bytes": "44950" } ], "symlink_target": "" }
<?php

declare(strict_types=1);

namespace Nelliel\IfThens;

// Direct-access guard: bail out unless loaded through Nelliel.
defined('NELLIEL_VERSION') or die('NOPE.AVI');

/**
 * Contract for evaluating a set of if/then conditions.
 */
interface Conditions
{
    /**
     * Evaluates the given conditions.
     *
     * @param array $conditions Conditions to check; the expected structure is
     *                          defined by each implementation.
     * @return bool True when the conditions are satisfied (implementation-defined).
     */
    public function check(array $conditions): bool;
}
{ "content_hash": "c255d69a2832aca27b9ef41e0fa60ffe", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 51, "avg_line_length": 14.384615384615385, "alnum_prop": 0.7219251336898396, "repo_name": "OtakuMegane/Nelliel", "id": "338ac8f27af0b3a896cde286e5a27baed92d79c5", "size": "187", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "core/include/IfThens/Conditions.php", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "22984" }, { "name": "HTML", "bytes": "81293" }, { "name": "JavaScript", "bytes": "35796" }, { "name": "PHP", "bytes": "452799" } ], "symlink_target": "" }
namespace NorthwindWindowsStore.DAL
{
    using NorthwindWindowsStore.DAL.Model;

    /// <summary>
    /// Repository for the <c>Category_Sales_for_1997</c> view model, built on
    /// the generic <c>BaseView&lt;T&gt;</c> base class declared elsewhere in
    /// the project.
    /// </summary>
    public class CategorySalesFor1997Repository : BaseView<Category_Sales_for_1997>
    {
        /// <summary>
        /// Creates the repository.
        /// </summary>
        /// <param name="path">Forwarded unchanged to the base class.</param>
        public CategorySalesFor1997Repository(string path)
            : base(path)
        {
        }
    }
}
{ "content_hash": "d9aa6e97c982238b854f7d6be54fa384", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 83, "avg_line_length": 23.666666666666668, "alnum_prop": 0.6690140845070423, "repo_name": "krzysztofkolek/NorthwindWindowsStore", "id": "3fb3b584ac62a3c6ed85e2084d24cd9f3c35ae0f", "size": "284", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "NorthwindWindowsStoreService/NorthwindWindowsStore.DAL/CategorySalesFor1997Repository.cs", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "122" }, { "name": "C#", "bytes": "321717" }, { "name": "CSS", "bytes": "2198" }, { "name": "HTML", "bytes": "5067" }, { "name": "JavaScript", "bytes": "10714" } ], "symlink_target": "" }
#include <algorithm>
using std::sort; using std::for_each;

#include <functional>
using std::bind; using namespace std::placeholders;

#include <string>
using std::string;

#include <vector>
using std::vector;

#include <iostream>
using std::cin; using std::cout; using std::endl;

// Predicate: orders strings by increasing length.
bool isShorter(const string &s1, const string &s2)
{
    return s1.size() < s2.size();
}

// Predicate: lexicographic "less than" on strings.
bool LT(const string &s1, const string &s2)
{
    return s1 < s2;
}

// Prints every word followed by a space, then ends the line.
void print(const vector<string> &words)
{
    for_each(words.begin(), words.end(),
             [](const string &s) { cout << s << " "; });
    cout << endl;
}

int main()
{
    vector<string> words;

    // Gather every word from standard input into a single vector.
    for (string word; cin >> word; )
        words.push_back(word);

    print(words);

    const vector<string> original = words; // keep the unsorted input around

    // Sort using string's built-in operator<.
    sort(words.begin(), words.end());

    words = original;

    // Same ordering, expressed through the named LT predicate.
    sort(words.begin(), words.end(), LT);
    print(words);

    words = original;

    // Eliminate duplicates: sort, compact adjacent repeats, trim the tail.
    sort(words.begin(), words.end());
    words.erase(unique(words.begin(), words.end()), words.end());

    // Order the deduplicated words by length (stable keeps ties in order).
    stable_sort(words.begin(), words.end(), isShorter);
    print(words);

    words = original;

    // Sort the original input on word length, shortest to longest.
    sort(words.begin(), words.end(), isShorter);
    print(words);

    // Swap isShorter's arguments with bind to get longest-to-shortest.
    sort(words.begin(), words.end(), bind(isShorter, _2, _1));
    print(words);

    return 0;
}
{ "content_hash": "026a0b0fc8b5f22718c3d487cf35a0e1", "timestamp": "", "source": "github", "line_count": 82, "max_line_length": 63, "avg_line_length": 23.195121951219512, "alnum_prop": 0.6587802313354364, "repo_name": "Ztiany/Repository", "id": "0e17e5bd7aede6ab4cd8a002c71e7ef4dece46f7", "size": "3181", "binary": false, "copies": "11", "ref": "refs/heads/master", "path": "C&C++/cplusplus-program/_cpp_primer_source/GCC_4_7_0/10/elimDups.cc", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "38608" }, { "name": "C++", "bytes": "52662" }, { "name": "CMake", "bytes": "316" }, { "name": "CSS", "bytes": "28" }, { "name": "Groovy", "bytes": "151193" }, { "name": "HTML", "bytes": "126611" }, { "name": "Java", "bytes": "632743" }, { "name": "Kotlin", "bytes": "81491" }, { "name": "Python", "bytes": "16189" } ], "symlink_target": "" }
namespace luabind { namespace detail {

// A cast between two registered classes is a plain void* -> void* function.
typedef void*(*cast_function)(void*);
typedef std::size_t class_id;

// Sentinel returned when a type has no registered class_id.
class_id const unknown_class = std::numeric_limits<class_id>::max();

class class_rep;

// Holds the registered cast_functions between class ids (insert) and
// applies them (cast). The representation is hidden behind a pimpl.
class LUABIND_API cast_graph
{
public:
    cast_graph();
    ~cast_graph();

    // `src` and `p` here describe the *most derived* object. This means that
    // for a polymorphic type, the pointer must be cast with
    // dynamic_cast<void*> before being passed in here, and `src` has to
    // match typeid(*p).
    std::pair<void*, int> cast(
        void* p, class_id src, class_id target
      , class_id dynamic_id, void const* dynamic_ptr) const;
    void insert(class_id src, class_id target, cast_function cast);

private:
    class impl;
    boost::scoped_ptr<impl> m_impl;
};

// Maps a type_id to a class_id. Note that this actually partitions the
// id-space into two, using one half for "local" ids; ids that are used only as
// keys into the conversion cache. This is needed because we need a unique key
// even for types that hasn't been registered explicitly.
class LUABIND_API class_id_map
{
public:
    class_id_map();

    class_id get(type_id const& type) const;
    class_id get_local(type_id const& type);
    void put(class_id id, type_id const& type);

private:
    typedef std::map<type_id, class_id> map_type;
    map_type m_classes;
    class_id m_local_id;   // next local id to hand out; starts at local_id_base

    static class_id const local_id_base;
};

inline class_id_map::class_id_map()
  : m_local_id(local_id_base)
{}

// Returns unknown_class for unregistered types and for types that only
// have a local (cache-only) id.
inline class_id class_id_map::get(type_id const& type) const
{
    map_type::const_iterator i = m_classes.find(type);
    if (i == m_classes.end() || i->second >= local_id_base)
        return unknown_class;
    return i->second;
}

// Returns the type's id, allocating a fresh local id on first sight.
inline class_id class_id_map::get_local(type_id const& type)
{
    std::pair<map_type::iterator, bool> result = m_classes.insert(
        std::make_pair(type, 0));

    if (result.second)
        result.first->second = m_local_id++;

    assert(m_local_id >= local_id_base);

    return result.first->second;
}

// Registers a non-local id for the type; a pre-existing local id is
// overwritten, a differing non-local id trips the assert.
inline void class_id_map::put(class_id id, type_id const& type)
{
    assert(id < local_id_base);

    std::pair<map_type::iterator, bool> result = m_classes.insert(
        std::make_pair(type, 0));

    assert(
        result.second
        || result.first->second == id
        || result.first->second >= local_id_base
    );

    result.first->second = id;
}

// Dense class_id -> class_rep* table; get() returns 0 for unknown ids.
class class_map
{
public:
    class_rep* get(class_id id) const;
    void put(class_id id, class_rep* cls);

private:
    std::vector<class_rep*> m_classes;
};

inline class_rep* class_map::get(class_id id) const
{
    if (id >= m_classes.size())
        return 0;
    return m_classes[id];
}

inline void class_map::put(class_id id, class_rep* cls)
{
    if (id >= m_classes.size())
        m_classes.resize(id + 1);
    m_classes[id] = cls;
}

// Type-erased wrapper performing a static_cast from S to T through void*.
template <class S, class T>
struct static_cast_
{
    static void* execute(void* p)
    {
        return static_cast<T*>(static_cast<S*>(p));
    }
};

// Type-erased wrapper performing a dynamic_cast from S to T through void*.
template <class S, class T>
struct dynamic_cast_
{
    static void* execute(void* p)
    {
        return dynamic_cast<T*>(static_cast<S*>(p));
    }
};

// Thread safe class_id allocation.
LUABIND_API class_id allocate_class_id();

// Per-type unique id, allocated once during static initialization.
template <class T>
struct registered_class
{
    static class_id const id;
};

template <class T>
class_id const registered_class<T>::id = allocate_class_id();

// `T const` shares the id of `T`.
template <class T>
struct registered_class<T const>
  : registered_class<T>
{};

}} // namespace luabind::detail

#endif // LUABIND_INHERITANCE_090217_HPP
{ "content_hash": "bc5b06df26f2adb364ad63152aa66d87", "timestamp": "", "source": "github", "line_count": 153, "max_line_length": 79, "avg_line_length": 23.235294117647058, "alnum_prop": 0.6455696202531646, "repo_name": "LuaDist/luabind", "id": "7cf82aa92a2d7ee91a7f1eaed093219d300e2e14", "size": "4011", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "luabind/detail/inheritance.hpp", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "553452" }, { "name": "JavaScript", "bytes": "1618" }, { "name": "Lua", "bytes": "6527" }, { "name": "PHP", "bytes": "599" }, { "name": "Perl", "bytes": "211" }, { "name": "Ruby", "bytes": "215" }, { "name": "Shell", "bytes": "5681" } ], "symlink_target": "" }
package org.apache.flink.kubernetes.kubeclient.factory; import org.apache.flink.configuration.DeploymentOptions; import org.apache.flink.configuration.HighAvailabilityOptions; import org.apache.flink.configuration.SecurityOptions; import org.apache.flink.kubernetes.KubernetesTestUtils; import org.apache.flink.kubernetes.configuration.KubernetesConfigOptions; import org.apache.flink.kubernetes.configuration.KubernetesConfigOptionsInternal; import org.apache.flink.kubernetes.configuration.KubernetesDeploymentTarget; import org.apache.flink.kubernetes.entrypoint.KubernetesSessionClusterEntrypoint; import org.apache.flink.kubernetes.highavailability.KubernetesHaServicesFactory; import org.apache.flink.kubernetes.kubeclient.FlinkPod; import org.apache.flink.kubernetes.kubeclient.KubernetesJobManagerSpecification; import org.apache.flink.kubernetes.kubeclient.KubernetesJobManagerTestBase; import org.apache.flink.kubernetes.kubeclient.decorators.ExternalServiceDecorator; import org.apache.flink.kubernetes.kubeclient.decorators.FlinkConfMountDecorator; import org.apache.flink.kubernetes.kubeclient.decorators.HadoopConfMountDecorator; import org.apache.flink.kubernetes.kubeclient.decorators.InternalServiceDecorator; import org.apache.flink.kubernetes.kubeclient.decorators.KerberosMountDecorator; import org.apache.flink.kubernetes.utils.Constants; import org.apache.flink.kubernetes.utils.KubernetesUtils; import io.fabric8.kubernetes.api.model.ConfigMap; import io.fabric8.kubernetes.api.model.Container; import io.fabric8.kubernetes.api.model.HasMetadata; import io.fabric8.kubernetes.api.model.OwnerReference; import io.fabric8.kubernetes.api.model.PodSpec; import io.fabric8.kubernetes.api.model.Quantity; import io.fabric8.kubernetes.api.model.Secret; import io.fabric8.kubernetes.api.model.Service; import io.fabric8.kubernetes.api.model.apps.Deployment; import io.fabric8.kubernetes.api.model.apps.DeploymentSpec; import org.hamcrest.Matchers; import org.junit.Test; import 
java.io.IOException; import java.util.Base64; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import static org.apache.flink.configuration.GlobalConfiguration.FLINK_CONF_FILENAME; import static org.apache.flink.kubernetes.utils.Constants.CONFIG_FILE_LOG4J_NAME; import static org.apache.flink.kubernetes.utils.Constants.CONFIG_FILE_LOGBACK_NAME; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; /** General tests for the {@link KubernetesJobManagerFactory}. */ public class KubernetesJobManagerFactoryTest extends KubernetesJobManagerTestBase { private static final String SERVICE_ACCOUNT_NAME = "service-test"; private static final String ENTRY_POINT_CLASS = KubernetesSessionClusterEntrypoint.class.getCanonicalName(); private static final String EXISTING_HADOOP_CONF_CONFIG_MAP = "hadoop-conf"; private static final String OWNER_REFERENCE_STRING = "apiVersion:cloudflow.io/v1beta1,blockOwnerDeletion:true," + "controller:true,kind:FlinkApplication,name:testapp,uid:e3c9aa3f-cc42-4178-814a-64aa15c82373"; private static final List<OwnerReference> OWNER_REFERENCES = Collections.singletonList( new OwnerReference( "cloudflow.io/v1beta1", true, true, "FlinkApplication", "testapp", "e3c9aa3f-cc42-4178-814a-64aa15c82373")); private static final int JOBMANAGER_REPLICAS = 2; private final FlinkPod flinkPod = new FlinkPod.Builder().build(); protected KubernetesJobManagerSpecification kubernetesJobManagerSpecification; @Override protected void setupFlinkConfig() { super.setupFlinkConfig(); flinkConfig.set(DeploymentOptions.TARGET, KubernetesDeploymentTarget.SESSION.getName()); flinkConfig.set(KubernetesConfigOptionsInternal.ENTRY_POINT_CLASS, 
ENTRY_POINT_CLASS); flinkConfig.set(KubernetesConfigOptions.JOB_MANAGER_SERVICE_ACCOUNT, SERVICE_ACCOUNT_NAME); flinkConfig.set( SecurityOptions.KERBEROS_LOGIN_KEYTAB, kerberosDir.toString() + "/" + KEYTAB_FILE); flinkConfig.set(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL, "test"); flinkConfig.set( SecurityOptions.KERBEROS_KRB5_PATH, kerberosDir.toString() + "/" + KRB5_CONF_FILE); flinkConfig.setString( KubernetesConfigOptions.JOB_MANAGER_OWNER_REFERENCE.key(), OWNER_REFERENCE_STRING); } @Override protected void onSetup() throws Exception { super.onSetup(); KubernetesTestUtils.createTemporyFile("some data", flinkConfDir, CONFIG_FILE_LOGBACK_NAME); KubernetesTestUtils.createTemporyFile("some data", flinkConfDir, CONFIG_FILE_LOG4J_NAME); generateKerberosFileItems(); } @Test public void testDeploymentMetadata() throws IOException { kubernetesJobManagerSpecification = KubernetesJobManagerFactory.buildKubernetesJobManagerSpecification( flinkPod, kubernetesJobManagerParameters); final Deployment resultDeployment = this.kubernetesJobManagerSpecification.getDeployment(); assertEquals(Constants.APPS_API_VERSION, resultDeployment.getApiVersion()); assertEquals( KubernetesUtils.getDeploymentName(CLUSTER_ID), resultDeployment.getMetadata().getName()); final Map<String, String> expectedLabels = getCommonLabels(); expectedLabels.put(Constants.LABEL_COMPONENT_KEY, Constants.LABEL_COMPONENT_JOB_MANAGER); expectedLabels.putAll(userLabels); assertEquals(expectedLabels, resultDeployment.getMetadata().getLabels()); assertThat( resultDeployment.getMetadata().getOwnerReferences(), Matchers.containsInAnyOrder(OWNER_REFERENCES.toArray())); } @Test public void testDeploymentSpec() throws IOException { kubernetesJobManagerSpecification = KubernetesJobManagerFactory.buildKubernetesJobManagerSpecification( flinkPod, kubernetesJobManagerParameters); final DeploymentSpec resultDeploymentSpec = this.kubernetesJobManagerSpecification.getDeployment().getSpec(); assertEquals(1, 
resultDeploymentSpec.getReplicas().intValue()); final Map<String, String> expectedLabels = new HashMap<>(getCommonLabels()); expectedLabels.put(Constants.LABEL_COMPONENT_KEY, Constants.LABEL_COMPONENT_JOB_MANAGER); expectedLabels.putAll(userLabels); assertEquals(expectedLabels, resultDeploymentSpec.getTemplate().getMetadata().getLabels()); assertEquals(expectedLabels, resultDeploymentSpec.getSelector().getMatchLabels()); assertNotNull(resultDeploymentSpec.getTemplate().getSpec()); } @Test public void testPodSpec() throws IOException { kubernetesJobManagerSpecification = KubernetesJobManagerFactory.buildKubernetesJobManagerSpecification( flinkPod, kubernetesJobManagerParameters); final PodSpec resultPodSpec = this.kubernetesJobManagerSpecification .getDeployment() .getSpec() .getTemplate() .getSpec(); assertEquals(1, resultPodSpec.getContainers().size()); assertEquals(SERVICE_ACCOUNT_NAME, resultPodSpec.getServiceAccountName()); assertEquals(3, resultPodSpec.getVolumes().size()); final Container resultedMainContainer = resultPodSpec.getContainers().get(0); assertEquals(Constants.MAIN_CONTAINER_NAME, resultedMainContainer.getName()); assertEquals(CONTAINER_IMAGE, resultedMainContainer.getImage()); assertEquals( CONTAINER_IMAGE_PULL_POLICY.name(), resultedMainContainer.getImagePullPolicy()); assertEquals(3, resultedMainContainer.getEnv().size()); assertTrue( resultedMainContainer.getEnv().stream() .anyMatch(envVar -> envVar.getName().equals("key1"))); assertEquals(3, resultedMainContainer.getPorts().size()); final Map<String, Quantity> requests = resultedMainContainer.getResources().getRequests(); assertEquals(Double.toString(JOB_MANAGER_CPU), requests.get("cpu").getAmount()); assertEquals(String.valueOf(JOB_MANAGER_MEMORY), requests.get("memory").getAmount()); assertEquals(1, resultedMainContainer.getCommand().size()); // The args list is [bash, -c, 'java -classpath $FLINK_CLASSPATH ...']. 
assertEquals(3, resultedMainContainer.getArgs().size()); assertEquals(3, resultedMainContainer.getVolumeMounts().size()); } @Test public void testAdditionalResourcesSize() throws IOException { kubernetesJobManagerSpecification = KubernetesJobManagerFactory.buildKubernetesJobManagerSpecification( flinkPod, kubernetesJobManagerParameters); final List<HasMetadata> resultAdditionalResources = this.kubernetesJobManagerSpecification.getAccompanyingResources(); assertEquals(5, resultAdditionalResources.size()); final List<HasMetadata> resultServices = resultAdditionalResources.stream() .filter(x -> x instanceof Service) .collect(Collectors.toList()); assertEquals(2, resultServices.size()); final List<HasMetadata> resultConfigMaps = resultAdditionalResources.stream() .filter(x -> x instanceof ConfigMap) .collect(Collectors.toList()); assertEquals(2, resultConfigMaps.size()); final List<HasMetadata> resultSecrets = resultAdditionalResources.stream() .filter(x -> x instanceof Secret) .collect(Collectors.toList()); assertEquals(1, resultSecrets.size()); } @Test public void testServices() throws IOException { kubernetesJobManagerSpecification = KubernetesJobManagerFactory.buildKubernetesJobManagerSpecification( flinkPod, kubernetesJobManagerParameters); final List<Service> resultServices = this.kubernetesJobManagerSpecification.getAccompanyingResources().stream() .filter(x -> x instanceof Service) .map(x -> (Service) x) .collect(Collectors.toList()); assertEquals(2, resultServices.size()); final List<Service> internalServiceCandidates = resultServices.stream() .filter( x -> x.getMetadata() .getName() .equals( InternalServiceDecorator .getInternalServiceName( CLUSTER_ID))) .collect(Collectors.toList()); assertEquals(1, internalServiceCandidates.size()); final List<Service> restServiceCandidates = resultServices.stream() .filter( x -> x.getMetadata() .getName() .equals( ExternalServiceDecorator .getExternalServiceName( CLUSTER_ID))) .collect(Collectors.toList()); assertEquals(1, 
restServiceCandidates.size()); final Service resultInternalService = internalServiceCandidates.get(0); assertEquals(2, resultInternalService.getMetadata().getLabels().size()); assertNull(resultInternalService.getSpec().getType()); assertEquals( Constants.HEADLESS_SERVICE_CLUSTER_IP, resultInternalService.getSpec().getClusterIP()); assertEquals(2, resultInternalService.getSpec().getPorts().size()); assertEquals(5, resultInternalService.getSpec().getSelector().size()); final Service resultRestService = restServiceCandidates.get(0); assertEquals(2, resultRestService.getMetadata().getLabels().size()); assertEquals(resultRestService.getSpec().getType(), "LoadBalancer"); assertEquals(1, resultRestService.getSpec().getPorts().size()); assertEquals(5, resultRestService.getSpec().getSelector().size()); } @Test public void testKerberosConfConfigMap() throws IOException { kubernetesJobManagerSpecification = KubernetesJobManagerFactory.buildKubernetesJobManagerSpecification( flinkPod, kubernetesJobManagerParameters); final ConfigMap resultConfigMap = (ConfigMap) this.kubernetesJobManagerSpecification.getAccompanyingResources().stream() .filter( x -> x instanceof ConfigMap && x.getMetadata() .getName() .equals( KerberosMountDecorator .getKerberosKrb5confConfigMapName( CLUSTER_ID))) .collect(Collectors.toList()) .get(0); assertEquals(Constants.API_VERSION, resultConfigMap.getApiVersion()); assertEquals( KerberosMountDecorator.getKerberosKrb5confConfigMapName(CLUSTER_ID), resultConfigMap.getMetadata().getName()); final Map<String, String> resultDatas = resultConfigMap.getData(); assertEquals(1, resultDatas.size()); assertEquals("some conf", resultDatas.get(KRB5_CONF_FILE)); } @Test public void testKerberosKeytabSecret() throws IOException { kubernetesJobManagerSpecification = KubernetesJobManagerFactory.buildKubernetesJobManagerSpecification( flinkPod, kubernetesJobManagerParameters); final Secret resultSecret = (Secret) 
this.kubernetesJobManagerSpecification.getAccompanyingResources().stream() .filter( x -> x instanceof Secret && x.getMetadata() .getName() .equals( KerberosMountDecorator .getKerberosKeytabSecretName( CLUSTER_ID))) .collect(Collectors.toList()) .get(0); final Map<String, String> resultDatas = resultSecret.getData(); assertEquals(1, resultDatas.size()); assertEquals( Base64.getEncoder().encodeToString("some keytab".getBytes()), resultDatas.get(KEYTAB_FILE)); } @Test public void testFlinkConfConfigMap() throws IOException { kubernetesJobManagerSpecification = KubernetesJobManagerFactory.buildKubernetesJobManagerSpecification( flinkPod, kubernetesJobManagerParameters); final ConfigMap resultConfigMap = (ConfigMap) this.kubernetesJobManagerSpecification.getAccompanyingResources().stream() .filter( x -> x instanceof ConfigMap && x.getMetadata() .getName() .equals( FlinkConfMountDecorator .getFlinkConfConfigMapName( CLUSTER_ID))) .collect(Collectors.toList()) .get(0); assertEquals(2, resultConfigMap.getMetadata().getLabels().size()); final Map<String, String> resultDatas = resultConfigMap.getData(); assertEquals(3, resultDatas.size()); assertEquals("some data", resultDatas.get(CONFIG_FILE_LOG4J_NAME)); assertEquals("some data", resultDatas.get(CONFIG_FILE_LOGBACK_NAME)); assertTrue( resultDatas .get(FLINK_CONF_FILENAME) .contains( KubernetesConfigOptionsInternal.ENTRY_POINT_CLASS.key() + ": " + ENTRY_POINT_CLASS)); } @Test public void testExistingHadoopConfigMap() throws IOException { flinkConfig.set( KubernetesConfigOptions.HADOOP_CONF_CONFIG_MAP, EXISTING_HADOOP_CONF_CONFIG_MAP); kubernetesJobManagerSpecification = KubernetesJobManagerFactory.buildKubernetesJobManagerSpecification( flinkPod, kubernetesJobManagerParameters); assertFalse( kubernetesJobManagerSpecification.getAccompanyingResources().stream() .anyMatch( resource -> resource.getMetadata() .getName() .equals( HadoopConfMountDecorator .getHadoopConfConfigMapName( CLUSTER_ID)))); final PodSpec podSpec = 
kubernetesJobManagerSpecification.getDeployment().getSpec().getTemplate().getSpec(); assertTrue( podSpec.getVolumes().stream() .anyMatch( volume -> volume.getConfigMap() .getName() .equals(EXISTING_HADOOP_CONF_CONFIG_MAP))); } @Test public void testHadoopConfConfigMap() throws IOException { setHadoopConfDirEnv(); generateHadoopConfFileItems(); kubernetesJobManagerSpecification = KubernetesJobManagerFactory.buildKubernetesJobManagerSpecification( flinkPod, kubernetesJobManagerParameters); final ConfigMap resultConfigMap = (ConfigMap) kubernetesJobManagerSpecification.getAccompanyingResources().stream() .filter( x -> x instanceof ConfigMap && x.getMetadata() .getName() .equals( HadoopConfMountDecorator .getHadoopConfConfigMapName( CLUSTER_ID))) .collect(Collectors.toList()) .get(0); assertEquals(2, resultConfigMap.getMetadata().getLabels().size()); final Map<String, String> resultDatas = resultConfigMap.getData(); assertEquals(2, resultDatas.size()); assertEquals("some data", resultDatas.get("core-site.xml")); assertEquals("some data", resultDatas.get("hdfs-site.xml")); } @Test public void testEmptyHadoopConfDirectory() throws IOException { setHadoopConfDirEnv(); kubernetesJobManagerSpecification = KubernetesJobManagerFactory.buildKubernetesJobManagerSpecification( flinkPod, kubernetesJobManagerParameters); assertFalse( kubernetesJobManagerSpecification.getAccompanyingResources().stream() .anyMatch( resource -> resource.getMetadata() .getName() .equals( HadoopConfMountDecorator .getHadoopConfConfigMapName( CLUSTER_ID)))); } @Test public void testSetJobManagerDeploymentReplicas() throws Exception { flinkConfig.set( HighAvailabilityOptions.HA_MODE, KubernetesHaServicesFactory.class.getCanonicalName()); flinkConfig.set( KubernetesConfigOptions.KUBERNETES_JOBMANAGER_REPLICAS, JOBMANAGER_REPLICAS); kubernetesJobManagerSpecification = KubernetesJobManagerFactory.buildKubernetesJobManagerSpecification( flinkPod, kubernetesJobManagerParameters); assertThat( 
kubernetesJobManagerSpecification.getDeployment().getSpec().getReplicas(), is(JOBMANAGER_REPLICAS)); } }
{ "content_hash": "77df0e2afe99170f0770b1dd4bb4a532", "timestamp": "", "source": "github", "line_count": 469, "max_line_length": 116, "avg_line_length": 49.58422174840085, "alnum_prop": 0.5682218877660719, "repo_name": "tillrohrmann/flink", "id": "248121bc3f67b595a8a8d615aa73af727b93cbbd", "size": "24060", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "flink-kubernetes/src/test/java/org/apache/flink/kubernetes/kubeclient/factory/KubernetesJobManagerFactoryTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "20448" }, { "name": "Batchfile", "bytes": "1863" }, { "name": "C", "bytes": "847" }, { "name": "Clojure", "bytes": "84400" }, { "name": "Dockerfile", "bytes": "5563" }, { "name": "FreeMarker", "bytes": "86639" }, { "name": "GAP", "bytes": "139514" }, { "name": "HTML", "bytes": "135625" }, { "name": "HiveQL", "bytes": "78611" }, { "name": "Java", "bytes": "83158201" }, { "name": "JavaScript", "bytes": "1829" }, { "name": "Less", "bytes": "65918" }, { "name": "Makefile", "bytes": "5134" }, { "name": "Python", "bytes": "2433935" }, { "name": "Scala", "bytes": "10501870" }, { "name": "Shell", "bytes": "525933" }, { "name": "TypeScript", "bytes": "288472" }, { "name": "q", "bytes": "7406" } ], "symlink_target": "" }
{% if top_line %} {{ top_line }} {{ top_underline * ((top_line)|length)}} {% endif %} {% for section, _ in sections.items() %} {% set underline = underlines[0] %}{% if section %}{{section}} {{ underline * section|length }} {% set underline = underlines[1] %} {% endif %} {% if sections[section] %} {% for category, val in definitions.items() if category in sections[section]%} {{ definitions[category]['name'] }} {{ underline * definitions[category]['name']|length }} {% if definitions[category]['showcontent'] %} {% for text, values in sections[section][category].items() %} * {{ values|join(', ') }}: {{ text }} {% endfor %} {% else %} * {{ sections[section][category]['']|join(', ') }} {% endif %} {% if sections[section][category]|length == 0 %} No significant changes. {% else %} {% endif %} {% endfor %} {% else %} No significant changes. {% endif %} {% endfor %}
{ "content_hash": "a0524d191c1fec7cc8c207c8f137272f", "timestamp": "", "source": "github", "line_count": 35, "max_line_length": 78, "avg_line_length": 25.057142857142857, "alnum_prop": 0.6043329532497149, "repo_name": "pypa/setuptools", "id": "7f507342d7d2a9e3ee1ec5ba238143867279baea", "size": "877", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "tools/towncrier_template.rst", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "2173" }, { "name": "C", "bytes": "36107" }, { "name": "HTML", "bytes": "266" }, { "name": "Python", "bytes": "4027592" } ], "symlink_target": "" }
.oo-ui-icon-alert { background-image: url('themes/wikimediaui/images/icons/alert.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/alert.svg'); } .oo-ui-image-error.oo-ui-icon-alert { background-image: url('themes/wikimediaui/images/icons/alert-error.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/alert-error.svg'); } .oo-ui-image-warning.oo-ui-icon-alert { background-image: url('themes/wikimediaui/images/icons/alert-warning.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/alert-warning.svg'); } .oo-ui-image-invert.oo-ui-icon-alert { background-image: url('themes/wikimediaui/images/icons/alert-invert.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/alert-invert.svg'); } .oo-ui-image-progressive.oo-ui-icon-alert { background-image: url('themes/wikimediaui/images/icons/alert-progressive.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/alert-progressive.svg'); } .oo-ui-icon-bell { background-image: url('themes/wikimediaui/images/icons/bell.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/bell.svg'); } .oo-ui-image-invert.oo-ui-icon-bell { background-image: url('themes/wikimediaui/images/icons/bell-invert.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/bell-invert.svg'); } .oo-ui-image-progressive.oo-ui-icon-bell { background-image: url('themes/wikimediaui/images/icons/bell-progressive.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/bell-progressive.svg'); } .oo-ui-icon-message { background-image: 
url('themes/wikimediaui/images/icons/message.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/message.svg'); } .oo-ui-image-invert.oo-ui-icon-message { background-image: url('themes/wikimediaui/images/icons/message-invert.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/message-invert.svg'); } .oo-ui-image-progressive.oo-ui-icon-message { background-image: url('themes/wikimediaui/images/icons/message-progressive.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/message-progressive.svg'); } .oo-ui-icon-notice { background-image: url('themes/wikimediaui/images/icons/notice.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/notice.svg'); } .oo-ui-image-invert.oo-ui-icon-notice { background-image: url('themes/wikimediaui/images/icons/notice-invert.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/notice-invert.svg'); } .oo-ui-image-progressive.oo-ui-icon-notice { background-image: url('themes/wikimediaui/images/icons/notice-progressive.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/notice-progressive.svg'); } .oo-ui-icon-speechBubble { background-image: url('themes/wikimediaui/images/icons/speechBubble-ltr.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/speechBubble-ltr.svg'); } .oo-ui-image-invert.oo-ui-icon-speechBubble { background-image: url('themes/wikimediaui/images/icons/speechBubble-ltr-invert.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/speechBubble-ltr-invert.svg'); } 
.oo-ui-image-progressive.oo-ui-icon-speechBubble { background-image: url('themes/wikimediaui/images/icons/speechBubble-ltr-progressive.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/speechBubble-ltr-progressive.svg'); } .oo-ui-icon-speechBubbleAdd { background-image: url('themes/wikimediaui/images/icons/speechBubbleAdd-ltr.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/speechBubbleAdd-ltr.svg'); } .oo-ui-image-invert.oo-ui-icon-speechBubbleAdd { background-image: url('themes/wikimediaui/images/icons/speechBubbleAdd-ltr-invert.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/speechBubbleAdd-ltr-invert.svg'); } .oo-ui-image-progressive.oo-ui-icon-speechBubbleAdd { background-image: url('themes/wikimediaui/images/icons/speechBubbleAdd-ltr-progressive.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/speechBubbleAdd-ltr-progressive.svg'); } .oo-ui-icon-speechBubbles { background-image: url('themes/wikimediaui/images/icons/speechBubbles-ltr.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/speechBubbles-ltr.svg'); } .oo-ui-image-invert.oo-ui-icon-speechBubbles { background-image: url('themes/wikimediaui/images/icons/speechBubbles-ltr-invert.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/speechBubbles-ltr-invert.svg'); } .oo-ui-image-progressive.oo-ui-icon-speechBubbles { background-image: url('themes/wikimediaui/images/icons/speechBubbles-ltr-progressive.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/speechBubbles-ltr-progressive.svg'); } .oo-ui-icon-tray { background-image: 
url('themes/wikimediaui/images/icons/tray.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/tray.svg'); } .oo-ui-image-invert.oo-ui-icon-tray { background-image: url('themes/wikimediaui/images/icons/tray-invert.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/tray-invert.svg'); } .oo-ui-image-progressive.oo-ui-icon-tray { background-image: url('themes/wikimediaui/images/icons/tray-progressive.png'); background-image: linear-gradient(transparent, transparent), /* @embed */ url('themes/wikimediaui/images/icons/tray-progressive.svg'); }
{ "content_hash": "36d0915ae8ed65d6cee87e8a8645674e", "timestamp": "", "source": "github", "line_count": 105, "max_line_length": 151, "avg_line_length": 63.7047619047619, "alnum_prop": 0.7633428016145911, "repo_name": "extend1994/cdnjs", "id": "817cce8fbbe387f1a1058eb28761b329c24a5362", "size": "6911", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "ajax/libs/oojs-ui/0.30.4/oojs-ui-wikimediaui-icons-alerts.css", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
require 'bundler' Bundler.require require 'asana' access_token = ENV['ASANA_ACCESS_TOKEN'] unless access_token abort "Run this program with the env var ASANA_ACCESS_TOKEN.\n" \ "Go to http://app.asana.com/-/account_api to create a personal access token." end client = Asana::Client.new do |c| c.authentication :access_token, access_token end workspace = client.workspaces.find_all.first task = client.tasks.find_all(assignee: "me", workspace: workspace.id).first unless task task = client.tasks.create(workspace: workspace.id, name: "Hello world!", assignee: "me") end Thread.abort_on_exception = true Thread.new do puts "Listening for 'changed' events on #{task} in one thread..." task.events(wait: 2).lazy.select { |event| event.action == 'changed' }.each do |event| puts "#{event.user.name} changed #{event.resource}" end end Thread.new do puts "Listening for non-'changed' events on #{task} in another thread..." task.events(wait: 1).lazy.reject { |event| event.action == 'changed' }.each do |event| puts "'#{event.action}' event: #{event}" end end sleep
{ "content_hash": "d33b22fec299be3f9a833f522ae6bcb4", "timestamp": "", "source": "github", "line_count": 37, "max_line_length": 91, "avg_line_length": 29.64864864864865, "alnum_prop": 0.7064721969006381, "repo_name": "cocktail-io/ruby-asana", "id": "6f8a98f21e06a35193722d0287e7aa259388d948", "size": "1121", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "examples/events.rb", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "1232" }, { "name": "Ruby", "bytes": "137988" }, { "name": "Shell", "bytes": "126" } ], "symlink_target": "" }
import bitcoin = require('bitcoinjs-lib'); // For testing only function rng() { return new Buffer('12345678901234567890123456789012'); } // Generate a random bitcoin address const keyPair1 = bitcoin.ECPair.makeRandom({rng}); const { address } = bitcoin.payments.p2pkh({ pubkey: keyPair1.publicKey }); keyPair1.toWIF(); // Generate an address from a SHA256 hash const hash = bitcoin.crypto.sha256(Buffer.from('correct horse battery staple', 'utf8')); const keyPair2 = bitcoin.ECPair.fromPrivateKey(hash); // Generate a random keypair for alternative networks const keyPair3 = bitcoin.ECPair.makeRandom({network: bitcoin.networks.testnet, rng}); keyPair3.toWIF(); bitcoin.payments.p2pkh({ pubkey: keyPair3.publicKey }); const network = keyPair3.network; // Test TransactionBuilder and Transaction const txb = new bitcoin.TransactionBuilder(); txb.addInput('aa94ab02c182214f090e99a0d57021caffd0f195a81c24602b1028b130b63e31', 0); txb.addOutput(Buffer.from('1Gokm82v6DmtwKEB8AiVhm82hyFSsEvBDK', 'utf8'), 15000); txb.sign(0, keyPair1); const tx = txb.build(); tx.toHex(); tx.hasWitnesses(); tx.hashForWitnessV0(1, new Buffer('12345678901234567890123456789012'), 2, 3); // Test functions in address const rsBase58Check = bitcoin.address.fromBase58Check(address); const rsBech32 = bitcoin.address.fromBech32(address); const rsOutputScript = bitcoin.address.fromOutputScript(new Buffer('12345678901234567890123456789012')); const rsOutputScriptWithNetwork = bitcoin.address.fromOutputScript(new Buffer('12345678901234567890123456789012'), network); bitcoin.address.toBase58Check(rsBase58Check.hash, rsBase58Check.version); bitcoin.address.toBech32(rsBech32.data, rsBech32.version, rsBech32.prefix); bitcoin.address.toOutputScript(address); bitcoin.address.toOutputScript(address, network);
{ "content_hash": "144ba2fbf0a008d4d313594bc9588d25", "timestamp": "", "source": "github", "line_count": 41, "max_line_length": 124, "avg_line_length": 43.68292682926829, "alnum_prop": 0.7967615857063093, "repo_name": "AgentME/DefinitelyTyped", "id": "cc24c6573b3bb25f357cfd1bbec483bf54ec23bc", "size": "1791", "binary": false, "copies": "17", "ref": "refs/heads/master", "path": "types/bitcoinjs-lib/bitcoinjs-lib-tests.ts", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "10652407" }, { "name": "Ruby", "bytes": "40" }, { "name": "Shell", "bytes": "60" }, { "name": "TypeScript", "bytes": "11370242" } ], "symlink_target": "" }
using System; using FasterTests.Core.Integration.Nunit.SetupFixturesContexts; using FasterTests.Core.Integration.Nunit.SetupFixturesContexts.SetupFixtures; using Machine.Fakes; using Machine.Specifications; namespace FasterTests.Tests.Core.Integration.Nunit.SetupFixturesContexts.SetupFixtures.SetupFixtureSpecs { public abstract class SetupFixtureSpecification<TSetupFixture> : WithSubject<SetupFixture> where TSetupFixture : class { Establish context = () => { Configure(r => r.For<Type>().Use(typeof(TSetupFixture))); The<ISetupFixtureAdapterFactory>() .WhenToldTo(f => f.Create(typeof(TSetupFixture))) .Return(The<ISetupFixtureAdapter>()); TheAdapterWhenToldToSetupReturn(true); }; protected static void ConfigureAdapterToFail() { TheAdapterWhenToldToSetupReturn(false); } protected static ISetupFixture CreateFixtureFor<T>() { var fixture = An<ISetupFixture>(); fixture .WhenToldTo(f => f.Type) .Return(typeof(T)); return fixture; } private static void TheAdapterWhenToldToSetupReturn(bool value) { The<ISetupFixtureAdapter>() .WhenToldTo(a => a.Setup()) .Return(value); } } }
{ "content_hash": "9e8a718f799c4bd9b09ad41f121e9eff", "timestamp": "", "source": "github", "line_count": 45, "max_line_length": 122, "avg_line_length": 30.88888888888889, "alnum_prop": 0.6266187050359712, "repo_name": "devoyster/FasterTests", "id": "af6c60951873666675aa249b5c6d7e3cf5d05fbe", "size": "1392", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Source/Tests/Core/Integration/Nunit/SetupFixturesContexts/SetupFixtures/SetupFixtureSpecs/SetupFixtureSpecification.cs", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C#", "bytes": "150986" }, { "name": "PowerShell", "bytes": "18497" } ], "symlink_target": "" }
import logging import traceback import discovery import yami import yagent class subscribers: def __init__(self): self.subscriptions = {} discovery.register(self.on_service_availability) def add(self, s): # first search if this service is not already in dict for i, os in self.subscriptions.iteritems(): if os == s: # it is already there, return subscription id return i # search for free subscription id and add service when found i = 0 while i in self.subscriptions: i += 1 self.subscriptions[i] = s return i def remove(self, id): if id in self.subscriptions: logging.debug("Removing service subscription '%s[%d]'", self.subscriptions[id], id) self.subscriptions.pop(id) def send(self, msg, params): to_remove = [] for i, s in self.subscriptions.iteritems(): logging.debug("sending '%s' to '%s[%d]'", msg, s, i) try: yagent.agent.send(discovery.get(s), s, msg, params) except yami.YAMIError as e: logging.error("error while sending '%s' to '%s[%d]': %s", msg, s, i, traceback.format_exc()) to_remove.append(i) for i in to_remove: self.remove(i) def send_to(self, id, msg, params): if id in self.subscriptions: s = self.subscriptions[id] logging.debug("sending '%s' to '%s[%d]'", msg, s, id) try: yagent.agent.send(discovery.get(s), s, msg, params) except yami.YAMIError as e: logging.error("error while sending '%s' to '%s[%d]': %s", msg, s, i, traceback.format_exc()) self.remove(id) raise def on_service_availability(self, s, available): if not available: to_remove = [] for i, os in self.subscriptions.iteritems(): if s == os: to_remove.append(i) for i in to_remove: self.remove(i)
{ "content_hash": "d9247fecb5594bc684ebe36a8c2ca456", "timestamp": "", "source": "github", "line_count": 67, "max_line_length": 108, "avg_line_length": 31.91044776119403, "alnum_prop": 0.5318054256314313, "repo_name": "wozio/home-system", "id": "f1a8d3da816280ed5d3c00ea3e43842d9804f83a", "size": "2138", "binary": false, "copies": "1", "ref": "refs/heads/dev", "path": "io-control/subscribers.py", "mode": "33261", "license": "mit", "language": [ { "name": "C", "bytes": "1233645" }, { "name": "C++", "bytes": "385133" }, { "name": "CMake", "bytes": "2618" }, { "name": "CSS", "bytes": "8487" }, { "name": "HTML", "bytes": "8209" }, { "name": "Java", "bytes": "191887" }, { "name": "JavaScript", "bytes": "28432" }, { "name": "Makefile", "bytes": "112196" }, { "name": "Python", "bytes": "106320" }, { "name": "Shell", "bytes": "20133" } ], "symlink_target": "" }
This example extends [Redux Observable's cancellable counter example](https://github.com/redux-observable/redux-observable/tree/master/examples/redux-observable-cancellable-counter) to have multiple counters on the same page. To run the example locally: ```sh git clone https://github.com/ioof-holdings/redux-subspace.git cd redux-subspace/examples/redux-observable/cancellable-counter npm install npm start ``` Or check out the [sandbox](https://codesandbox.io/s/github/ioof-holdings/redux-subspace/tree/master/examples/redux-observable/cancellable-counter).
{ "content_hash": "4f2cc3b6129f1a2ff42990510a81a55d", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 225, "avg_line_length": 43.38461538461539, "alnum_prop": 0.8120567375886525, "repo_name": "mpeyper/redux-subspace", "id": "e155606344e1c9e4ec2866450265e170f85ffd55", "size": "595", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "examples/redux-observable/cancellable-counter/README.md", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "JavaScript", "bytes": "212959" }, { "name": "TypeScript", "bytes": "26674" } ], "symlink_target": "" }
require "bundler/setup" require "ahoy/intercom" RSpec.configure do |config| config.disable_monkey_patching! config.expect_with :rspec do |c| c.syntax = :expect end config.before(:suite) do Time.zone = 'UTC' class Ahoy::Store < Ahoy::Intercom::Store end end end
{ "content_hash": "845a018353dd8689396d427ea31bf2e9", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 45, "avg_line_length": 15.421052631578947, "alnum_prop": 0.6723549488054608, "repo_name": "mkdev-me/ahoy-intercom", "id": "b0ef1ccbd5c49b72ebeb29265847ae6db1c87d75", "size": "293", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "spec/spec_helper.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "3769" }, { "name": "Shell", "bytes": "74" } ], "symlink_target": "" }
A generator for [Yeoman](http://yeoman.io) for a Struts2 + Grunt + Maven project. ![](http://f.cl.ly/items/3A3r0R3y2t2U2g3F1c2y/maven-grunt.png) ## Getting Started To install generator-grunt-maven from npm, run: ``` $ npm install -g generator-grunt-maven ``` Finally, initiate the generator: ``` $ yo grunt-maven ``` ### Getting To Know Yeoman Yeoman has a heart of gold. He's a person with feelings and opinions, but he's very easy to work with. If you think he's too opinionated, he can be easily convinced. If you'd like to get to know Yeoman better and meet some of his friends, [Grunt](http://gruntjs.com) and [Bower](http://bower.io), check out the complete [Getting Started Guide](https://github.com/yeoman/yeoman/wiki/Getting-Started). ## License [MIT License](http://en.wikipedia.org/wiki/MIT_License)
{ "content_hash": "c8189d5bd0c69c6dd2b99f427d6926c3", "timestamp": "", "source": "github", "line_count": 29, "max_line_length": 233, "avg_line_length": 28.413793103448278, "alnum_prop": 0.7257281553398058, "repo_name": "cesarwbr/generator-grunt-maven", "id": "f5111f891398904a248f12b5fd5780103e04eb1b", "size": "1000", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "7491" }, { "name": "Java", "bytes": "2962" }, { "name": "JavaScript", "bytes": "16017" }, { "name": "SCSS", "bytes": "1543" } ], "symlink_target": "" }