text
stringlengths
2
1.04M
meta
dict
/*! text-resources | 0.1.2 | 2017-09-06 */

// it-IT locale bundle: registers currency symbols on the shared CurrencyNames
// table. Under CommonJS the base table is loaded via module.require; in a
// browser the table is expected to already exist on the global object.
(function (root, install) {
    if (typeof module === "object" && typeof module.exports === "object") {
        root.CurrencyNames = module.require("./currency-names");
        module.exports = install(root);
    } else {
        install(root);
    }
})(this, function (root) {
    root.CurrencyNames["it-IT"] = {
        EUR: "\u20ac",
        ITL: "L."
    };
    return root.CurrencyNames;
});

// it-IT locale bundle: registers decimal/currency/percent number patterns on
// the shared FormatData table, using the same UMD-style bootstrap.
(function (root, install) {
    if (typeof module === "object" && typeof module.exports === "object") {
        root.FormatData = module.require("./format-data");
        module.exports = install(root);
    } else {
        install(root);
    }
})(this, function (root) {
    root.FormatData["it-IT"] = {
        NumberPatterns: [
            "#,##0.###;-#,##0.###",
            "\xa4 #,##0.00;-\xa4 #,##0.00",
            "#,##0%"
        ]
    };
    return root.FormatData;
});
{ "content_hash": "6b938cc58603ce2da02a4ab6599c056c", "timestamp": "", "source": "github", "line_count": 29, "max_line_length": 92, "avg_line_length": 31.06896551724138, "alnum_prop": 0.5471698113207547, "repo_name": "yannickebongue/text-resources", "id": "b0ab2ce9921674507789681c404f0c2a7f433901", "size": "901", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "dist/text-resources-it-IT.js", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "804" }, { "name": "JavaScript", "bytes": "185339" } ], "symlink_target": "" }
<!DOCTYPE html> <!--[if lt IE 7 ]> <html class="ie ie6" lang="en"> <![endif]--> <!--[if IE 7 ]> <html class="ie ie7" lang="en"> <![endif]--> <!--[if IE 8 ]> <html class="ie ie8" lang="en"> <![endif]--> <!--[if (gte IE 9)|!(IE)]><!--> <html class="not-ie" lang="en" xmlns:th="http://www.thymeleaf.org"><!--<![endif]--> <head th:replace="fragment/common :: headerFragment"/> <body> <section th:replace="fragment/common :: topFragment"/> <section th:replace="fragment/common :: titleBar (title='Personal Background')"/> <section id="content" class="clearfix"> <div class="container"> <div class="row"> <div class="col-md-8"> <form th:action="@{/questions/Demographics}" method="POST"> <!-- Add timer --> <input id="timeOnPage" name="timeOnPage" type="hidden"/> <p> We would like to start with a few questions to get to know you better. Answers to these personal questions are kept confidential, but they help us improve MindTrails by figuring out for whom it works well.</p> <div class="section"> <br/> <div><label class="question"> Birth year <input type="number" min="1900" name="BirthYear" required="true"/> </label> </div> <br/> <div class="row"> <div class="col-md-8 side-border"> <div><label class="question"> Race (select all that apply)</label></div> <div class="radio"> <label> <input type="checkbox" name="race" value="American Indian/Alaska Native"/> American Indian/Alaska Native</label> </div> <div class="radio"> <label> <input type="checkbox" name="race" value="East Asian" /> East Asian</label> </div> <div class="radio"> <label> <input type="checkbox" name="race" value="South Asian"/> South Asian</label> </div> <div class="radio"> <label> <input type="checkbox" name="race" value="Native Hawaiian/Pacific Islander"/> Native Hawaiian/Pacific Islander</label> </div> <div class="radio"> <label> <input type="checkbox" name="race" value="Black/African origin"/> Black/African origin</label> </div> <div class="radio"> <label> <input type="checkbox" name="race" value="White/European 
origin" /> White/European origin</label> </div> <div class="radio"> <label> <input type="checkbox" name="race" value="Other or Unknown"/> Other or Unknown</label> </div> </div> <div class="col-md-4"> <div class="radio"> <label class="question"><input type="checkbox" name="Disorders" value="555" onchange="disableSection(this)"/> Prefer not to answer </label> </div> </div> </div> <br/> <div><label class="question"> Ethnicity <select name="Ethnicity" required="true"> <option value="">Please select one</option> <option>Hispanic or Latino</option> <option>Not Hispanic or Latino</option> <option>Unknown</option> <option>Prefer not to answer</option> </select> </label> </div> <br/> <div><label class="question"> Country of Residence <select required="true" class="auto-selector" placeholder="Type your country of residence" name="Country" autofocus="autofocus" autocorrect="off" autocomplete="off"> <option value="" selected="selected">Please select one</option> <option value="United States" data-relevancy-booster="3.5" data-alternative-spellings="US USA United States of America">United States </option> <option value="Afghanistan" data-alternative-spellings="AF">Afghanistan</option> <option value="Åland Islands" data-alternative-spellings="AX Aaland Aland" data-relevancy-booster="0.5">Åland Islands </option> <option value="Albania" data-alternative-spellings="AL">Albania</option> <option value="Algeria" data-alternative-spellings="DZ">Algeria</option> <option value="American Samoa" data-alternative-spellings="AS" data-relevancy-booster="0.5">American Samoa </option> <option value="Andorra" data-alternative-spellings="AD" data-relevancy-booster="0.5"> Andorra </option> <option value="Angola" data-alternative-spellings="AO">Angola</option> <option value="Anguilla" data-alternative-spellings="AI" data-relevancy-booster="0.5"> Anguilla </option> <option value="Antarctica" data-alternative-spellings="AQ" data-relevancy-booster="0.5"> Antarctica </option> <option value="Antigua And 
Barbuda" data-alternative-spellings="AG" data-relevancy-booster="0.5">Antigua and Barbuda </option> <option value="Argentina" data-alternative-spellings="AR">Argentina</option> <option value="Armenia" data-alternative-spellings="AM">Armenia</option> <option value="Aruba" data-alternative-spellings="AW" data-relevancy-booster="0.5"> Aruba </option> <option value="Australia" data-alternative-spellings="AU" data-relevancy-booster="1.5"> Australia </option> <option value="Austria" data-alternative-spellings="AT Österreich Osterreich Oesterreich ">Austria </option> <option value="Azerbaijan" data-alternative-spellings="AZ">Azerbaijan</option> <option value="Bahamas" data-alternative-spellings="BS">Bahamas</option> <option value="Bahrain" data-alternative-spellings="BH">Bahrain</option> <option value="Bangladesh" data-alternative-spellings="BD" data-relevancy-booster="2"> Bangladesh </option> <option value="Barbados" data-alternative-spellings="BB">Barbados</option> <option value="Belarus" data-alternative-spellings="BY">Belarus</option> <option value="Belgium" data-alternative-spellings="BE België Belgie Belgien Belgique" data-relevancy-booster="1.5">Belgium </option> <option value="Belize" data-alternative-spellings="BZ">Belize</option> <option value="Benin" data-alternative-spellings="BJ">Benin</option> <option value="Bermuda" data-alternative-spellings="BM" data-relevancy-booster="0.5"> Bermuda </option> <option value="Bhutan" data-alternative-spellings="BT">Bhutan</option> <option value="Bolivia" data-alternative-spellings="BO">Bolivia</option> <option value="Bonaire, Sint Eustatius and Saba" data-alternative-spellings="BQ"> Bonaire, Sint Eustatius and Saba </option> <option value="Bosnia and Herzegovina" data-alternative-spellings="BA">Bosnia and Herzegovina </option> <option value="Botswana" data-alternative-spellings="BW">Botswana</option> <option value="Bouvet Island" data-alternative-spellings="BV">Bouvet Island</option> <option value="Brazil" 
data-alternative-spellings="BR Brasil" data-relevancy-booster="2">Brazil </option> <option value="British Indian Ocean Territory" data-alternative-spellings="IO">British Indian Ocean Territory </option> <option value="Brunei Darussalam" data-alternative-spellings="BN">Brunei Darussalam </option> <option value="Bulgaria" data-alternative-spellings="BG">Bulgaria</option> <option value="Burkina Faso" data-alternative-spellings="BF">Burkina Faso</option> <option value="Burundi" data-alternative-spellings="BI">Burundi</option> <option value="Cambodia" data-alternative-spellings="KH">Cambodia</option> <option value="Cameroon" data-alternative-spellings="CM">Cameroon</option> <option value="Canada" data-alternative-spellings="CA" data-relevancy-booster="2"> Canada </option> <option value="Cape Verde" data-alternative-spellings="CV">Cape Verde</option> <option value="Cayman Islands" data-alternative-spellings="KY" data-relevancy-booster="0.5">Cayman Islands </option> <option value="Central African Republic" data-alternative-spellings="CF">Central African Republic </option> <option value="Chad" data-alternative-spellings="TD">Chad</option> <option value="Chile" data-alternative-spellings="CL">Chile</option> <option value="China" data-relevancy-booster="3.5" data-alternative-spellings="CN Zhongguo Zhonghua Peoples Republic">China </option> <option value="Christmas Island" data-alternative-spellings="CX" data-relevancy-booster="0.5">Christmas Island </option> <option value="Cocos (Keeling) Islands" data-alternative-spellings="CC" data-relevancy-booster="0.5">Cocos (Keeling) Islands </option> <option value="Colombia" data-alternative-spellings="CO">Colombia</option> <option value="Comoros" data-alternative-spellings="KM">Comoros</option> <option value="Congo" data-alternative-spellings="CG">Congo</option> <option value="Congo, the Democratic Republic of the" data-alternative-spellings="CD"> Congo, Democratic Republic of the </option> <option value="Cook Islands" 
data-alternative-spellings="CK" data-relevancy-booster="0.5">Cook Islands </option> <option value="Costa Rica" data-alternative-spellings="CR">Costa Rica</option> <option value="Côte d'Ivoire" data-alternative-spellings="CI Cote dIvoire">Côte d'Ivoire </option> <option value="Croatia" data-alternative-spellings="HR Hrvatska">Croatia</option> <option value="Cuba" data-alternative-spellings="CU">Cuba</option> <option value="Curaçao" data-alternative-spellings="CW Curacao">Curaçao</option> <option value="Cyprus" data-alternative-spellings="CY">Cyprus</option> <option value="Czech Republic" data-alternative-spellings="CZ Česká Ceska">Czech Republic </option> <option value="Denmark" data-alternative-spellings="DK Danmark" data-relevancy-booster="1.5">Denmark </option> <option value="Djibouti" data-alternative-spellings="DJ">Djibouti</option> <option value="Dominica" data-alternative-spellings="DM" data-relevancy-booster="0.5"> Dominica </option> <option value="Dominican Republic" data-alternative-spellings="DO">Dominican Republic </option> <option value="Ecuador" data-alternative-spellings="EC">Ecuador</option> <option value="Egypt" data-alternative-spellings="EG" data-relevancy-booster="1.5"> Egypt </option> <option value="El Salvador" data-alternative-spellings="SV">El Salvador</option> <option value="Equatorial Guinea" data-alternative-spellings="GQ">Equatorial Guinea </option> <option value="Eritrea" data-alternative-spellings="ER">Eritrea</option> <option value="Estonia" data-alternative-spellings="EE Eesti">Estonia</option> <option value="Ethiopia" data-alternative-spellings="ET">Ethiopia</option> <option value="Falkland Islands (Malvinas)" data-alternative-spellings="FK" data-relevancy-booster="0.5">Falkland Islands (Malvinas) </option> <option value="Faroe Islands" data-alternative-spellings="FO Føroyar Færøerne" data-relevancy-booster="0.5">Faroe Islands </option> <option value="Fiji" data-alternative-spellings="FJ">Fiji</option> <option value="Finland" 
data-alternative-spellings="FI Suomi">Finland</option> <option value="France" data-alternative-spellings="FR République française" data-relevancy-booster="2.5">France </option> <option value="French Guiana" data-alternative-spellings="GF">French Guiana</option> <option value="French Polynesia" data-alternative-spellings="PF">French Polynesia </option> <option value="French Southern Territories" data-alternative-spellings="TF">French Southern Territories </option> <option value="Gabon" data-alternative-spellings="GA">Gabon</option> <option value="Gambia" data-alternative-spellings="GM">Gambia</option> <option value="Georgia" data-alternative-spellings="GE">Georgia</option> <option value="Germany" data-alternative-spellings="DE Bundesrepublik Deutschland" data-relevancy-booster="3">Germany </option> <option value="Ghana" data-alternative-spellings="GH">Ghana</option> <option value="Gibraltar" data-alternative-spellings="GI" data-relevancy-booster="0.5"> Gibraltar </option> <option value="Greece" data-alternative-spellings="GR" data-relevancy-booster="1.5"> Greece </option> <option value="Greenland" data-alternative-spellings="GL grønland" data-relevancy-booster="0.5">Greenland </option> <option value="Grenada" data-alternative-spellings="GD">Grenada</option> <option value="Guadeloupe" data-alternative-spellings="GP">Guadeloupe</option> <option value="Guam" data-alternative-spellings="GU">Guam</option> <option value="Guatemala" data-alternative-spellings="GT">Guatemala</option> <option value="Guernsey" data-alternative-spellings="GG" data-relevancy-booster="0.5"> Guernsey </option> <option value="Guinea" data-alternative-spellings="GN">Guinea</option> <option value="Guinea-Bissau" data-alternative-spellings="GW">Guinea-Bissau</option> <option value="Guyana" data-alternative-spellings="GY">Guyana</option> <option value="Haiti" data-alternative-spellings="HT">Haiti</option> <option value="Heard Island and McDonald Islands" data-alternative-spellings="HM">Heard Island 
and McDonald Islands </option> <option value="Holy See (Vatican City State)" data-alternative-spellings="VA" data-relevancy-booster="0.5">Holy See (Vatican City State) </option> <option value="Honduras" data-alternative-spellings="HN">Honduras</option> <option value="Hong Kong" data-alternative-spellings="HK">Hong Kong</option> <option value="Hungary" data-alternative-spellings="HU">Hungary</option> <option value="Iceland" data-alternative-spellings="IS Island">Iceland</option> <option value="India" data-alternative-spellings="IN" data-relevancy-booster="3">India </option> <option value="Indonesia" data-alternative-spellings="ID" data-relevancy-booster="2"> Indonesia </option> <option value="Iran, Islamic Republic of" data-alternative-spellings="IR">Iran, Islamic Republic of </option> <option value="Iraq" data-alternative-spellings="IQ">Iraq</option> <option value="Ireland" data-alternative-spellings="IE Éire" data-relevancy-booster="1.2">Ireland </option> <option value="Isle of Man" data-alternative-spellings="IM" data-relevancy-booster="0.5">Isle of Man </option> <option value="Israel" data-alternative-spellings="IL">Israel</option> <option value="Italy" data-alternative-spellings="IT Italia" data-relevancy-booster="2"> Italy </option> <option value="Jamaica" data-alternative-spellings="JM">Jamaica</option> <option value="Japan" data-alternative-spellings="JP Nippon Nihon" data-relevancy-booster="2.5">Japan </option> <option value="Jersey" data-alternative-spellings="JE" data-relevancy-booster="0.5"> Jersey </option> <option value="Jordan" data-alternative-spellings="JO">Jordan</option> <option value="Kazakhstan" data-alternative-spellings="KZ">Kazakhstan</option> <option value="Kenya" data-alternative-spellings="KE">Kenya</option> <option value="Kiribati" data-alternative-spellings="KI">Kiribati</option> <option value="Korea, Democratic People's Republic of" data-alternative-spellings="KP North Korea">Korea, Democratic People's Republic of </option> <option 
value="Korea, Republic of" data-alternative-spellings="KR South Korea" data-relevancy-booster="1.5">Korea, Republic of </option> <option value="Kuwait" data-alternative-spellings="KW">Kuwait</option> <option value="Kyrgyzstan" data-alternative-spellings="KG">Kyrgyzstan</option> <option value="Lao People's Democratic Republic" data-alternative-spellings="LA">Lao People's Democratic Republic </option> <option value="Latvia" data-alternative-spellings="LV">Latvia</option> <option value="Lebanon" data-alternative-spellings="LB">Lebanon</option> <option value="Lesotho" data-alternative-spellings="LS">Lesotho</option> <option value="Liberia" data-alternative-spellings="LR">Liberia</option> <option value="Libyan Arab Jamahiriya" data-alternative-spellings="LY">Libyan Arab Jamahiriya </option> <option value="Liechtenstein" data-alternative-spellings="LI">Liechtenstein</option> <option value="Lithuania" data-alternative-spellings="LT">Lithuania</option> <option value="Luxembourg" data-alternative-spellings="LU">Luxembourg</option> <option value="Macao" data-alternative-spellings="MO">Macao</option> <option value="Macedonia, The Former Yugoslav Republic Of" data-alternative-spellings="MK">Macedonia, Former Yugoslav Republic Of </option> <option value="Madagascar" data-alternative-spellings="MG">Madagascar</option> <option value="Malawi" data-alternative-spellings="MW">Malawi</option> <option value="Malaysia" data-alternative-spellings="MY">Malaysia</option> <option value="Maldives" data-alternative-spellings="MV">Maldives</option> <option value="Mali" data-alternative-spellings="ML">Mali</option> <option value="Malta" data-alternative-spellings="MT">Malta</option> <option value="Marshall Islands" data-alternative-spellings="MH" data-relevancy-booster="0.5">Marshall Islands </option> <option value="Martinique" data-alternative-spellings="MQ">Martinique</option> <option value="Mauritania" data-alternative-spellings="MR">Mauritania</option> <option value="Mauritius" 
data-alternative-spellings="MU">Mauritius</option> <option value="Mayotte" data-alternative-spellings="YT">Mayotte</option> <option value="Mexico" data-alternative-spellings="MX Mexicanos" data-relevancy-booster="1.5">Mexico </option> <option value="Micronesia, Federated States of" data-alternative-spellings="FM"> Micronesia, Federated States of </option> <option value="Moldova, Republic of" data-alternative-spellings="MD">Moldova, Republic of </option> <option value="Monaco" data-alternative-spellings="MC">Monaco</option> <option value="Mongolia" data-alternative-spellings="MN">Mongolia</option> <option value="Montenegro" data-alternative-spellings="ME">Montenegro</option> <option value="Montserrat" data-alternative-spellings="MS" data-relevancy-booster="0.5"> Montserrat </option> <option value="Morocco" data-alternative-spellings="MA">Morocco</option> <option value="Mozambique" data-alternative-spellings="MZ">Mozambique</option> <option value="Myanmar" data-alternative-spellings="MM">Myanmar</option> <option value="Namibia" data-alternative-spellings="NA">Namibia</option> <option value="Nauru" data-alternative-spellings="NR" data-relevancy-booster="0.5"> Nauru </option> <option value="Nepal" data-alternative-spellings="NP">Nepal</option> <option value="Netherlands" data-alternative-spellings="NL Holland Nederland" data-relevancy-booster="1.5">Netherlands </option> <option value="New Caledonia" data-alternative-spellings="NC" data-relevancy-booster="0.5">New Caledonia </option> <option value="New Zealand" data-alternative-spellings="NZ">New Zealand</option> <option value="Nicaragua" data-alternative-spellings="NI">Nicaragua</option> <option value="Niger" data-alternative-spellings="NE">Niger</option> <option value="Nigeria" data-alternative-spellings="NG" data-relevancy-booster="1.5"> Nigeria </option> <option value="Niue" data-alternative-spellings="NU" data-relevancy-booster="0.5">Niue </option> <option value="Norfolk Island" data-alternative-spellings="NF" 
data-relevancy-booster="0.5">Norfolk Island </option> <option value="Northern Mariana Islands" data-alternative-spellings="MP" data-relevancy-booster="0.5">Northern Mariana Islands </option> <option value="Norway" data-alternative-spellings="NO Norge Noreg" data-relevancy-booster="1.5">Norway </option> <option value="Oman" data-alternative-spellings="OM">Oman</option> <option value="Pakistan" data-alternative-spellings="PK" data-relevancy-booster="2"> Pakistan </option> <option value="Palau" data-alternative-spellings="PW" data-relevancy-booster="0.5"> Palau </option> <option value="Palestinian Territory, Occupied" data-alternative-spellings="PS"> Palestinian Territory, Occupied </option> <option value="Panama" data-alternative-spellings="PA">Panama</option> <option value="Papua New Guinea" data-alternative-spellings="PG">Papua New Guinea </option> <option value="Paraguay" data-alternative-spellings="PY">Paraguay</option> <option value="Peru" data-alternative-spellings="PE">Peru</option> <option value="Philippines" data-alternative-spellings="PH" data-relevancy-booster="1.5">Philippines </option> <option value="Pitcairn" data-alternative-spellings="PN" data-relevancy-booster="0.5"> Pitcairn </option> <option value="Poland" data-alternative-spellings="PL" data-relevancy-booster="1.25"> Poland </option> <option value="Portugal" data-alternative-spellings="PT" data-relevancy-booster="1.5"> Portugal </option> <option value="Puerto Rico" data-alternative-spellings="PR">Puerto Rico</option> <option value="Qatar" data-alternative-spellings="QA">Qatar</option> <option value="Réunion" data-alternative-spellings="RE Reunion">Réunion</option> <option value="Romania" data-alternative-spellings="RO">Romania</option> <option value="Russian Federation" data-alternative-spellings="RU Russia Rossiya" data-relevancy-booster="2.5">Russian Federation </option> <option value="Rwanda" data-alternative-spellings="RW">Rwanda</option> <option value="Saint Barthélemy" 
data-alternative-spellings="BL" data-relevancy-booster="0.5">Saint Barthélemy </option> <option value="Saint Helena" data-alternative-spellings="SH" data-relevancy-booster="0.5">Saint Helena </option> <option value="Saint Kitts and Nevis" data-alternative-spellings="KN" data-relevancy-booster="0.5">Saint Kitts and Nevis </option> <option value="Saint Lucia" data-alternative-spellings="LC" data-relevancy-booster="0.5">Saint Lucia </option> <option value="Saint Martin (French Part)" data-alternative-spellings="MF" data-relevancy-booster="0.5">Saint Martin (French Part) </option> <option value="Saint Pierre and Miquelon" data-alternative-spellings="PM" data-relevancy-booster="0.5">Saint Pierre and Miquelon </option> <option value="Saint Vincent and the Grenadines" data-alternative-spellings="VC" data-relevancy-booster="0.5">Saint Vincent and the Grenadines </option> <option value="Samoa" data-alternative-spellings="WS">Samoa</option> <option value="San Marino" data-alternative-spellings="SM">San Marino</option> <option value="Sao Tome and Principe" data-alternative-spellings="ST">Sao Tome and Principe </option> <option value="Saudi Arabia" data-alternative-spellings="SA">Saudi Arabia</option> <option value="Senegal" data-alternative-spellings="SN">Senegal</option> <option value="Serbia" data-alternative-spellings="RS">Serbia</option> <option value="Seychelles" data-alternative-spellings="SC" data-relevancy-booster="0.5"> Seychelles </option> <option value="Sierra Leone" data-alternative-spellings="SL">Sierra Leone</option> <option value="Singapore" data-alternative-spellings="SG">Singapore</option> <option value="Sint Maarten (Dutch Part)" data-alternative-spellings="SX">Sint Maarten (Dutch Part) </option> <option value="Slovakia" data-alternative-spellings="SK">Slovakia</option> <option value="Slovenia" data-alternative-spellings="SI">Slovenia</option> <option value="Solomon Islands" data-alternative-spellings="SB">Solomon Islands</option> <option value="Somalia" 
data-alternative-spellings="SO">Somalia</option> <option value="South Africa" data-alternative-spellings="ZA">South Africa</option> <option value="South Georgia and the South Sandwich Islands" data-alternative-spellings="GS">South Georgia and the South Sandwich Islands </option> <option value="South Sudan" data-alternative-spellings="SS">South Sudan</option> <option value="Spain" data-alternative-spellings="ES España" data-relevancy-booster="2"> Spain </option> <option value="Sri Lanka" data-alternative-spellings="LK">Sri Lanka</option> <option value="Sudan" data-alternative-spellings="SD">Sudan</option> <option value="Suriname" data-alternative-spellings="SR">Suriname</option> <option value="Svalbard and Jan Mayen" data-alternative-spellings="SJ" data-relevancy-booster="0.5">Svalbard and Jan Mayen </option> <option value="Swaziland" data-alternative-spellings="SZ">Swaziland</option> <option value="Sweden" data-alternative-spellings="SE Sverige" data-relevancy-booster="1.5">Sweden </option> <option value="Switzerland" data-alternative-spellings="CH Swiss Confederation Schweiz Suisse Svizzera Svizra" data-relevancy-booster="1.5">Switzerland </option> <option value="Syrian Arab Republic" data-alternative-spellings="SY Syria">Syrian Arab Republic </option> <option value="Taiwan, Province of China" data-alternative-spellings="TW">Taiwan, Province of China </option> <option value="Tajikistan" data-alternative-spellings="TJ">Tajikistan</option> <option value="Tanzania, United Republic of" data-alternative-spellings="TZ">Tanzania, United Republic of </option> <option value="Thailand" data-alternative-spellings="TH">Thailand</option> <option value="Timor-Leste" data-alternative-spellings="TL">Timor-Leste</option> <option value="Togo" data-alternative-spellings="TG">Togo</option> <option value="Tokelau" data-alternative-spellings="TK" data-relevancy-booster="0.5"> Tokelau </option> <option value="Tonga" data-alternative-spellings="TO">Tonga</option> <option value="Trinidad 
and Tobago" data-alternative-spellings="TT">Trinidad and Tobago </option> <option value="Tunisia" data-alternative-spellings="TN">Tunisia</option> <option value="Turkey" data-alternative-spellings="TR Türkiye Turkiye">Turkey</option> <option value="Turkmenistan" data-alternative-spellings="TM">Turkmenistan</option> <option value="Turks and Caicos Islands" data-alternative-spellings="TC" data-relevancy-booster="0.5">Turks and Caicos Islands </option> <option value="Tuvalu" data-alternative-spellings="TV" data-relevancy-booster="0.5"> Tuvalu </option> <option value="Uganda" data-alternative-spellings="UG">Uganda</option> <option value="Ukraine" data-alternative-spellings="UA Ukrayina">Ukraine</option> <option value="United Arab Emirates" data-alternative-spellings="AE UAE Emirates">United Arab Emirates </option> <option value="United Kingdom" data-alternative-spellings="GB Great Britain England UK Wales Scotland Northern Ireland" data-relevancy-booster="2.5">United Kingdom </option> <option value="United States" data-relevancy-booster="3.5" data-alternative-spellings="US USA United States of America">United States </option> <option value="United States Minor Outlying Islands" data-alternative-spellings="UM"> United States Minor Outlying Islands </option> <option value="Uruguay" data-alternative-spellings="UY">Uruguay</option> <option value="Uzbekistan" data-alternative-spellings="UZ">Uzbekistan</option> <option value="Vanuatu" data-alternative-spellings="VU">Vanuatu</option> <option value="Venezuela" data-alternative-spellings="VE">Venezuela</option> <option value="Vietnam" data-alternative-spellings="VN" data-relevancy-booster="1.5"> Vietnam </option> <option value="Virgin Islands, British" data-alternative-spellings="VG" data-relevancy-booster="0.5">Virgin Islands, British </option> <option value="Virgin Islands, U.S." data-alternative-spellings="VI" data-relevancy-booster="0.5">Virgin Islands, U.S. 
</option> <option value="Wallis and Futuna" data-alternative-spellings="WF" data-relevancy-booster="0.5">Wallis and Futuna </option> <option value="Western Sahara" data-alternative-spellings="EH">Western Sahara</option> <option value="Yemen" data-alternative-spellings="YE">Yemen</option> <option value="Zambia" data-alternative-spellings="ZM">Zambia</option> <option value="Zimbabwe" data-alternative-spellings="ZW">Zimbabwe</option> <option value="NoAnswer">Prefer not to answer</option> </select> </label> </div> <br/> <div><label class="question"> Please indicate your highest educational attainment <select required="true" name="Education"> <option value="">Please select one</option> <option>Elementary School</option> <option>Junior High</option> <option>Some High School</option> <option>High School Graduate</option> <option>Some College</option> <option>Associate's Degree</option> <option>Bachelor's Degree</option> <option>Some Graduate School</option> <option>Master's Degree</option> <option>M.B.A.</option> <option>J.D.</option> <option>M.D.</option> <option>Ph.D.</option> <option>Other Advanced Degree</option> <option>Prefer not to answer</option> </select> </label> </div> <br/> <div><label class="question"> What is your marital status? <select name="MaritalStat" required="true"> <option value="">Please select one</option> <option>Single</option> <option>Single, but casually dating</option> <option>Single, but currently engaged to be married</option> <option>Single, but currently living with someone in a marriage-like relationship </option> <option>Married</option> <option>In a domestic or civil union</option> <option>Separated</option> <option>Divorced</option> <option>Widow/widower</option> <option>Other</option> <option>Prefer not to answer</option> </select> </label> </div> <br/> <div><label class="question"> What is your employment status? 
<select name="EmploymentStat" required="true"> <option value="">Please select one</option> <option>Working full-time</option> <option>Working part-time</option> <option>Unemployed or laid off</option> <option>Looking for work</option> <option>Homemaker/keeping house or raising children full-time</option> <option>Retired</option> <option>Student</option> <option>Other</option> <option>Prefer not to answer</option> </select> </label> </div> <br/> <div><label class="question"> What is your household annual income (before taxes and other deductions)? <select name="Income" required="true"> <option value="">Please select one</option> <option>Less than $5,000</option> <option>$5,000 through $11,999</option> <option>$12,000 through $15,999</option> <option>$16,000 through $24,999</option> <option>$25,000 through $34,999</option> <option>$35,000 through $49,999</option> <option>$50,000 through $74,999</option> <option>$75,000 through $99,999</option> <option>$100,000 through $149,999</option> <option>$150,000 through $199,999</option> <option>$200,000 through $249,999</option> <option>$250,000 or greater</option> <option>Don't know</option> <option>Prefer not to answer</option> </select> </label> </div> <br/> <div><label class="question"> What brought you to this website? 
<select name="PtpReason" required="true"> <option value="">Please select one</option> <option>Recommendation of teacher</option> <option>Recommendation of employer</option> <option>Recommendation of health professional (e.g., therapist or general practitioner) </option> <option>Recommendation of friend, family , acquaintance, or colleague</option> <option>Link from blog, chat, or discussion thread</option> <option>Link from education site</option> <option>News story</option> <option>Facebook post</option> <option>Craigslist ad</option> <option>Advertisement from other sources</option> <option>Planned search for information related to this topic</option> <option>Planned search for this topic in particular</option> <option>Just surfing the web</option> <option>Other</option> <option>Prefer not to answer</option> </select> </label> </div> <div id="PtpReasonOther"> <p>Please Specify: <label> <input id="PtpReasonOtherInput" name="PtpReasonOther" type="text" placeholder="" size="50" /> </label> </p> </div> <script> $('#PtpReasonOther').hide(); $('select[name=PtpReason]').change(function () { if ($(this).val() == 'Other') { $('#PtpReasonOther').show(); $("#PtpReasonOtherInput").prop('required',true); } else { $('#PtpReasonOther').hide(); $("#PtpReasonOtherInput").prop('required',false); } }); </script> <div style="text-align:center"> <button type="submit"> Continue </button> </div> </div> </form> </div> </div> </div> </section> <footer th:replace="fragment/common :: footer"/> <div th:include="fragment/common :: scripts"/> </body> </html>
{ "content_hash": "c452d3ab2963a00b71e9e108947c420d", "timestamp": "", "source": "github", "line_count": 702, "max_line_length": 240, "avg_line_length": 70.77777777777777, "alnum_prop": 0.4700720524896349, "repo_name": "Diheng/PIServer", "id": "ceb2e633623025315300427afa6a9b15eb9a54b5", "size": "49709", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "r01/src/main/resources/templates/questions/Demographics.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "514266" }, { "name": "HTML", "bytes": "1721182" }, { "name": "Java", "bytes": "565121" }, { "name": "JavaScript", "bytes": "1684012" }, { "name": "Makefile", "bytes": "285" }, { "name": "Shell", "bytes": "680" } ], "symlink_target": "" }
/* TEMPLATE GENERATED TESTCASE FILE
 * Filename: CWE126_Buffer_Overread__CWE129_rand_66b.c
 * Label Definition File: CWE126_Buffer_Overread__CWE129.label.xml
 * Template File: sources-sinks-66b.tmpl.c
 *
 * @description
 * CWE: 126 Buffer Overread
 * BadSource: rand  - sets data to the result of rand(), which may be zero
 * GoodSource: larger than zero but less than 10
 * Sinks:
 *   GoodSink: ensure the array index is valid
 *   BadSink : improperly check the array index by not checking the upper bound
 * Flow Variant: 66 - data passed in an array from one function to another in
 *                    different source files
 *
 * NOTE(review): the out-of-bounds read in the bad/goodG2B sinks is the
 * deliberate flaw this test case exists to exhibit; do not "fix" it.
 */

#include "std_testcase.h"

#ifndef OMITBAD

/* Bad sink: validates only the lower bound of the index, so any value >= 10
 * reads past the end of the local buffer (the intentional CWE-126 flaw). */
void CWE126_Buffer_Overread__CWE129_rand_66b_badSink(int dataArray[])
{
    /* the index travels to this translation unit in slot 2 of the array */
    int index = dataArray[2];
    int values[10] = { 0 };

    /* POTENTIAL FLAW: the upper bound of the array index is never checked */
    if (index >= 0)
    {
        printIntLine(values[index]);
    }
    else
    {
        printLine("ERROR: Array index is negative");
    }
}

#endif /* OMITBAD */

#ifndef OMITGOOD

/* goodG2B: pairs the GoodSource (caller supplies a known-safe index) with the
 * BadSink's incomplete check; the body mirrors the bad sink on purpose. */
void CWE126_Buffer_Overread__CWE129_rand_66b_goodG2BSink(int dataArray[])
{
    int index = dataArray[2];
    int values[10] = { 0 };

    /* POTENTIAL FLAW: the upper bound of the array index is never checked */
    if (index >= 0)
    {
        printIntLine(values[index]);
    }
    else
    {
        printLine("ERROR: Array index is negative");
    }
}

/* goodB2G: pairs the BadSource with a sink that validates both bounds before
 * touching the buffer, preventing the overread. */
void CWE126_Buffer_Overread__CWE129_rand_66b_goodB2GSink(int dataArray[])
{
    int index = dataArray[2];
    int values[10] = { 0 };

    /* FIX: properly validate the array index and prevent a buffer overread */
    if (index >= 0 && index < (10))
    {
        printIntLine(values[index]);
    }
    else
    {
        printLine("ERROR: Array index is out-of-bounds");
    }
}

#endif /* OMITGOOD */
{ "content_hash": "9ef5ac0edfb411b45edc34bc8fd55c57", "timestamp": "", "source": "github", "line_count": 82, "max_line_length": 109, "avg_line_length": 28.829268292682926, "alnum_prop": 0.5901015228426396, "repo_name": "JianpingZeng/xcc", "id": "a188e8c8506b85c47aebdb147ce3c631d557bfc5", "size": "2364", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "xcc/test/juliet/testcases/CWE126_Buffer_Overread/s01/CWE126_Buffer_Overread__CWE129_rand_66b.c", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

/*
Input to cgo -godefs. See also mkerrors.sh and mkall.sh

NOTE: This file is never compiled directly (build tag "ignore"); it is the
source that `cgo -godefs` reads to regenerate the checked-in ztypes file.
Changing any declaration here changes the generated Go/C type mappings.
*/

// +godefs map struct_in_addr [4]byte /* in_addr */
// +godefs map struct_in6_addr [16]byte /* in6_addr */

package syscall

/*
#define KERNEL
#include <dirent.h>
#include <fcntl.h>
#include <signal.h>
#include <stdio.h>
#include <unistd.h>
#include <sys/event.h>
#include <sys/mman.h>
#include <sys/mount.h>
#include <sys/param.h>
#include <sys/ptrace.h>
#include <sys/resource.h>
#include <sys/select.h>
#include <sys/signal.h>
#include <sys/socket.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <sys/types.h>
#include <sys/un.h>
#include <sys/wait.h>

#include <net/bpf.h>
#include <net/if.h>
#include <net/if_dl.h>
#include <net/route.h>
#include <netinet/in.h>
#include <netinet/tcp.h>

enum {
	sizeofPtr = sizeof(void*),
};

union sockaddr_all {
	struct sockaddr s1;     // this one gets used for fields
	struct sockaddr_in s2;  // these pad it out
	struct sockaddr_in6 s3;
	struct sockaddr_un s4;
	struct sockaddr_dl s5;
};

struct sockaddr_any {
	struct sockaddr addr;
	char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)];
};
*/
import "C"

// Machine characteristics; for internal use.

const (
	sizeofPtr      = C.sizeofPtr
	sizeofShort    = C.sizeof_short
	sizeofInt      = C.sizeof_int
	sizeofLong     = C.sizeof_long
	sizeofLongLong = C.sizeof_longlong
)

// Basic types

type (
	_C_short     C.short
	_C_int       C.int
	_C_long      C.long
	_C_long_long C.longlong
)

// Time

type Timespec C.struct_timespec

type Timeval C.struct_timeval

// Processes

type Rusage C.struct_rusage

type Rlimit C.struct_rlimit

type _Gid_t C.gid_t

// Files

const (
	O_CLOEXEC = 0 // not supported
)

const ( // Directory mode bits
	S_IFMT   = C.S_IFMT
	S_IFIFO  = C.S_IFIFO
	S_IFCHR  = C.S_IFCHR
	S_IFDIR  = C.S_IFDIR
	S_IFBLK  = C.S_IFBLK
	S_IFREG  = C.S_IFREG
	S_IFLNK  = C.S_IFLNK
	S_IFSOCK = C.S_IFSOCK
	S_ISUID  = C.S_ISUID
	S_ISGID  = C.S_ISGID
	S_ISVTX  = C.S_ISVTX
	S_IRUSR  = C.S_IRUSR
	S_IWUSR  = C.S_IWUSR
	S_IXUSR  = C.S_IXUSR
)

type Stat_t C.struct_stat

type Statfs_t C.struct_statfs

type Flock_t C.struct_flock

type Dirent C.struct_dirent

type Fsid C.struct_fsid

// Sockets

type RawSockaddrInet4 C.struct_sockaddr_in

type RawSockaddrInet6 C.struct_sockaddr_in6

type RawSockaddrUnix C.struct_sockaddr_un

type RawSockaddrDatalink C.struct_sockaddr_dl

type RawSockaddr C.struct_sockaddr

type RawSockaddrAny C.struct_sockaddr_any

type _Socklen C.socklen_t

type Linger C.struct_linger

type Iovec C.struct_iovec

type IPMreq C.struct_ip_mreq

type IPMreqn C.struct_ip_mreqn

type IPv6Mreq C.struct_ipv6_mreq

type Msghdr C.struct_msghdr

type Cmsghdr C.struct_cmsghdr

type Inet6Pktinfo C.struct_in6_pktinfo

const (
	SizeofSockaddrInet4    = C.sizeof_struct_sockaddr_in
	SizeofSockaddrInet6    = C.sizeof_struct_sockaddr_in6
	SizeofSockaddrAny      = C.sizeof_struct_sockaddr_any
	SizeofSockaddrUnix     = C.sizeof_struct_sockaddr_un
	SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl
	SizeofLinger           = C.sizeof_struct_linger
	SizeofIPMreq           = C.sizeof_struct_ip_mreq
	SizeofIPMreqn          = C.sizeof_struct_ip_mreqn
	SizeofIPv6Mreq         = C.sizeof_struct_ipv6_mreq
	SizeofMsghdr           = C.sizeof_struct_msghdr
	SizeofCmsghdr          = C.sizeof_struct_cmsghdr
	SizeofInet6Pktinfo     = C.sizeof_struct_in6_pktinfo
)

// Ptrace requests

const (
	PTRACE_TRACEME = C.PT_TRACE_ME
	PTRACE_CONT    = C.PT_CONTINUE
	PTRACE_KILL    = C.PT_KILL
)

// Events (kqueue, kevent)

type Kevent_t C.struct_kevent

// Select

type FdSet C.fd_set

// Routing and interface messages

const (
	SizeofIfMsghdr   = C.sizeof_struct_if_msghdr
	SizeofIfData     = C.sizeof_struct_if_data
	SizeofIfaMsghdr  = C.sizeof_struct_ifa_msghdr
	SizeofIfmaMsghdr = C.sizeof_struct_ifma_msghdr
	SizeofRtMsghdr   = C.sizeof_struct_rt_msghdr
	SizeofRtMetrics  = C.sizeof_struct_rt_metrics
)

type IfMsghdr C.struct_if_msghdr

type IfData C.struct_if_data

type IfaMsghdr C.struct_ifa_msghdr

type IfmaMsghdr C.struct_ifma_msghdr

type RtMsghdr C.struct_rt_msghdr

type RtMetrics C.struct_rt_metrics

// Berkeley packet filter

const (
	SizeofBpfVersion    = C.sizeof_struct_bpf_version
	SizeofBpfStat       = C.sizeof_struct_bpf_stat
	SizeofBpfZbuf       = C.sizeof_struct_bpf_zbuf
	SizeofBpfProgram    = C.sizeof_struct_bpf_program
	SizeofBpfInsn       = C.sizeof_struct_bpf_insn
	SizeofBpfHdr        = C.sizeof_struct_bpf_hdr
	SizeofBpfZbufHeader = C.sizeof_struct_bpf_zbuf_header
)

type BpfVersion C.struct_bpf_version

type BpfStat C.struct_bpf_stat

type BpfZbuf C.struct_bpf_zbuf

type BpfProgram C.struct_bpf_program

type BpfInsn C.struct_bpf_insn

type BpfHdr C.struct_bpf_hdr

type BpfZbufHeader C.struct_bpf_zbuf_header
{ "content_hash": "82e95f8192f6a1a9b53f4721d2b7c212", "timestamp": "", "source": "github", "line_count": 240, "max_line_length": 64, "avg_line_length": 20.445833333333333, "alnum_prop": 0.7161198288159771, "repo_name": "Triskite/willstone-goclone", "id": "6e1dd4310af0c439b478346eff2ebcad66ad49cd", "size": "4907", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "src/pkg/syscall/types_freebsd.go", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Assembly", "bytes": "280892" }, { "name": "Awk", "bytes": "3780" }, { "name": "Bison", "bytes": "92689" }, { "name": "C", "bytes": "4559937" }, { "name": "C++", "bytes": "75538" }, { "name": "CSS", "bytes": "1897" }, { "name": "Emacs Lisp", "bytes": "34387" }, { "name": "Go", "bytes": "14854262" }, { "name": "HTML", "bytes": "103824" }, { "name": "JavaScript", "bytes": "1033" }, { "name": "Logos", "bytes": "1248" }, { "name": "Makefile", "bytes": "10175" }, { "name": "OpenEdge ABL", "bytes": "9784" }, { "name": "Perl", "bytes": "189741" }, { "name": "Python", "bytes": "123397" }, { "name": "Shell", "bytes": "73639" }, { "name": "VimL", "bytes": "22363" } ], "symlink_target": "" }
// Example script: pull everything from an external library into scope and
// construct one of its types. `X` is expected to be provided by
// com.yoururl.libraryname; the instantiation exercises the import.
import com.yoururl.libraryname._

new X
{ "content_hash": "a5eef77e586a3668f87ae297d7930d2e", "timestamp": "", "source": "github", "line_count": 2, "max_line_length": 32, "avg_line_length": 19.5, "alnum_prop": 0.7948717948717948, "repo_name": "P7h/ScalaPlayground", "id": "dcc9735d01c5156739d3422a9d018265be93eb6d", "size": "60", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Atomic Scala/atomic-scala-examples/examples/17_Summary2/UseALibrary.scala", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "17178" }, { "name": "HTML", "bytes": "8949" }, { "name": "Java", "bytes": "511" }, { "name": "Scala", "bytes": "331190" } ], "symlink_target": "" }
require 'milkode_test_work'
require 'milkode/cdstk/cdstk_command'

# Tests for CdstkCommand's default-database switching (setdb) behavior.
# Uses MilkodeTestWork to create a throwaway database directory per test.
class TestCdstkCommand < Test::Unit::TestCase
  def setup
    @work = MilkodeTestWork.new({:default_db => true})
  end

  # Run the setdb scenarios in sequence: set first, then reset
  # (reset depends on a database having been set).
  def test_main
    t_setdb_set
    t_setdb_reset
  end

  def teardown
    @work.teardown
  end

  private

  # Switching the default database to an existing path succeeds;
  # pointing it at a missing path raises NotExistDatabase.
  def t_setdb_set
    # Switch the default database
    CdstkCommand.setdb_set(@work.expand_path "db1")

    # Switch the default database
    # @work.init_db( "db2" )
    # CdstkCommand.setdb_set(@work.expand_path "db2")

    # Specifying a database that does not exist raises an error
    assert_raise(Milkode::CdstkCommand::NotExistDatabase) {
      CdstkCommand.setdb_set("not_found")
    }
  end

  # setdb_set writes a '.milkode_db_dir' marker file; setdb_reset removes it.
  def t_setdb_reset
    path = @work.expand_path '.milkode_db_dir'

    CdstkCommand.setdb_set(@work.expand_path "db1")
    assert File.exist?(path)

    CdstkCommand.setdb_reset
    assert !File.exist?(path)
  end
end
{ "content_hash": "bf0c7dd8d353bd04dd0e184fed2b1ddc", "timestamp": "", "source": "github", "line_count": 43, "max_line_length": 98, "avg_line_length": 19.976744186046513, "alnum_prop": 0.6752037252619325, "repo_name": "ongaeshi/mruby-code-search", "id": "539de7cdfdb529666b9e66f936d430884f367e4d", "size": "1052", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "milkode/test/test_cdstk_command.rb", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "29" }, { "name": "CSS", "bytes": "16910" }, { "name": "JavaScript", "bytes": "5667" }, { "name": "Ruby", "bytes": "345688" } ], "symlink_target": "" }
require_relative 'node_util'

module Cisco
  # SnmpCommunity - node utility class for SNMP community config management.
  #
  # Wraps the device CLI via config_set/config_get. On :nexus platforms a
  # community and its group are configured in one command; on other platforms
  # (IOS XR style) the community, the group, and the community-to-group
  # mapping are three separate commands.
  class SnmpCommunity < NodeUtil
    @communities = nil

    # Create (or model) an SNMP community.
    #   name        - community string (required, String)
    #   group       - group/role to associate with the community (String)
    #   instantiate - when false, only build the Ruby object without
    #                 touching device config (used by self.communities)
    def initialize(name, group, instantiate=true)
      fail TypeError unless name.is_a?(String)
      fail TypeError unless group.is_a?(String)
      @name = name

      return unless instantiate
      if platform == :nexus
        config_set('snmp_community', 'community', state: '', name: @name, group: group)
      else
        config_set('snmp_community', 'community', state: '', name: @name)
        # create the mapping for group
        config_set('snmp_community', 'group_simple', state: '', group: group)
        config_set('snmp_community', 'group_community_mapping', name: @name, group: group) # rubocop:disable Metrics/LineLength
      end
    end

    # Hash of all communities currently configured on the node,
    # keyed by community string. Objects are built with instantiate=false
    # so no device config is issued.
    def self.communities
      @communities = {}
      comms = config_get('snmp_community', 'all_communities')
      unless comms.nil?
        comms.each do |comm|
          @communities[comm] = SnmpCommunity.new(comm, '', false)
        end
      end
      @communities
    end

    # Remove this community from the device.
    # CLI requires specifying a group even for "no" commands
    def destroy
      config_set('snmp_community', 'community', state: 'no', name: @name, group: 'null')
    end

    # name is read only
    # def name
    #   @name
    # end

    # Group currently associated with this community (platform-dependent
    # lookup: direct on :nexus, via the community->group mapping elsewhere).
    def group
      if platform == :nexus
        config_get('snmp_community', 'group', name: @name)
      else
        config_get('snmp_community', 'group_community_mapping', name: @name)
      end
    end

    def group=(group)
      fail TypeError unless group.is_a?(String)
      if platform == :nexus
        config_set('snmp_community', 'group', name: @name, group: group)
      else
        # create the mapping
        config_set('snmp_community', 'group_simple', group: group)
        config_set('snmp_community', 'group_community_mapping', name: @name, group: group) # rubocop:disable Metrics/LineLength
      end
    end

    def self.default_group
      config_get_default('snmp_community', 'group')
    end

    # ACL currently attached to this community, if any.
    def acl
      config_get('snmp_community', 'acl', name: @name)
    end

    # Attach an ACL to this community. Passing an empty string removes the
    # currently-configured ACL (the CLI "no" form requires naming the ACL,
    # hence the re-read of self.acl before removal).
    def acl=(acl)
      fail TypeError unless acl.is_a?(String)
      if acl.empty?
        # Look up the configured ACL so it can be named in the "no" command;
        # nothing to do if no ACL is currently set.
        acl = self.acl
        config_set('snmp_community', 'acl', state: 'no', name: @name, acl: acl) unless acl.empty? # rubocop:disable Metrics/LineLength
      else
        config_set('snmp_community', 'acl', state: '', name: @name, acl: acl)
      end
    end

    def self.default_acl
      config_get_default('snmp_community', 'acl')
    end
  end
end
{ "content_hash": "0572722be1687355d183c51d42414f5e", "timestamp": "", "source": "github", "line_count": 93, "max_line_length": 134, "avg_line_length": 29.56989247311828, "alnum_prop": 0.5821818181818181, "repo_name": "chrisvanheuveln/cisco-network-node-utils", "id": "4d5b7d44db8a780552f4d8f7b92f433cbd16d912", "size": "3387", "binary": false, "copies": "2", "ref": "refs/heads/develop", "path": "lib/cisco_node_utils/snmpcommunity.rb", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Protocol Buffer", "bytes": "2775" }, { "name": "Ruby", "bytes": "1377863" }, { "name": "Shell", "bytes": "14177" } ], "symlink_target": "" }
package com.pigbear.hi_andgmusic.ui.adapter; import android.content.Context; import android.support.v7.widget.AppCompatImageView; import android.support.v7.widget.RecyclerView; import android.text.Html; import android.text.TextUtils; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ImageView; import android.widget.TextView; import com.bumptech.glide.Glide; import com.jakewharton.rxbinding2.view.RxView; import com.pigbear.hi_andgmusic.R; import com.pigbear.hi_andgmusic.common.LocalMusicLibrary; import com.pigbear.hi_andgmusic.data.Song; import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; import io.reactivex.functions.Consumer; /** * @description: 本地歌曲适配器 */ public class LocalRecyclerAdapter extends RecyclerView.Adapter<LocalRecyclerAdapter.LocalMusicViewHolder> { private Context context; private List<Song> songs; private OnItemClickListener<Song> itemClickListener; public LocalRecyclerAdapter(Context context) { this.context = context; songs = new ArrayList<>(); } //给适配器设置数据,并更新 public void setData(List<Song> songs) { this.songs = songs; notifyDataSetChanged(); } public List<Song> getSongs() { return songs; } @Override public LocalMusicViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { View view = LayoutInflater.from(context).inflate(R.layout.recycler_localmusic_listitem, parent, false); return new LocalMusicViewHolder(view); } @Override public void onBindViewHolder(final LocalMusicViewHolder holder, final int position) { final Song song = songs.get(position); holder.title.setText(Html.fromHtml(song.getTitle())); if (TextUtils.isEmpty(song.getArtistName())) {//艺术家的名字为空 holder.detail.setText("unknown"); } else { holder.detail.setText(song.getArtistName()); } // if (MusicPlayManager.getInstance().getState() == PlaybackStateCompat.STATE_PLAYING || // MusicPlayManager.getInstance().getState() == PlaybackStateCompat.STATE_PAUSED) { // if 
(MusicPlayManager.getInstance().getMusicPlaylist().getCurrentPlay() != null && // song.getId() == MusicPlayManager.getInstance().getPlayingSong().getId()) { // holder.playstate.setVisibility(View.VISIBLE); // holder.playstate.setImageResource(R.drawable.song_play_icon); // holder.playstate.setImageTintList(R.color.theme_color_primary); // } else { // holder.playstate.setVisibility(View.GONE); // } // } //解析图片 Glide.with(context) .load(song.getCoverUrl()) .placeholder(R.drawable.cover) .into(holder.cover); } @Override public int getItemCount() { return songs.size(); } public OnItemClickListener getItemClickListener() { return itemClickListener; } public void setItemClickListener(OnItemClickListener itemClickListener) { this.itemClickListener = itemClickListener; } public class LocalMusicViewHolder extends RecyclerView.ViewHolder { public View musicLayout; public TextView title, detail; public ImageView cover; public AppCompatImageView setting; //public TintImageView playstate; public LocalMusicViewHolder(View itemView) { super(itemView); musicLayout = itemView.findViewById(R.id.local_song_item); title = (TextView) itemView.findViewById(R.id.local_song_title); detail = (TextView) itemView.findViewById(R.id.local_song_detail); cover = (ImageView) itemView.findViewById(R.id.local_song_cover); setting = (AppCompatImageView) itemView.findViewById(R.id.local_song_setting); //playstate = (TintImageView) itemView.findViewById(R.id.play_state); setting.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { Song song = songs.get(getAdapterPosition()); if (itemClickListener != null && song.isStatus()) { itemClickListener.onItemSettingClick(setting, song, getAdapterPosition()); } } }); RxView.clicks(musicLayout) .throttleFirst(1000, TimeUnit.MILLISECONDS) .subscribe(new Consumer<Object>() { @Override public void accept(Object o) throws Exception { if (songs.size() == 0) { songs = LocalMusicLibrary.getAllSongs(context); //setData(songs); } 
Song song = songs.get(getAdapterPosition()); if (itemClickListener != null && song.isStatus()) { itemClickListener.onItemClick(song, getAdapterPosition()); } } }); } } }
{ "content_hash": "5e6271a75593f315834f97d40104c291", "timestamp": "", "source": "github", "line_count": 144, "max_line_length": 111, "avg_line_length": 36.770833333333336, "alnum_prop": 0.6179414542020775, "repo_name": "AndGirl/HiAndG_Music", "id": "45d4bca32f6fbbdaad57f2db31756f51cb4ed0b5", "size": "5355", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/java/com/pigbear/hi_andgmusic/ui/adapter/LocalRecyclerAdapter.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "384702" } ], "symlink_target": "" }
"""Encoders for text data. * TextEncoder: base class * ByteTextEncoder: for ascii text * TokenTextEncoder: with user-supplied vocabulary file * SubwordTextEncoder: invertible """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import collections from itertools import chain import math import re import tempfile import time import numpy as np import six from six.moves import range # pylint: disable=redefined-builtin from tensor2tensor.data_generators import tokenizer import tensorflow as tf # Reserved tokens for things like padding and EOS symbols. PAD = "<pad>" EOS = "<EOS>" RESERVED_TOKENS = [PAD, EOS] NUM_RESERVED_TOKENS = len(RESERVED_TOKENS) PAD_ID = RESERVED_TOKENS.index(PAD) # Normally 0 EOS_ID = RESERVED_TOKENS.index(EOS) # Normally 1 if six.PY2: RESERVED_TOKENS_BYTES = RESERVED_TOKENS else: RESERVED_TOKENS_BYTES = [bytes(PAD, "ascii"), bytes(EOS, "ascii")] # Regular expression for unescaping token strings. # '\u' is converted to '_' # '\\' is converted to '\' # '\213;' is converted to unichr(213) _UNESCAPE_REGEX = re.compile(r"\\u|\\\\|\\([0-9]+);") _ESCAPE_CHARS = set(u"\\_u;0123456789") # Unicode utility functions that work with Python 2 and 3 def native_to_unicode(s): return s if is_unicode(s) else to_unicode(s) def unicode_to_native(s): if six.PY2: return s.encode("utf-8") if is_unicode(s) else s else: return s def is_unicode(s): if six.PY2: if isinstance(s, unicode): return True else: if isinstance(s, str): return True return False def to_unicode(s, ignore_errors=False): if is_unicode(s): return s error_mode = "ignore" if ignore_errors else "strict" return s.decode("utf-8", errors=error_mode) def to_unicode_ignore_errors(s): return to_unicode(s, ignore_errors=True) def strip_ids(ids, ids_to_strip): """Strip ids_to_strip from the end ids.""" ids = list(ids) while ids and ids[-1] in ids_to_strip: ids.pop() return ids class TextEncoder(object): """Base class for converting from ints to/from human 
readable strings.""" def __init__(self, num_reserved_ids=NUM_RESERVED_TOKENS): self._num_reserved_ids = num_reserved_ids @property def num_reserved_ids(self): return self._num_reserved_ids def encode(self, s): """Transform a human-readable string into a sequence of int ids. The ids should be in the range [num_reserved_ids, vocab_size). Ids [0, num_reserved_ids) are reserved. EOS is not appended. Args: s: human-readable string to be converted. Returns: ids: list of integers """ return [int(w) + self._num_reserved_ids for w in s.split()] def decode(self, ids, strip_extraneous=False): """Transform a sequence of int ids into a human-readable string. EOS is not expected in ids. Args: ids: list of integers to be converted. strip_extraneous: bool, whether to strip off extraneous tokens (EOS and PAD). Returns: s: human-readable string. """ if strip_extraneous: ids = strip_ids(ids, list(range(self._num_reserved_ids or 0))) return " ".join(self.decode_list(ids)) def decode_list(self, ids): """Transform a sequence of int ids into a their string versions. This method supports transforming individual input/output ids to their string versions so that sequence to/from text conversions can be visualized in a human readable format. Args: ids: list of integers to be converted. Returns: strs: list of human-readable string. """ decoded_ids = [] for id_ in ids: if 0 <= id_ < self._num_reserved_ids: decoded_ids.append(RESERVED_TOKENS[int(id_)]) else: decoded_ids.append(id_ - self._num_reserved_ids) return [str(d) for d in decoded_ids] @property def vocab_size(self): raise NotImplementedError() class ByteTextEncoder(TextEncoder): """Encodes each byte to an id. 
For 8-bit strings only.""" def encode(self, s): numres = self._num_reserved_ids if six.PY2: if isinstance(s, unicode): s = s.encode("utf-8") return [ord(c) + numres for c in s] # Python3: explicitly convert to UTF-8 return [c + numres for c in s.encode("utf-8")] def decode(self, ids, strip_extraneous=False): if strip_extraneous: ids = strip_ids(ids, list(range(self._num_reserved_ids or 0))) numres = self._num_reserved_ids decoded_ids = [] int2byte = six.int2byte for id_ in ids: if 0 <= id_ < numres: decoded_ids.append(RESERVED_TOKENS_BYTES[int(id_)]) else: decoded_ids.append(int2byte(id_ - numres)) if six.PY2: return "".join(decoded_ids) # Python3: join byte arrays and then decode string return b"".join(decoded_ids).decode("utf-8", "replace") def decode_list(self, ids): numres = self._num_reserved_ids decoded_ids = [] int2byte = six.int2byte for id_ in ids: if 0 <= id_ < numres: decoded_ids.append(RESERVED_TOKENS_BYTES[int(id_)]) else: decoded_ids.append(int2byte(id_ - numres)) # Python3: join byte arrays and then decode string return decoded_ids @property def vocab_size(self): return 2**8 + self._num_reserved_ids class ClassLabelEncoder(TextEncoder): """Encoder for class labels.""" def __init__(self, class_labels=None, class_labels_fname=None): super(ClassLabelEncoder, self).__init__(num_reserved_ids=0) if class_labels_fname: with tf.gfile.Open(class_labels_fname) as f: class_labels = [label.strip() for label in f.readlines()] assert class_labels self._class_labels = class_labels def encode(self, s): label_str = s return self._class_labels.index(label_str) def decode(self, ids, strip_extraneous=False): del strip_extraneous label_id = ids if isinstance(label_id, list): assert len(label_id) == 1 label_id, = label_id if isinstance(label_id, np.ndarray): label_id = np.squeeze(label_id) return self._class_labels[label_id] def decode_list(self, ids): return [self._class_labels[i] for i in ids] @property def vocab_size(self): return len(self._class_labels) class 
OneHotClassLabelEncoder(ClassLabelEncoder): """One-hot encoder for class labels.""" def encode(self, label_str, on_value=1, off_value=0): # pylint: disable=arguments-differ e = np.full(self.vocab_size, off_value, dtype=np.int32) e[self._class_labels.index(label_str)] = on_value return e.tolist() def decode(self, ids, strip_extraneous=False): del strip_extraneous label_id = ids if isinstance(label_id, np.ndarray): label_id = np.squeeze(label_id).astype(np.int8).tolist() assert isinstance(label_id, list) assert len(label_id) == self.vocab_size return self._class_labels[label_id.index(1)] @property def vocab_size(self): return len(self._class_labels) class TokenTextEncoder(TextEncoder): """Encoder based on a user-supplied vocabulary (file or list).""" def __init__(self, vocab_filename, reverse=False, vocab_list=None, replace_oov=None, num_reserved_ids=NUM_RESERVED_TOKENS): """Initialize from a file or list, one token per line. Handling of reserved tokens works as follows: - When initializing from a list, we add reserved tokens to the vocab. - When initializing from a file, we do not add reserved tokens to the vocab. - When saving vocab files, we save reserved tokens to the file. Args: vocab_filename: If not None, the full filename to read vocab from. If this is not None, then vocab_list should be None. reverse: Boolean indicating if tokens should be reversed during encoding and decoding. vocab_list: If not None, a list of elements of the vocabulary. If this is not None, then vocab_filename should be None. replace_oov: If not None, every out-of-vocabulary token seen when encoding will be replaced by this string (which must be in vocab). num_reserved_ids: Number of IDs to save for reserved tokens like <EOS>. 
""" super(TokenTextEncoder, self).__init__(num_reserved_ids=num_reserved_ids) self._reverse = reverse self._replace_oov = replace_oov if vocab_filename: self._init_vocab_from_file(vocab_filename) else: assert vocab_list is not None self._init_vocab_from_list(vocab_list) def encode(self, s): """Converts a space-separated string of tokens to a list of ids.""" sentence = s tokens = sentence.strip().split() if self._replace_oov is not None: tokens = [t if t in self._token_to_id else self._replace_oov for t in tokens] ret = [self._token_to_id[tok] for tok in tokens] return ret[::-1] if self._reverse else ret def decode(self, ids, strip_extraneous=False): return " ".join(self.decode_list(ids)) def decode_list(self, ids): seq = reversed(ids) if self._reverse else ids return [self._safe_id_to_token(i) for i in seq] @property def vocab_size(self): return len(self._id_to_token) def _safe_id_to_token(self, idx): return self._id_to_token.get(idx, "ID_%d" % idx) def _init_vocab_from_file(self, filename): """Load vocab from a file. Args: filename: The file to load vocabulary from. """ with tf.gfile.Open(filename) as f: tokens = [token.strip() for token in f.readlines()] def token_gen(): for token in tokens: yield token self._init_vocab(token_gen(), add_reserved_tokens=False) def _init_vocab_from_list(self, vocab_list): """Initialize tokens from a list of tokens. It is ok if reserved tokens appear in the vocab list. They will be removed. The set of tokens in vocab_list should be unique. Args: vocab_list: A list of tokens. 
""" def token_gen(): for token in vocab_list: if token not in RESERVED_TOKENS: yield token self._init_vocab(token_gen()) def _init_vocab(self, token_generator, add_reserved_tokens=True): """Initialize vocabulary with tokens from token_generator.""" self._id_to_token = {} non_reserved_start_index = 0 if add_reserved_tokens: self._id_to_token.update(enumerate(RESERVED_TOKENS)) non_reserved_start_index = len(RESERVED_TOKENS) self._id_to_token.update( enumerate(token_generator, start=non_reserved_start_index)) # _token_to_id is the reverse of _id_to_token self._token_to_id = dict((v, k) for k, v in six.iteritems(self._id_to_token)) def store_to_file(self, filename): """Write vocab file to disk. Vocab files have one token per line. The file ends in a newline. Reserved tokens are written to the vocab file as well. Args: filename: Full path of the file to store the vocab to. """ with tf.gfile.Open(filename, "w") as f: for i in range(len(self._id_to_token)): f.write(self._id_to_token[i] + "\n") def _escape_token(token, alphabet): """Escape away underscores and OOV characters and append '_'. This allows the token to be expressed as the concatenation of a list of subtokens from the vocabulary. The underscore acts as a sentinel which allows us to invertibly concatenate multiple such lists. Args: token: A unicode string to be escaped. alphabet: A set of all characters in the vocabulary's alphabet. Returns: escaped_token: An escaped unicode string. Raises: ValueError: If the provided token is not unicode. """ if not isinstance(token, six.text_type): raise ValueError("Expected string type for token, got %s" % type(token)) token = token.replace(u"\\", u"\\\\").replace(u"_", u"\\u") ret = [c if c in alphabet and c != u"\n" else r"\%d;" % ord(c) for c in token] return u"".join(ret) + "_" def _unescape_token(escaped_token): """Inverse of _escape_token(). 
Args: escaped_token: a unicode string Returns: token: a unicode string """ def match(m): if m.group(1) is None: return u"_" if m.group(0) == u"\\u" else u"\\" try: return six.unichr(int(m.group(1))) except (ValueError, OverflowError) as _: return u"\u3013" # Unicode for undefined character. trimmed = escaped_token[:-1] if escaped_token.endswith("_") else escaped_token return _UNESCAPE_REGEX.sub(match, trimmed) class SubwordTextEncoder(TextEncoder): """Class for invertibly encoding text using a limited vocabulary. Invertibly encodes a native string as a sequence of subtokens from a limited vocabulary. A SubwordTextEncoder is built from a corpus (so it is tailored to the text in the corpus), and stored to a file. See text_encoder_build_subword.py. It can then be loaded and used to encode/decode any text. Encoding has four phases: 1. Tokenize into a list of tokens. Each token is a unicode string of either all alphanumeric characters or all non-alphanumeric characters. We drop tokens consisting of a single space that are between two alphanumeric tokens. 2. Escape each token. This escapes away special and out-of-vocabulary characters, and makes sure that each token ends with an underscore, and has no other underscores. 3. Represent each escaped token as a the concatenation of a list of subtokens from the limited vocabulary. Subtoken selection is done greedily from beginning to end. That is, we construct the list in order, always picking the longest subtoken in our vocabulary that matches a prefix of the remaining portion of the encoded token. 4. Concatenate these lists. This concatenation is invertible due to the fact that the trailing underscores indicate when one list is finished. """ def __init__(self, filename=None): """Initialize and read from a file, if provided. Args: filename: filename from which to read vocab. 
If None, do not load a vocab """ self._alphabet = set() self.filename = filename if filename is not None: self._load_from_file(filename) super(SubwordTextEncoder, self).__init__() def encode(self, s): """Converts a native string to a list of subtoken ids. Args: s: a native string. Returns: a list of integers in the range [0, vocab_size) """ return self._tokens_to_subtoken_ids( tokenizer.encode(native_to_unicode(s))) def encode_without_tokenizing(self, token_text): """Converts string to list of subtoken ids without calling tokenizer. This treats `token_text` as a single token and directly converts it to subtoken ids. This may be useful when the default tokenizer doesn't do what we want (e.g., when encoding text with tokens composed of lots of nonalphanumeric characters). It is then up to the caller to make sure that raw text is consistently converted into tokens. Only use this if you are sure that `encode` doesn't suit your needs. Args: token_text: A native string representation of a single token. Returns: A list of subword token ids; i.e., integers in the range [0, vocab_size). """ return self._tokens_to_subtoken_ids([native_to_unicode(token_text)]) def decode(self, ids, strip_extraneous=False): """Converts a sequence of subtoken ids to a native string. Args: ids: a list of integers in the range [0, vocab_size) strip_extraneous: bool, whether to strip off extraneous tokens (EOS and PAD). Returns: a native string """ if strip_extraneous: ids = strip_ids(ids, list(range(self._num_reserved_ids or 0))) return unicode_to_native( tokenizer.decode(self._subtoken_ids_to_tokens(ids))) def decode_list(self, ids): return [self._subtoken_id_to_subtoken_string(s) for s in ids] @property def vocab_size(self): """The subtoken vocabulary size.""" return len(self._all_subtoken_strings) def _tokens_to_subtoken_ids(self, tokens): """Converts a list of tokens to a list of subtoken ids. Args: tokens: a list of strings. 
Returns: a list of integers in the range [0, vocab_size) """ ret = [] for token in tokens: ret.extend(self._token_to_subtoken_ids(token)) return ret def _token_to_subtoken_ids(self, token): """Converts token to a list of subtoken ids. Args: token: a string. Returns: a list of integers in the range [0, vocab_size) """ cache_location = hash(token) % self._cache_size cache_key, cache_value = self._cache[cache_location] if cache_key == token: return cache_value ret = self._escaped_token_to_subtoken_ids( _escape_token(token, self._alphabet)) self._cache[cache_location] = (token, ret) return ret def _subtoken_ids_to_tokens(self, subtokens): """Converts a list of subtoken ids to a list of tokens. Args: subtokens: a list of integers in the range [0, vocab_size) Returns: a list of strings. """ concatenated = "".join( [self._subtoken_id_to_subtoken_string(s) for s in subtokens]) split = concatenated.split("_") ret = [] for t in split: if t: unescaped = _unescape_token(t + "_") if unescaped: ret.append(unescaped) return ret def _subtoken_id_to_subtoken_string(self, subtoken): """Converts a subtoken integer ID to a subtoken string.""" if 0 <= subtoken < self.vocab_size: return self._all_subtoken_strings[subtoken] return u"" def _escaped_token_to_subtoken_strings(self, escaped_token): """Converts an escaped token string to a list of subtoken strings. Args: escaped_token: An escaped token as a unicode string. Returns: A list of subtokens as unicode strings. """ # NOTE: This algorithm is greedy; it won't necessarily produce the "best" # list of subtokens. ret = [] start = 0 token_len = len(escaped_token) while start < token_len: for end in range( min(token_len, start + self._max_subtoken_len), start, -1): subtoken = escaped_token[start:end] if subtoken in self._subtoken_string_to_id: ret.append(subtoken) start = end break else: # Did not break # If there is no possible encoding of the escaped token then one of the # characters in the token is not in the alphabet. 
This should be # impossible and would be indicative of a bug. assert False, "Token substring not found in subtoken vocabulary." return ret def _escaped_token_to_subtoken_ids(self, escaped_token): """Converts an escaped token string to a list of subtoken IDs. Args: escaped_token: An escaped token as a unicode string. Returns: A list of subtoken IDs as integers. """ return [ self._subtoken_string_to_id[subtoken] for subtoken in self._escaped_token_to_subtoken_strings(escaped_token) ] @classmethod def build_from_generator(cls, generator, target_size, max_subtoken_length=None, reserved_tokens=None): """Builds a SubwordTextEncoder from the generated text. Args: generator: yields text. target_size: int, approximate vocabulary size to create. max_subtoken_length: Maximum length of a subtoken. If this is not set, then the runtime and memory use of creating the vocab is quadratic in the length of the longest token. If this is set, then it is instead O(max_subtoken_length * length of longest token). reserved_tokens: List of reserved tokens. The global variable `RESERVED_TOKENS` must be a prefix of `reserved_tokens`. If this argument is `None`, it will use `RESERVED_TOKENS`. Returns: SubwordTextEncoder with `vocab_size` approximately `target_size`. """ token_counts = collections.defaultdict(int) for item in generator: for tok in tokenizer.encode(native_to_unicode(item)): token_counts[tok] += 1 encoder = cls.build_to_target_size( target_size, token_counts, 1, 1e3, max_subtoken_length=max_subtoken_length, reserved_tokens=reserved_tokens) return encoder @classmethod def build_to_target_size(cls, target_size, token_counts, min_val, max_val, max_subtoken_length=None, reserved_tokens=None, num_iterations=4): """Builds a SubwordTextEncoder that has `vocab_size` near `target_size`. Uses simple recursive binary search to find a minimum token count that most closely matches the `target_size`. Args: target_size: Desired vocab_size to approximate. 
token_counts: A dictionary of token counts, mapping string to int. min_val: An integer; lower bound for the minimum token count. max_val: An integer; upper bound for the minimum token count. max_subtoken_length: Maximum length of a subtoken. If this is not set, then the runtime and memory use of creating the vocab is quadratic in the length of the longest token. If this is set, then it is instead O(max_subtoken_length * length of longest token). reserved_tokens: List of reserved tokens. The global variable `RESERVED_TOKENS` must be a prefix of `reserved_tokens`. If this argument is `None`, it will use `RESERVED_TOKENS`. num_iterations: An integer; how many iterations of refinement. Returns: A SubwordTextEncoder instance. Raises: ValueError: If `min_val` is greater than `max_val`. """ if min_val > max_val: raise ValueError("Lower bound for the minimum token count " "is greater than the upper bound.") if target_size < 1: raise ValueError("Target size must be positive.") if reserved_tokens is None: reserved_tokens = RESERVED_TOKENS def bisect(min_val, max_val): """Bisection to find the right size.""" present_count = (max_val + min_val) // 2 tf.logging.info("Trying min_count %d" % present_count) subtokenizer = cls() subtokenizer.build_from_token_counts( token_counts, present_count, num_iterations, max_subtoken_length=max_subtoken_length, reserved_tokens=reserved_tokens) # Being within 1% of the target size is ok. is_ok = abs(subtokenizer.vocab_size - target_size) * 100 < target_size # If min_val == max_val, we can't do any better than this. 
if is_ok or min_val >= max_val or present_count < 2: return subtokenizer if subtokenizer.vocab_size > target_size: other_subtokenizer = bisect(present_count + 1, max_val) else: other_subtokenizer = bisect(min_val, present_count - 1) if other_subtokenizer is None: return subtokenizer if (abs(other_subtokenizer.vocab_size - target_size) < abs(subtokenizer.vocab_size - target_size)): return other_subtokenizer return subtokenizer return bisect(min_val, max_val) def build_from_token_counts(self, token_counts, min_count, num_iterations=4, reserved_tokens=None, max_subtoken_length=None): """Train a SubwordTextEncoder based on a dictionary of word counts. Args: token_counts: a dictionary of Unicode strings to int. min_count: an integer - discard subtokens with lower counts. num_iterations: an integer. how many iterations of refinement. reserved_tokens: List of reserved tokens. The global variable `RESERVED_TOKENS` must be a prefix of `reserved_tokens`. If this argument is `None`, it will use `RESERVED_TOKENS`. max_subtoken_length: Maximum length of a subtoken. If this is not set, then the runtime and memory use of creating the vocab is quadratic in the length of the longest token. If this is set, then it is instead O(max_subtoken_length * length of longest token). Raises: ValueError: if reserved is not 0 or len(RESERVED_TOKENS). In this case, it is not clear what the space is being reserved for, or when it will be filled in. """ if reserved_tokens is None: reserved_tokens = RESERVED_TOKENS else: # There is not complete freedom in replacing RESERVED_TOKENS. for default, proposed in zip(RESERVED_TOKENS, reserved_tokens): if default != proposed: raise ValueError("RESERVED_TOKENS must be a prefix of " "reserved_tokens.") # Initialize the alphabet. Note, this must include reserved tokens or it can # result in encoding failures. 
alphabet_tokens = chain(six.iterkeys(token_counts), [native_to_unicode(t) for t in reserved_tokens]) self._init_alphabet_from_tokens(alphabet_tokens) # Bootstrap the initial list of subtokens with the characters from the # alphabet plus the escaping characters. self._init_subtokens_from_list(list(self._alphabet), reserved_tokens=reserved_tokens) # We build iteratively. On each iteration, we segment all the words, # then count the resulting potential subtokens, keeping the ones # with high enough counts for our new vocabulary. if min_count < 1: min_count = 1 for i in range(num_iterations): tf.logging.info("Iteration {0}".format(i)) # Collect all substrings of the encoded token that break along current # subtoken boundaries. subtoken_counts = collections.defaultdict(int) for token, count in six.iteritems(token_counts): iter_start_time = time.time() escaped_token = _escape_token(token, self._alphabet) subtokens = self._escaped_token_to_subtoken_strings(escaped_token) start = 0 for subtoken in subtokens: last_position = len(escaped_token) + 1 if max_subtoken_length is not None: last_position = min(last_position, start + max_subtoken_length) for end in range(start + 1, last_position): new_subtoken = escaped_token[start:end] subtoken_counts[new_subtoken] += count start += len(subtoken) iter_time_secs = time.time() - iter_start_time if iter_time_secs > 0.1: tf.logging.info(u"Processing token [{0}] took {1} seconds, consider " "setting Text2TextProblem.max_subtoken_length to a " "smaller value.".format(token, iter_time_secs)) # Array of sets of candidate subtoken strings, by length. 
len_to_subtoken_strings = [] for subtoken_string, count in six.iteritems(subtoken_counts): lsub = len(subtoken_string) if count >= min_count: while len(len_to_subtoken_strings) <= lsub: len_to_subtoken_strings.append(set()) len_to_subtoken_strings[lsub].add(subtoken_string) # Consider the candidates longest to shortest, so that if we accept # a longer subtoken string, we can decrement the counts of its prefixes. new_subtoken_strings = [] for lsub in range(len(len_to_subtoken_strings) - 1, 0, -1): subtoken_strings = len_to_subtoken_strings[lsub] for subtoken_string in subtoken_strings: count = subtoken_counts[subtoken_string] if count >= min_count: # Exclude alphabet tokens here, as they must be included later, # explicitly, regardless of count. if subtoken_string not in self._alphabet: new_subtoken_strings.append((count, subtoken_string)) for l in range(1, lsub): subtoken_counts[subtoken_string[:l]] -= count # Include the alphabet explicitly to guarantee all strings are encodable. new_subtoken_strings.extend((subtoken_counts.get(a, 0), a) for a in self._alphabet) new_subtoken_strings.sort(reverse=True) # Reinitialize to the candidate vocabulary. 
new_subtoken_strings = [subtoken for _, subtoken in new_subtoken_strings] if reserved_tokens: escaped_reserved_tokens = [ _escape_token(native_to_unicode(t), self._alphabet) for t in reserved_tokens ] new_subtoken_strings = escaped_reserved_tokens + new_subtoken_strings self._init_subtokens_from_list(new_subtoken_strings) tf.logging.info("vocab_size = %d" % self.vocab_size) @property def all_subtoken_strings(self): return tuple(self._all_subtoken_strings) def dump(self): """Debugging dump of the current subtoken vocabulary.""" subtoken_strings = [(i, s) for s, i in six.iteritems(self._subtoken_string_to_id)] print(u", ".join(u"{0} : '{1}'".format(i, s) for i, s in sorted(subtoken_strings))) def _init_subtokens_from_list(self, subtoken_strings, reserved_tokens=None): """Initialize token information from a list of subtoken strings. Args: subtoken_strings: a list of subtokens reserved_tokens: List of reserved tokens. We must have `reserved_tokens` as None or the empty list, or else the global variable `RESERVED_TOKENS` must be a prefix of `reserved_tokens`. Raises: ValueError: if reserved is not 0 or len(RESERVED_TOKENS). In this case, it is not clear what the space is being reserved for, or when it will be filled in. """ if reserved_tokens is None: reserved_tokens = [] if reserved_tokens: self._all_subtoken_strings = reserved_tokens + subtoken_strings else: self._all_subtoken_strings = subtoken_strings # we remember the maximum length of any subtoken to avoid having to # check arbitrarily long strings. self._max_subtoken_len = max([len(s) for s in subtoken_strings]) self._subtoken_string_to_id = { s: i + len(reserved_tokens) for i, s in enumerate(subtoken_strings) if s } # Initialize the cache to empty. 
self._cache_size = 2 ** 20 self._cache = [(None, None)] * self._cache_size def _init_alphabet_from_tokens(self, tokens): """Initialize alphabet from an iterable of token or subtoken strings.""" # Include all characters from all tokens in the alphabet to guarantee that # any token can be encoded. Additionally, include all escaping characters. self._alphabet = {c for token in tokens for c in token} self._alphabet |= _ESCAPE_CHARS def _load_from_file_object(self, f): """Load from a file object. Args: f: File object to load vocabulary from """ subtoken_strings = [] for line in f: s = line.strip() # Some vocab files wrap words in single quotes, but others don't if ((s.startswith("'") and s.endswith("'")) or (s.startswith("\"") and s.endswith("\""))): s = s[1:-1] subtoken_strings.append(native_to_unicode(s)) self._init_subtokens_from_list(subtoken_strings) self._init_alphabet_from_tokens(subtoken_strings) def _load_from_file(self, filename): """Load from a vocab file.""" if not tf.gfile.Exists(filename): raise ValueError("File %s not found" % filename) with tf.gfile.Open(filename) as f: self._load_from_file_object(f) def store_to_file(self, filename, add_single_quotes=True): with tf.gfile.Open(filename, "w") as f: for subtoken_string in self._all_subtoken_strings: if add_single_quotes: f.write("'" + unicode_to_native(subtoken_string) + "'\n") else: f.write(unicode_to_native(subtoken_string) + "\n") class ImageEncoder(object): """Encoder class for saving and loading images.""" def __init__(self, num_reserved_ids=0, height=None, width=None, channels=3): assert num_reserved_ids == 0 self._height = height self._width = width self._channels = channels @property def num_reserved_ids(self): return 0 def encode(self, s): """Transform a string with a filename into a list of RGB integers. Args: s: path to the file with an image. 
Returns: ids: list of integers """ try: import matplotlib.image as im # pylint: disable=g-import-not-at-top except ImportError as e: tf.logging.warning( "Reading an image requires matplotlib to be installed: %s", e) raise NotImplementedError("Image reading not implemented.") return im.imread(s) def decode(self, ids, strip_extraneous=False): """Transform a sequence of int ids into an image file. Args: ids: list of integers to be converted. strip_extraneous: unused Returns: Path to the temporary file where the image was saved. Raises: ValueError: if the ids are not of the appropriate size. """ del strip_extraneous _, tmp_file_path = tempfile.mkstemp("_decode.png") if self._height is None or self._width is None: size = int(math.sqrt(len(ids) / self._channels)) length = size * size * self._channels else: size = None length = self._height * self._width * self._channels if len(ids) != length: raise ValueError("Length of ids (%d) must be height (%d) x width (%d) x " "channels (%d); %d != %d.\n Ids: %s" % (len(ids), self._height, self._width, self._channels, len(ids), length, " ".join([str(i) for i in ids]))) with tf.Graph().as_default(): raw = tf.constant(ids, dtype=tf.uint8) if size is None: img = tf.reshape(raw, [self._height, self._width, self._channels]) else: img = tf.reshape(raw, [size, size, self._channels]) png = tf.image.encode_png(img) op = tf.write_file(tmp_file_path, png) with tf.Session() as sess: sess.run(op) return tmp_file_path def decode_list(self, ids): """Transform a sequence of int ids into an image file. Args: ids: list of integers to be converted. Returns: Singleton list: path to the temporary file where the image was saved. """ return [self.decode(ids)] @property def vocab_size(self): return 256 class RealEncoder(object): """Encoder class for saving and loading float values.""" def encode(self, s): """Transform a string (space separated float values) into a float array. Args: s: space separated float values. Returns: Array of float values. 
""" return [float(w) for w in s.split()] def decode(self, ids, strip_extraneous=False): """Transform sequence of float values into string (float values). Args: ids: array of floats to be converted. strip_extraneous: unused Returns: String having space separated float values. Raises: ValueError: if the ids are not of the appropriate size. """ del strip_extraneous return " ".join([str(i) for i in ids])
{ "content_hash": "59dd4277716a618eeb28f0a10437096d", "timestamp": "", "source": "github", "line_count": 1045, "max_line_length": 91, "avg_line_length": 33.97129186602871, "alnum_prop": 0.6469577464788733, "repo_name": "vthorsteinsson/tensor2tensor", "id": "42d136a04fc5702fa54edfd667db0717c7731468", "size": "36105", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tensor2tensor/data_generators/text_encoder.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "34646" }, { "name": "JavaScript", "bytes": "78396" }, { "name": "Jupyter Notebook", "bytes": "2423366" }, { "name": "Python", "bytes": "3566836" }, { "name": "Shell", "bytes": "7888" } ], "symlink_target": "" }
using System;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Reusable.Collections;

namespace Reusable.Tests.Collections
{
    [TestClass]
    public class ProjectionEqualityComparerTest
    {
        [TestMethod]
        public void Equals_SameValues_True()
        {
            // Compare only by Bar and Baz; Qux is deliberately excluded from the projection.
            var sut = ProjectionEqualityComparer<Foo>.Create(f => new { f.Bar, f.Baz });

            var first = new Foo { Bar = "foo", Baz = 2, Qux = DateTime.Now };
            var second = new Foo { Bar = "foo", Baz = 2, Qux = DateTime.Now.AddHours(-1) };

            // Differing Qux values must not affect equality.
            Assert.IsTrue(sut.Equals(first, second));
        }

        [TestMethod]
        public void Equals_DifferentValues_False()
        {
            var sut = ProjectionEqualityComparer<Foo>.Create(f => new { f.Bar, f.Baz });

            var first = new Foo { Bar = "foo", Baz = 2, Qux = DateTime.Now };
            var second = new Foo { Bar = "foo", Baz = 3, Qux = DateTime.Now.AddHours(-1) };

            // Baz differs (2 vs 3), so the projected keys are unequal.
            Assert.IsFalse(sut.Equals(first, second));
        }

        // Test fixture: Qux exists only to prove it is ignored by the comparer.
        private class Foo
        {
            public string Bar { get; set; }

            public int Baz { get; set; }

            public DateTime Qux { get; set; }
        }
    }
}
{ "content_hash": "10a11b0124bdbbac652017354eca9f0e", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 93, "avg_line_length": 29.6, "alnum_prop": 0.535472972972973, "repo_name": "he-dev/Reusable", "id": "b290f9e85c6b73dbd10bd669876d9a15ff3e293d", "size": "1186", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Reusable.Tests.MSTest/src/Core/Collections/ProjectionEqualityComparerTest.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "1047239" }, { "name": "CSS", "bytes": "588" }, { "name": "HTML", "bytes": "1757" } ], "symlink_target": "" }
package net.automatalib.modelcheckers.ltsmin.monitor;

import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.function.Function;

import net.automatalib.automata.base.compact.CompactTransition;
import net.automatalib.automata.transducers.MealyMachine;
import net.automatalib.automata.transducers.impl.compact.CompactMealy;
import net.automatalib.exception.ModelCheckingException;
import net.automatalib.modelcheckers.ltsmin.AbstractLTSmin;
import net.automatalib.modelcheckers.ltsmin.LTSminLTLParser;
import net.automatalib.modelcheckers.ltsmin.LTSminMealy;
import net.automatalib.modelcheckers.ltsmin.ltl.AbstractLTSminLTL;
import net.automatalib.serialization.fsm.parser.FSMFormatException;
import net.automatalib.words.Word;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A monitor model checker using LTSmin for Mealy machines.
 *
 * @param <I>
 *         the input type.
 * @param <O>
 *         the output type.
 *
 * @author Jeroen Meijer
 */
public abstract class AbstractLTSminMonitorMealy<I, O>
        extends AbstractLTSminMonitor<I, MealyMachine<?, I, ?, O>, MealyMachine<?, I, ?, O>>
        implements LTSminMealy<I, O, MealyMachine<?, I, ?, O>> {

    private static final Logger LOGGER = LoggerFactory.getLogger(AbstractLTSminMonitorMealy.class);

    /**
     * @see #getString2Output()
     */
    private final Function<String, O> string2Output;

    /**
     * @see #getSkipOutputs()
     * @see #setSkipOutputs(Collection)
     */
    private Collection<? super O> skipOutputs;

    /**
     * Constructs a new AbstractLTSminLTLMealy.
     *
     * @param string2Output
     *         the function that transforms edges in the FSM file to actual output.
     * @param skipOutputs
     *         the set of outputs that need to be skipped while writing the Mealy machine to ETF.
     *
     * @see AbstractLTSminLTL
     */
    protected AbstractLTSminMonitorMealy(boolean keepFiles,
                                         Function<String, I> string2Input,
                                         Function<String, O> string2Output,
                                         Collection<? super O> skipOutputs) {
        super(keepFiles, string2Input);
        this.string2Output = string2Output;
        this.skipOutputs = skipOutputs;
    }

    /**
     * Gets a function that transforms edges in the FSM file to actual output.
     *
     * @return the Function.
     */
    @Override
    public Function<String, O> getString2Output() {
        return string2Output;
    }

    /**
     * Gets a set of outputs that need to be skipped while writing the Mealy machine to ETF.
     *
     * @return the Collection.
     */
    @Override
    public Collection<? super O> getSkipOutputs() {
        return skipOutputs;
    }

    /**
     * Sets a set of outputs that need to be skipped while writing the Mealy machine to ETF.
     */
    @Override
    public void setSkipOutputs(Collection<? super O> skipOutputs) {
        this.skipOutputs = skipOutputs;
    }

    /**
     * Delegates syntax validation of the LTL formula (I/O flavor) to the LTSmin LTL parser.
     */
    @Override
    protected void verifyFormula(String formula) {
        LTSminLTLParser.requireValidIOFormula(formula);
    }

    /**
     * Converts the FSM file to a {@link MealyMachine}.
     *
     * @see AbstractLTSmin#findCounterExample(Object, Collection, Object)
     */
    @Override
    public @Nullable MealyMachine<?, I, ?, O> findCounterExample(MealyMachine<?, I, ?, O> automaton,
                                                                 Collection<? extends I> inputs,
                                                                 String property) {
        final File fsm = findCounterExampleFSM(automaton, inputs, property);

        // No FSM file was produced, so there is no counterexample to report.
        if (fsm == null) {
            return null;
        }

        try {
            final CompactMealy<I, O> result = fsm2Mealy(fsm, automaton, inputs);

            // Locate the unique sink of the parsed counterexample: the first state
            // that has no successor for any input symbol.
            final Integer deadlock = result.getStates()
                                           .stream()
                                           .filter(s -> inputs.stream()
                                                              .allMatch(i -> result.getSuccessor(s, i) == null))
                                           .findFirst()
                                           .orElseThrow(() -> new ModelCheckingException("No deadlock found"));

            // Wrap the parsed machine: all transition queries delegate to `result`,
            // but state outputs are only reported for words ending in the deadlock.
            return new MealyMachine<Integer, I, CompactTransition<O>, O>() {

                @Override
                @SuppressWarnings("nullness") // TODO XXX FIXME: Returning non-null values would currently break PropertyOracles in LearnLib. We should rethink a clean API here.
                public Word<O> computeStateOutput(Integer state, Iterable<? extends I> input) {
                    final Integer succ = getSuccessor(state, input);
                    // Only input words that drive the machine into the deadlock state
                    // yield an output word; all other words are answered with null
                    // (see the FIXME above).
                    return deadlock.equals(succ) ? MealyMachine.super.computeStateOutput(state, input) : null;
                }

                @Override
                public @Nullable Integer getInitialState() {
                    return result.getInitialState();
                }

                @Override
                public Integer getSuccessor(CompactTransition<O> transition) {
                    return result.getSuccessor(transition);
                }

                @Override
                public @Nullable CompactTransition<O> getTransition(Integer state, I input) {
                    return result.getTransition(state, input);
                }

                @Override
                public O getTransitionOutput(CompactTransition<O> transition) {
                    return result.getTransitionOutput(transition);
                }

                @Override
                public Collection<Integer> getStates() {
                    return result.getStates();
                }
            };
        } catch (IOException | FSMFormatException e) {
            throw new ModelCheckingException(e);
        } finally {
            // check if we must keep the FSM
            if (!isKeepFiles() && !fsm.delete()) {
                LOGGER.warn("Could not delete file: " + fsm.getAbsolutePath());
            }
        }
    }
}
{ "content_hash": "d34e83f98cca7a9f3937e48fa182eb69", "timestamp": "", "source": "github", "line_count": 170, "max_line_length": 177, "avg_line_length": 36.3, "alnum_prop": 0.5892075838599903, "repo_name": "LearnLib/automatalib", "id": "31d5e46b030621cf0f16bf415daa54a7f7797493", "size": "6837", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "modelchecking/ltsmin/src/main/java/net/automatalib/modelcheckers/ltsmin/monitor/AbstractLTSminMonitorMealy.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "1128" }, { "name": "Java", "bytes": "3407389" }, { "name": "Makefile", "bytes": "250" }, { "name": "Python", "bytes": "1499" }, { "name": "Shell", "bytes": "6808" } ], "symlink_target": "" }
using std::string;
using std::cout;
using std::endl;
using std::random_device;
using std::default_random_engine;
using std::uniform_int_distribution;
using std::chrono::milliseconds;
using std::thread;
using std::this_thread::sleep_for;
using std::mutex;
using std::lock_guard;
using std::unique_lock;
using std::condition_variable;

namespace RW {
// Shared value updated by writers and printed by readers.
int data = 0;
// LRm guards read_count and pairs with the condition variable LR.
mutex LRm;
condition_variable LR;
// Number of readers currently inside their read section.
int read_count = 0;
// LW serializes writers with respect to one another.
mutex LW;
}  // namespace RW

// Sleeps for a random duration in [0, 1000] ms to simulate unrelated work.
void DoSomethingElse() {
  static default_random_engine rnd((random_device())());
  uniform_int_distribution<> wait_time(0, 1000);
  sleep_for(milliseconds(wait_time(rnd)));
}

// @include
// LR and LW are variables in the RW namespace.
// They serve as read and write locks. The integer
// variable read_count in RW tracks the number of readers.
void Reader(string name) {
  while (true) {
    // Register as an active reader; a positive read_count blocks writers.
    {
      lock_guard<mutex> lock(RW::LRm);
      ++RW::read_count;
    }
    // @exclude
    cout << "Reader " << name << " is about to read" << endl;
    // @include
    cout << RW::data << endl;
    // Deregister and wake a writer that may be waiting for read_count == 0.
    {
      lock_guard<mutex> lock(RW::LRm);
      --RW::read_count;
      RW::LR.notify_one();
    }
    DoSomethingElse();
  }
}

void Writer(string name) {
  while (true) {
    {
      // Only one writer at a time may attempt to write.
      lock_guard<mutex> lock_w(RW::LW);
      unique_lock<mutex> lock(RW::LRm);
      // The predicate overload re-checks read_count after every wakeup and
      // keeps LRm held once the predicate is true, so no reader can slip in
      // between the read_count == 0 check and the write. (The original
      // drop-and-relock `done` loop allowed exactly that race window.)
      RW::LR.wait(lock, [] { return RW::read_count == 0; });
      // @exclude
      cout << "Writer " << name << " is about to write" << endl;
      // @include
      ++RW::data;
    }
    DoSomethingElse();
  }
}

// @exclude
int main(int argc, char* argv[]) {
  thread r0(Reader, "r0");
  thread r1(Reader, "r1");
  thread w0(Writer, "w0");
  thread w1(Writer, "w1");
  // The worker loops never terminate, so the threads can never be joined.
  // Detach them: destroying a still-joinable std::thread at return would
  // otherwise call std::terminate() and abort the demo.
  r0.detach();
  r1.detach();
  w0.detach();
  w1.detach();
  sleep_for(milliseconds(10000));
  return 0;
}
{ "content_hash": "428694106dd1f4b2bb493fa439a1a3c2", "timestamp": "", "source": "github", "line_count": 85, "max_line_length": 68, "avg_line_length": 22.729411764705883, "alnum_prop": 0.5848861283643892, "repo_name": "adnanaziz/epicode", "id": "f608951af166bc6e57a2fc1a0ace91b09eb60596", "size": "2152", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "cpp/rw.cc", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "4173" }, { "name": "C++", "bytes": "1067786" }, { "name": "CMake", "bytes": "24990" }, { "name": "Java", "bytes": "1006880" }, { "name": "Makefile", "bytes": "531" }, { "name": "Python", "bytes": "8685" }, { "name": "Shell", "bytes": "3357" }, { "name": "TeX", "bytes": "299325" } ], "symlink_target": "" }
package at.ac.tuwien.dsg.hcu.common.model; public class Functionality { protected String name; public Functionality(String name) { super(); this.name = name; } public String getName() { return name; } public void setName(String name) { this.name = name; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((name == null) ? 0 : name.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; Functionality other = (Functionality) obj; if (name == null) { if (other.name != null) return false; } else if (!name.equals(other.name)) return false; return true; } @Override public String toString() { return name; } }
{ "content_hash": "6220e4e8a78f9ed21e749ca27bc20f60", "timestamp": "", "source": "github", "line_count": 52, "max_line_length": 73, "avg_line_length": 21.807692307692307, "alnum_prop": 0.48677248677248675, "repo_name": "tuwiendsg/RAHYMS", "id": "a6c205d4c6a1f88a4db6330365f5c1739e791803", "size": "1134", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "hcu/hcu-common/src/main/java/at/ac/tuwien/dsg/hcu/common/model/Functionality.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "52112" }, { "name": "HTML", "bytes": "22914" }, { "name": "Java", "bytes": "4239740" }, { "name": "JavaScript", "bytes": "266497" } ], "symlink_target": "" }
var gulp = require('gulp');
var concat = require('gulp-concat');
var uglify = require('gulp-uglify');
var notify = require('gulp-notify');
// NOTE(review): browserSync is created here but not referenced by any task below.
var browserSync = require('browser-sync').create();

// Shared pipeline: concatenate a script group into one bundle, minify it,
// write it to js/, then pop a desktop notification.
function minifyScripts(sourceGlob, bundleName, doneMessage) {
  return gulp.src(sourceGlob)
    .pipe(concat(bundleName))
    .pipe(uglify())
    .pipe(gulp.dest('js'))
    .pipe(notify({ message: doneMessage }));
}

gulp.task('admin', function() {
  return minifyScripts('js/admin/*.js', 'jetstash-admin.js', "Completed minifying admin javascript");
});

gulp.task('app', function() {
  return minifyScripts('js/app/*.js', 'jetstash-app.js', "Completed minifying app javascript");
});

// Default task builds both bundles.
gulp.task('default', ['admin', 'app']);
{ "content_hash": "4950a52ec1edd984f8ec38c132697821", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 70, "avg_line_length": 30.608695652173914, "alnum_prop": 0.6150568181818182, "repo_name": "shampine/patrickshampine.com", "id": "809c883c5811e112145100e1caecbd1d0c49dd9a", "size": "704", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "public/wp-content/plugins/jetstash-connect/gulpfile.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "2084264" }, { "name": "HTML", "bytes": "6600" }, { "name": "JavaScript", "bytes": "8066890" }, { "name": "PHP", "bytes": "17749147" }, { "name": "Python", "bytes": "3069" }, { "name": "Ruby", "bytes": "823" }, { "name": "Shell", "bytes": "4773" }, { "name": "XSLT", "bytes": "4267" } ], "symlink_target": "" }
package org.xlcloud.iam;

import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.inject.Produces;

import org.apache.log4j.Logger;

import org.xlcloud.iam.performance.IamPerformanceMonitorFilter;

import com.sun.jersey.api.client.Client;

/**
 * Creates Jersey client instances tailored to OpenAM communication needs.
 *
 * @author Krzysztof Szafrański, AMG.net
 */
@ApplicationScoped
public class IamRestClientProducer {

    private static final Logger LOG = Logger.getLogger(IamRestClientProducer.class);

    // Single shared Jersey client; the producer class is @ApplicationScoped,
    // so this instance is created once and handed out to every injection point.
    private final Client client;

    /**
     * Builds the shared Jersey {@link Client} and attaches the performance
     * monitoring filter so IAM requests are timed.
     */
    public IamRestClientProducer() {
        final Client jerseyClient = new Client();
        jerseyClient.addFilter(new IamPerformanceMonitorFilter());
        LOG.debug("Created Jersey client " + jerseyClient.hashCode());
        this.client = jerseyClient;
    }

    /**
     * CDI producer for injection points qualified with {@link IamRestClient}.
     *
     * @return the shared Jersey client instance
     */
    @Produces
    @IamRestClient
    public Client createRestClient() {
        LOG.debug("Produced Jersey client " + this.client.hashCode());
        return this.client;
    }
}
{ "content_hash": "8ed55ccb5e2fccf5dca3f507c9233eb7", "timestamp": "", "source": "github", "line_count": 37, "max_line_length": 84, "avg_line_length": 25.675675675675677, "alnum_prop": 0.716842105263158, "repo_name": "ow2-xlcloud/xlcloud", "id": "8200db1694da4eb677845d41ac9ee650445d346f", "size": "1565", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "framework/iam-utils/src/main/java/org/xlcloud/iam/IamRestClientProducer.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Awk", "bytes": "8901" }, { "name": "Java", "bytes": "255659" }, { "name": "Ruby", "bytes": "3022" }, { "name": "Shell", "bytes": "28649" } ], "symlink_target": "" }
<!doctype html> <head> <meta charset="utf-8"> <title>PhilSHORE Gallery Item - 34</title> <!-- Fonts --> <link href='https://fonts.googleapis.com/css?family=EB+Garamond|Raleway:400,400italic,500,500italic,700,300|Josefin+Sans:400,600,400italic,700|Quicksand:400,700|Lora:400,700,400italic,700italic' rel='stylesheet' type='text/css'> <!-- endFonts --> <!-- bower:css --> <link rel="stylesheet" href="../../bower_components/bootstrap/dist/css/bootstrap.min.css?h=8eb176c7" /> <!-- <link rel="stylesheet" href="../../bower_components/videojs/dist/video-js/video-js.css?h=2c7e886e" /> <link rel="stylesheet" href="../../bower_components/flat-ui/dist/css/flat-ui.css?h=288987af" /> --> <link rel="stylesheet" href="../../bower_components/components-font-awesome/css/font-awesome.css?h=b4886004" /> <!-- endbower--> <link rel="stylesheet" href="../../static/css/style.css?h=8bc699d1"> </head> <!-- bower:js --> <script src="../../bower_components/jquery/dist/jquery.js?h=09b2fbec"></script> <script src="../../bower_components/bootstrap/dist/js/bootstrap.js?h=8cf4186c"></script> <!-- <script src="../../bower_components/videojs/dist/video-js/video.js?h=89562bc3"></script> <script src="../../bower_components/flat-ui/dist/js/flat-ui.js?h=8db8b80c"></script> --> <!-- endbower --> <!--livereload--> <script src="//localhost:35729/livereload.js"></script> <!--endlivereload--> <body> <header> <nav id="wiki_nav" class="navbar navbar-default navbar-fixed-top" role="navigation"> <div class="container-fluid"> <!-- Brand and toggle get grouped for better mobile display --> <div class="navbar-header"> <button type="button" class="navbar-toggle" data-toggle="collapse" data-target="#wiki_nav_div"> <span class="sr-only">Toggle navigation</span> <span class="icon-bar"></span> <span class="icon-bar"></span> <span class="icon-bar"></span> </button> <a class="navbar-brand" href="../../"> <img src=../../images/header/philshore-logo.png alt="PhilSHORE"> </a> </div> <!-- Collect the nav links, forms, and other 
content for toggling --> <div class="collapse navbar-collapse" id="wiki_nav_div"> <ul class="nav navbar-nav navbar-right"> <li><a href="../../">HOME</a></li> <li class="dropdown" > <a class="dropdown-toggle" data-toggle="dropdown" href="/">ABOUT <span class="caret"></span></a> <ul class="dropdown-menu"> <li><a href="../../about/">ABOUT PHILSHORE</a></li> <li><a href="../../component/">PROJECT COMPONENTS</a></li> <li><a href="../../team/">TEAM</a></li> </ul> </li> <li><a href="../../philshore-tool/">MSP TOOL</a></li> <li class="active"><a href="../../gallery/">GALLERY</a></li> <li><a href="../../faq/">FAQS</a></li> </ul> </div> <!-- /.navbar-collapse --> </div> <!-- /.container --> </nav> </header> <div> <!--Header Block--> <div id="intro" class="row" style="background-image: url('https://raw.githubusercontent.com/philshore/cdn/master/images/header/about.jpg');background-position: center;" > <h4>MARINE SPATIAL PLANNING TOOL</h4> <h1>MSP TOOL</h1> <p>WebGIS Platform for tidal in-stream current energy development</p> </div> <!--endIntroBlock--> <!--Body Block--> <div class="row page_gallery_item" > <div class="col-md-12"> <img src=../../gallery/34/img_20160307_101838.jpg> </div> <div class="col-md-12"> </div> </div> <!--endBodyBlock--> <!--Footer Block--> <footer class="row" id="footer"> <div class="col-md-4"> <p id="footer_copy">&copy; Copyright 2016 by PhilSHORE</p> <p><i class="fa fa-phone"></i> 981-8500 local 3147</p> <p><i class="fa fa-envelope"></i> philshoreteam@gmail.com</p> </div> <div class="col-md-8"> <img id ="footer_logos" src=../../about_logos_ds.png> </div> </footer> <!--endFooterBlock--> </div> </body>
{ "content_hash": "0776d51befb9f3e29891a52a07b0d59c", "timestamp": "", "source": "github", "line_count": 146, "max_line_length": 228, "avg_line_length": 31.171232876712327, "alnum_prop": 0.5319709953856295, "repo_name": "philshore/philshore.github.io", "id": "706f913fa12d1a5f2b9eb3a8b12842cbe545280b", "size": "4551", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "gallery/34/index.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "69802" }, { "name": "HTML", "bytes": "546581" }, { "name": "JavaScript", "bytes": "695145" } ], "symlink_target": "" }
package com.jme3.system.lwjgl;

import com.jme3.input.lwjgl.GlfwJoystickInput;
import com.jme3.input.lwjgl.GlfwKeyInputVR;
import com.jme3.input.lwjgl.GlfwMouseInputVR;
import com.jme3.renderer.Renderer;
import com.jme3.renderer.RendererException;
import com.jme3.renderer.lwjgl.LwjglGL;
import com.jme3.renderer.lwjgl.LwjglGLExt;
import com.jme3.renderer.lwjgl.LwjglGLFboEXT;
import com.jme3.renderer.lwjgl.LwjglGLFboGL3;
import com.jme3.renderer.opengl.*;
import com.jme3.system.*;
import org.lwjgl.glfw.GLFW;
import org.lwjgl.opengl.ARBDebugOutput;
import org.lwjgl.opengl.ARBFramebufferObject;
import org.lwjgl.opengl.EXTFramebufferMultisample;
import org.lwjgl.opengl.GLCapabilities;

import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Level;
import java.util.logging.Logger;

import static org.lwjgl.opengl.GL.createCapabilities;
import static org.lwjgl.opengl.GL11.glGetInteger;

/**
 * A VR oriented LWJGL implementation of a graphics context.
 * @author reden - phr00t - https://github.com/phr00t
 * @author Julien Seinturier - (c) 2016 - JOrigin project - <a href="http://www.jorigin.org">http:/www.jorigin.org</a>
 */
public abstract class LwjglContextVR implements JmeContext {

    private static final Logger logger = Logger.getLogger(LwjglContextVR.class.getName());

    protected static final String THREAD_NAME = "jME3 Main";

    // Lifecycle flags: "created" flips when the context exists,
    // "renderable" once first-time GL initialization has completed.
    protected AtomicBoolean created = new AtomicBoolean(false);
    protected AtomicBoolean renderable = new AtomicBoolean(false);
    protected final Object createdLock = new Object();

    protected AppSettings settings = new AppSettings(true);
    protected Renderer renderer;
    protected GlfwKeyInputVR keyInput;
    protected GlfwMouseInputVR mouseInput;
    protected GlfwJoystickInput joyInput;
    protected Timer timer;
    protected SystemListener listener;

    @Override
    public void setSystemListener(SystemListener listener) {
        this.listener = listener;
    }

    /**
     * Logs the LWJGL major version, the current thread name and the GLFW
     * version string for diagnostic purposes.
     */
    protected void printContextInitInfo() {
        logger.log(Level.INFO, "LWJGL {0} context running on thread {1}\n"
                        + " * Graphics Adapter: GLFW {2}",
                new Object[]{Integer.toString(org.lwjgl.Version.VERSION_MAJOR),
                        Thread.currentThread().getName(),
                        GLFW.glfwGetVersionString()});
    }

    /**
     * Determines the maximum number of MSAA samples supported by the current
     * context via the ARB or EXT framebuffer extensions.
     *
     * @return the supported sample count, or {@link Integer#MAX_VALUE} when
     *         neither extension is available (no limit could be queried).
     */
    protected int determineMaxSamples() {
        // If we already have a valid context, determine samples using current context.
        logger.log(Level.SEVERE, "glfwExtensionSupported(\"GL_ARB_framebuffer_object\"): " + GLFW.glfwExtensionSupported("GL_ARB_framebuffer_object"));
        // Fixed copy/paste bug: this log line previously probed
        // GL_ARB_framebuffer_object a second time instead of the
        // GL_EXT_framebuffer_multisample extension named in its message.
        logger.log(Level.SEVERE, "glfwExtensionSupported(\"GL_EXT_framebuffer_multisample\"): " + GLFW.glfwExtensionSupported("GL_EXT_framebuffer_multisample"));

        if (GLFW.glfwExtensionSupported("GL_ARB_framebuffer_object")) {
            return glGetInteger(ARBFramebufferObject.GL_MAX_SAMPLES);
        } else if (GLFW.glfwExtensionSupported("GL_EXT_framebuffer_multisample")) {
            return glGetInteger(EXTFramebufferMultisample.GL_MAX_SAMPLES_EXT);
        }

        return Integer.MAX_VALUE;
    }

    /**
     * Loads the native libraries the context depends on (GLFW, jemalloc,
     * LWJGL core, plus OpenAL / Bullet when configured). Does nothing when
     * running with low permissions.
     */
    protected void loadNatives() {
        if (JmeSystem.isLowPermissions()) {
            return;
        }

        if ("LWJGL".equals(settings.getAudioRenderer())) {
            NativeLibraryLoader.loadNativeLibrary("openal-lwjgl3", true);
        }

        if (NativeLibraryLoader.isUsingNativeBullet()) {
            NativeLibraryLoader.loadNativeLibrary("bulletjme", true);
        }

        NativeLibraryLoader.loadNativeLibrary("glfw-lwjgl3", true);
        NativeLibraryLoader.loadNativeLibrary("jemalloc-lwjgl3", true);
        NativeLibraryLoader.loadNativeLibrary("lwjgl3", true);
    }

    /**
     * Check if the display is a retina display.
     * @return <code>true</code> if the display is a retina display and <code>false</code> otherwise.
     */
    public boolean isRetinaDisplay() {
        return GLFW.glfwGetVersionString().contains("retina");
    }

    /**
     * Clamps the user-requested MSAA sample count to what the hardware
     * supports, logging a warning when the request cannot be satisfied.
     *
     * @return the effective sample count (0 when antialiasing is disabled).
     */
    protected int getNumSamplesToUse() {
        int samples = 0;
        if (settings.getSamples() > 1) {
            samples = settings.getSamples();
            final int supportedSamples = determineMaxSamples();
            if (supportedSamples < samples) {
                logger.log(Level.WARNING,
                        "Couldn't satisfy antialiasing samples requirement: x{0}. "
                                + "Video hardware only supports: x{1}",
                        new Object[]{samples, supportedSamples});
                samples = supportedSamples;
            }
        }
        return samples;
    }

    /**
     * Performs first-time OpenGL initialization: creates the capability set,
     * builds the renderer (optionally wrapped with debug/timing/trace
     * proxies) and initializes any configured input handlers.
     *
     * @throws RendererException if OpenGL 2.0 is not available.
     * @throws UnsupportedOperationException for unknown renderer settings.
     */
    protected void initContextFirstTime() {
        final GLCapabilities capabilities = createCapabilities(settings.getRenderer().equals(AppSettings.LWJGL_OPENGL32));

        if (!capabilities.OpenGL20) {
            throw new RendererException("OpenGL 2.0 or higher is required for jMonkeyEngine");
        }

        if (settings.getRenderer().equals(AppSettings.LWJGL_OPENGL2)
                || settings.getRenderer().equals(AppSettings.LWJGL_OPENGL32)) {
            GL gl = new LwjglGL();
            GLExt glext = new LwjglGLExt();
            GLFbo glfbo;

            // FBO support comes from core GL3 when available, EXT otherwise.
            if (capabilities.OpenGL30) {
                glfbo = new LwjglGLFboGL3();
            } else {
                glfbo = new LwjglGLFboEXT();
            }

            if (settings.isGraphicsDebug()) {
                gl = (GL) GLDebug.createProxy(gl, gl, GL.class, GL2.class, GL3.class, GL4.class);
                glext = (GLExt) GLDebug.createProxy(gl, glext, GLExt.class);
                glfbo = (GLFbo) GLDebug.createProxy(gl, glfbo, GLFbo.class);
            }

            if (settings.isGraphicsTiming()) {
                GLTimingState timingState = new GLTimingState();
                gl = (GL) GLTiming.createGLTiming(gl, timingState, GL.class, GL2.class, GL3.class, GL4.class);
                glext = (GLExt) GLTiming.createGLTiming(glext, timingState, GLExt.class);
                glfbo = (GLFbo) GLTiming.createGLTiming(glfbo, timingState, GLFbo.class);
            }

            if (settings.isGraphicsTrace()) {
                gl = (GL) GLTracer.createDesktopGlTracer(gl, GL.class, GL2.class, GL3.class, GL4.class);
                glext = (GLExt) GLTracer.createDesktopGlTracer(glext, GLExt.class);
                glfbo = (GLFbo) GLTracer.createDesktopGlTracer(glfbo, GLFbo.class);
            }

            renderer = new GLRenderer(gl, glext, glfbo);
            renderer.initialize();
        } else {
            throw new UnsupportedOperationException("Unsupported renderer: " + settings.getRenderer());
        }

        if (capabilities.GL_ARB_debug_output && settings.isGraphicsDebug()) {
            ARBDebugOutput.glDebugMessageCallbackARB(new LwjglGLDebugOutputHandler(), 0);
        }

        renderer.setMainFrameBufferSrgb(settings.isGammaCorrection());
        renderer.setLinearizeSrgbImages(settings.isGammaCorrection());

        // Init input
        if (keyInput != null) {
            keyInput.initialize();
        }

        if (mouseInput != null) {
            mouseInput.initialize();
        }

        if (joyInput != null) {
            joyInput.initialize();
        }

        renderable.set(true);
    }

    /**
     * Context internal destroy.
     */
    public void internalDestroy() {
        renderer = null;
        timer = null;
        renderable.set(false);
        synchronized (createdLock) {
            created.set(false);
            // wake any thread blocked in waitFor()
            createdLock.notifyAll();
        }
    }

    /**
     * Context internal create.
     */
    public void internalCreate() {
        synchronized (createdLock) {
            created.set(true);
            // wake any thread blocked in waitFor()
            createdLock.notifyAll();
        }

        initContextFirstTime();
    }

    /**
     * Create the context.
     */
    public void create() {
        create(false);
    }

    /**
     * Destroy the context.
     */
    public void destroy() {
        destroy(false);
    }

    /**
     * Blocks the calling thread until the created flag matches the given
     * value.
     *
     * @param createdVal the target state of the created flag
     */
    protected void waitFor(boolean createdVal) {
        synchronized (createdLock) {
            while (created.get() != createdVal) {
                try {
                    createdLock.wait();
                } catch (InterruptedException ignored) {
                }
            }
        }
    }

    @Override
    public boolean isCreated() {
        return created.get();
    }

    @Override
    public boolean isRenderable() {
        return renderable.get();
    }

    @Override
    public void setSettings(AppSettings settings) {
        this.settings.copyFrom(settings);
    }

    @Override
    public AppSettings getSettings() {
        return settings;
    }

    @Override
    public Renderer getRenderer() {
        return renderer;
    }

    @Override
    public Timer getTimer() {
        return timer;
    }
}
{ "content_hash": "7e90c5b49800a814c0c76efc026b6ef6", "timestamp": "", "source": "github", "line_count": 270, "max_line_length": 154, "avg_line_length": 33.69259259259259, "alnum_prop": 0.6083324172804221, "repo_name": "zzuegg/jmonkeyengine", "id": "c32f5607922b784c2ff5ad7c61c9a6f319231165", "size": "10704", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "jme3-vr/src/main/java/com/jme3/system/lwjgl/LwjglContextVR.java", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "44189" }, { "name": "GLSL", "bytes": "300855" }, { "name": "Groovy", "bytes": "3202" }, { "name": "HTML", "bytes": "26583" }, { "name": "Java", "bytes": "13917178" }, { "name": "JavaScript", "bytes": "42" }, { "name": "Makefile", "bytes": "8850" } ], "symlink_target": "" }
import { Pipe, PipeTransform } from '@angular/core';

/**
 * Transforms a string to title case: the first character of each
 * whitespace-delimited word is upper-cased and the remainder lower-cased.
 * An empty input yields an empty string.
 */
@Pipe({ name: 'titlecase', pure: true })
export class TitleCasePipe implements PipeTransform {
  transform(input: string): string {
    // `slice(1)` replaces the deprecated `String.prototype.substr(1)`;
    // for a non-negative start index the two are behaviorally identical.
    return input.length === 0
      ? ''
      : input.replace(/\w\S*/g, (txt) => txt[0].toUpperCase() + txt.slice(1).toLowerCase());
  }
}
{ "content_hash": "06f098201054e73399671796455894a3", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 93, "avg_line_length": 32.7, "alnum_prop": 0.636085626911315, "repo_name": "thymikee/jest-preset-angular", "id": "0ac6bd14d0d1d10c16e498e9a3d99663dd2139da", "size": "327", "binary": false, "copies": "5", "ref": "refs/heads/main", "path": "examples/example-app-v15/src/app/shared/title-case.pipe.ts", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "5403" }, { "name": "HTML", "bytes": "171" }, { "name": "JavaScript", "bytes": "22487" }, { "name": "SCSS", "bytes": "54" }, { "name": "Shell", "bytes": "836" }, { "name": "TypeScript", "bytes": "80734" } ], "symlink_target": "" }
from pyVmomi import vim
import nested_vsphere.utils as utils
import re
import random

# Prefix prepended to every console progress message emitted by this module.
trailer = ">>>>>>> "


class VESX(object):
    """A nested (virtual) ESXi guest VM, built and deployed through pyVmomi.

    The constructor validates placement inputs (host, datastore, VM folder,
    networks, installer ISO) against the connected inventory and assembles a
    complete vim.vm.ConfigSpec; the VM is actually created by
    deploy_vm_task() and powered on by boot().
    """

    def __init__(self, vm_name, host, datastore, vmfolder, network, mgmt_network, iso, mem, vcpu, si, guestid='vmkernel6Guest', vmx_version='vmx-11', disk_size=8):
        """Validate inputs and build the VM ConfigSpec (no VM is created yet).

        vm_name      -- name of the VM to create
        host         -- target ESXi host name (looked up in the inventory)
        datastore    -- datastore name for the VM files
        vmfolder     -- inventory folder name the VM is placed in
        network      -- portgroup name used for the three VM NICs
        mgmt_network -- portgroup name used for the management NIC
        iso          -- datastore path of the installer ISO, e.g. "[ds] dir/esxi.iso"
        mem          -- memory size in MB
        vcpu         -- number of virtual CPUs
        si           -- connected pyVmomi ServiceInstance
        guestid      -- guest OS identifier (default: nested ESXi 6)
        vmx_version  -- virtual hardware version string
        disk_size    -- boot disk size in GB

        Raises ValueError when host/datastore/network/folder/ISO validation fails.
        """
        self.vm_name = vm_name
        self.vm_mo = None  # set by boot() to the VirtualMachine managed object
        self.si = si
        self.guestid = guestid
        self.vmx_version = vmx_version
        self.disk_size = disk_size
        content = self.si.content
        # Resolve and validate placement objects; ValueError is re-raised
        # unchanged so callers can see which lookup failed.
        try:
            self.host = utils.get_obj(content, [vim.HostSystem], host)
        except ValueError:
            raise
        try:
            self.datastore = utils.validate_datastore(content, datastore, self.host)
        except ValueError:
            raise
        try:
            self.network = utils.validate_network(content, network, self.host)
            self.mgmt_network = utils.validate_network(content, mgmt_network, self.host)
        except ValueError:
            raise
        try:
            self.vmfolder = utils.get_obj(si.content, [vim.Folder], vmfolder)
        except ValueError:
            raise
        # Split the ISO path "[datastore] folder/file.iso" into datastore,
        # folder and file name; the except branch handles the folderless form
        # "[datastore] file.iso" (Python 2 except syntax).
        try:
            iso_split = iso.split('/')
            iso_filename = iso_split.pop()
            j = '/'
            top_lv = iso_split[0].split().pop()
            iso_datastore = re.sub('\[|\]', '', iso_split.pop(0).split()[0])
            iso_folder = j.join([top_lv] + iso_split)
        except IndexError, e:
            print e
            iso_split = iso.split()
            iso_filename = iso_split.pop()
            iso_datastore = re.sub('\[|\]', '', iso_split[0])
            iso_folder = ''
        print trailer + "Validating Datastore for VM " + self.vm_name
        # Verify the ISO actually exists on its datastore before wiring it in.
        res = utils.validate_datastore_file(utils.get_obj(self.si.content, [vim.Datastore], iso_datastore), iso_folder, iso_filename)
        if res.info.state == 'error':
            raise ValueError(res.info.error.msg)
        else:
            self.iso = iso
        self.mem = mem
        self.vcpu = vcpu
        datastore_path = '[' + self.datastore.name + '] ' + self.vm_name
        add = vim.vm.device.VirtualDeviceSpec.Operation.add
        create = vim.vm.device.VirtualDeviceSpec.FileOperation.create
        # Build NIC specs; a distributed (VDS) portgroup is detected via the
        # 'dvportgroup' marker in the managed object's string representation.
        if 'dvportgroup' in str(self.network):
            net_spec = utils.create_vds_net_spec(self.network)
            print trailer + "VDS Portgroup found for VM Network"
        else:
            net_spec = utils.create_vss_net_spec(self.network)
            print trailer + "VSS Portgroup found for VM Network"
        if 'dvportgroup' in str(self.mgmt_network):
            net_mgmt_spec = utils.create_vds_net_spec(self.mgmt_network)
            print trailer + "VDS Portgroup found for Management Network"
        else:
            net_mgmt_spec = utils.create_vss_net_spec(self.mgmt_network)
            print trailer + "VSS Portgroup found for Management Network"
        print trailer + "Creating disk controller"
        # LSI Logic SAS controller on bus 0, bus sharing disabled.
        noSharing = vim.vm.device.VirtualSCSIController.Sharing.noSharing
        disk_ctrl = vim.vm.device.VirtualLsiLogicSASController(busNumber=0, sharedBus=noSharing)
        ctrl_spec = vim.vm.device.VirtualDeviceSpec(device=disk_ctrl, operation=add)
        # CD-ROM backed by the installer ISO, attached to controller key 201
        # and connected at power on.
        con_info = vim.vm.device.VirtualDevice.ConnectInfo(startConnected=True, allowGuestControl=True)
        cdrom_backing_info = vim.vm.device.VirtualCdrom.IsoBackingInfo(fileName=self.iso)
        cdrom = vim.vm.device.VirtualCdrom(backing=cdrom_backing_info, connectable=con_info, controllerKey=201)
        cdrom_spec = vim.vm.device.VirtualDeviceSpec(device=cdrom, operation=add)
        print trailer + "Adding Hard Drive"
        # Thin-provisioned boot disk; disk_size (GB) is expressed in KB here.
        vdisk_backing_info = vim.vm.device.VirtualDisk.FlatVer2BackingInfo(thinProvisioned=True, diskMode='persistent', fileName=datastore_path + r'/' + vm_name + '.vmdk')
        vdisk = vim.vm.device.VirtualDisk(unitNumber=0, capacityInKB=self.disk_size*1024*1024, controllerKey=disk_ctrl.key, backing=vdisk_backing_info)
        vdisk_spec = vim.vm.device.VirtualDeviceSpec(device=vdisk, operation=add, fileOperation=create)
        # Expose the VM console over VNC on a random port in [5900, 6300].
        # NOTE(review): the port is chosen without checking for collisions
        # with other VMs on the same host -- TODO confirm this is acceptable.
        vnc_rand_port = random.randint(5900, 6300)
        self.vnc_port = vnc_rand_port
        print trailer + "Enabling VNC on port " + str(vnc_rand_port)
        vnc_enabled = vim.option.OptionValue(key='RemoteDisplay.vnc.enabled', value='true')
        vnc_port = vim.option.OptionValue(key='RemoteDisplay.vnc.port', value=str(vnc_rand_port))
        vmx_file = vim.vm.FileInfo(vmPathName='[' + datastore + '] ' + vm_name + '/' + vm_name + '.vmx')
        # Final spec: one management NIC plus three NICs on the VM network,
        # SCSI controller, CD-ROM and disk; nested HV enabled for ESXi-in-ESXi.
        self.config = vim.vm.ConfigSpec(name=self.vm_name, memoryMB=self.mem, numCPUs=self.vcpu, files=vmx_file, guestId=self.guestid, version=self.vmx_version, nestedHVEnabled=True, extraConfig=[vnc_enabled, vnc_port], deviceChange=[net_mgmt_spec, net_spec, net_spec, net_spec, ctrl_spec, cdrom_spec, vdisk_spec])

    def deploy_vm_task(self):
        """Create the VM from the prepared ConfigSpec and wait for completion.

        Uses the cluster's resource pool when the host belongs to a cluster,
        otherwise the host's own resource pool. NOTE(review): this busy-waits
        on the task state (CPU spin) -- consider WaitForTask or a sleep.
        Returns the finished CreateVM task.
        """
        if 'Cluster' in str(self.host.parent):
            print trailer + "Creating Virtual Machine"
            task = self.vmfolder.CreateVM_Task(config=self.config, pool=self.host.parent.resourcePool, host=self.host)
            while task.info.state != 'error' and task.info.state != 'success':
                pass
            return task
        else:
            print trailer + "Creating Virtual Machine"
            task = self.vmfolder.CreateVM_Task(config=self.config, pool=self.host.resourcePool)
            while task.info.state != 'error' and task.info.state != 'success':
                pass
            return task

    def boot(self):
        """Look up the created VM by name, power it on, and return the task.

        Also caches the VirtualMachine managed object on self.vm_mo.
        NOTE(review): busy-waits on the task state like deploy_vm_task().
        """
        self.vm_mo = utils.get_obj(self.si.content, [vim.VirtualMachine], self.vm_name)
        task = self.vm_mo.PowerOnVM_Task()
        print trailer + "Booting Virtual Machine"
        while task.info.state != 'error' and task.info.state != 'success':
            pass
        return task

    def delete(self):
        # Not implemented yet.
        pass
{ "content_hash": "a06a4fbf29e61efe43b9c6e34b40e40e", "timestamp": "", "source": "github", "line_count": 133, "max_line_length": 119, "avg_line_length": 47.75187969924812, "alnum_prop": 0.5800661313179027, "repo_name": "vfiftyfive/nested_vsphere", "id": "bf1a6166d219b8c8c1b448d94f597dfae28e8599", "size": "6351", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "vesx.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "13641" } ], "symlink_target": "" }
namespace SquabPie.Mono.Cecil {

	/// <summary>
	/// A manifest resource whose data lives in an external file rather than
	/// inside the assembly image.
	/// </summary>
	public sealed class LinkedResource : Resource {

		// Hash of the linked file's contents; internal -- presumably written
		// by the assembly reader (TODO confirm).
		internal byte [] hash;
		// Name of the external file this resource links to.
		string file;

		/// <summary>Hash of the linked file, as recorded in the metadata. Read-only.</summary>
		public byte [] Hash {
			get { return hash; }
		}

		/// <summary>Name of the external file containing the resource data.</summary>
		public string File {
			get { return file; }
			set { file = value; }
		}

		/// <summary>Always <c>ResourceType.Linked</c> for this class.</summary>
		public override ResourceType ResourceType {
			get { return ResourceType.Linked; }
		}

		/// <summary>Creates a linked resource with no file name set.</summary>
		public LinkedResource (string name, ManifestResourceAttributes flags)
			: base (name, flags)
		{
		}

		/// <summary>Creates a linked resource pointing at <paramref name="file"/>.</summary>
		public LinkedResource (string name, ManifestResourceAttributes flags, string file)
			: base (name, flags)
		{
			this.file = file;
		}
	}
}
{ "content_hash": "b21a270d4c2e78999b8e9a2792913e05", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 84, "avg_line_length": 18.5, "alnum_prop": 0.6655405405405406, "repo_name": "ttRevan/cecil", "id": "5e12a399b73b8aa009fe7ee10a5abfa65df75dd6", "size": "769", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Mono.Cecil/LinkedResource.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "1238218" } ], "symlink_target": "" }
<?php

/**
 * Provides the navigation editing capabilities for admins to add pages
 * and reorganize them in the site tree.
 */

// Admins only; everyone else is redirected to the admin entry point.
if (! User::require_admin ()) {
	$this->redirect ('/admin');
}

$page->title = i18n_get ('Navigation');
$page->layout = 'admin';

// jsTree provides the interactive tree widget used by the admin view.
$page->add_script ('<script src="/apps/navigation/js/jquery.jstree.js"></script>');

// get ids already in tree to skip
$nav = new Navigation;
$ids = $nav->get_all_ids ();

// build other page list
require_once ('apps/navigation/lib/Functions.php');
$pages = navigation_get_other_pages ($ids);

// Render the admin view with the pages not yet placed in the tree.
echo $tpl->render ('navigation/admin', array ('pages' => $pages));

?>
{ "content_hash": "492b9471f4f841d5f3c62e45858eee75", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 83, "avg_line_length": 22.357142857142858, "alnum_prop": 0.6613418530351438, "repo_name": "jbroadway/elefant-quickstart", "id": "292a27a215f34cbf7c636ca28dc17803b06b81f5", "size": "626", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "apps/navigation/handlers/admin.php", "mode": "33188", "license": "mit", "language": [ { "name": "CoffeeScript", "bytes": "2266" }, { "name": "JavaScript", "bytes": "691639" }, { "name": "PHP", "bytes": "1199471" } ], "symlink_target": "" }
<?php

/**
 * Hooks the crossselling module onto hook 9, positioned after the current
 * maximum position for that hook.
 *
 * Fix: the existence check previously read "SELECT FROM `..module`", which
 * is invalid SQL (the select list is missing) and therefore could never
 * return rows; it now selects `id_module` explicitly.
 */
function move_crossselling()
{
	// Only act when the crossselling module is actually present.
	if (Db::getInstance()->ExecuteS('SELECT `id_module` FROM `'._DB_PREFIX_.'module` WHERE `name` = \'crossselling\''))
	{
		Db::getInstance()->Execute('
		INSERT INTO `'._DB_PREFIX_.'hook_module` (`id_module`, `id_hook`, `position`)
		VALUES ((SELECT `id_module` FROM `'._DB_PREFIX_.'module` WHERE `name` = \'crossselling\'), 9, (SELECT max_position FROM (SELECT MAX(position)+1 as max_position FROM `'._DB_PREFIX_.'hook_module` WHERE `id_hook` = 9) tmp))');
	}
}
{ "content_hash": "31bf884b8dfd11d8d6646177c4d70ba2", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 223, "avg_line_length": 34.42857142857143, "alnum_prop": 0.6514522821576764, "repo_name": "j1v3/lakombi", "id": "44aefb5f9d3ebde8ee67e0c37d799af6b7b411c6", "size": "482", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/LaKombi/ShopBundle/install/php/move_crossselling.php", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "1927410" }, { "name": "PHP", "bytes": "32090445" } ], "symlink_target": "" }
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package ogldevtutorials.util; import jglm.Quat; import jglm.Vec3; /** * * @author gbarbieri */ public class ViewData { private Vec3 targetPos; private Quat orient; private float radius; public ViewData(Vec3 targetPos, Quat orient, float radius) { this.targetPos = targetPos; this.orient = orient; this.radius = radius; } public void reset(){ targetPos = new Vec3(0, 0, 0); orient = new Quat(0, 0, 0, 1); } // public void save() public Vec3 getTargetPos() { return targetPos; } public Quat getOrient() { return orient; } public float getRadius() { return radius; } public void setOrient(Quat orient) { this.orient = orient; } public void setRadius(float radius) { this.radius = radius; } public void setTargetPos(Vec3 targetPos) { this.targetPos = targetPos; } }
{ "content_hash": "5d3e330d9b2ab1eba5739388081e7e15", "timestamp": "", "source": "github", "line_count": 59, "max_line_length": 64, "avg_line_length": 19.25423728813559, "alnum_prop": 0.5519366197183099, "repo_name": "elect86/oglDevTutorials", "id": "2b8185707eab3d6cd2a00ad9b7441776f5fe2ecb", "size": "1136", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "oglDevTutorials/src/ogldevtutorials/util/ViewData.java", "mode": "33188", "license": "mit", "language": [ { "name": "GLSL", "bytes": "49944" }, { "name": "Java", "bytes": "237450" } ], "symlink_target": "" }
@implementation KNTLiteralsRule

// Lint rule: flags old-style NSNumber/NSArray/NSDictionary construction and
// suggests the corresponding Objective-C literal syntax instead.
// The filename parameter is unused by this rule.
+ (NSArray*)validateSource:(NSString *)source filename:(NSString *)filename
{
    // The three detection patterns are compiled once and cached for the
    // process lifetime. RX is presumably a project macro building an
    // NSRegularExpression from the pattern string -- TODO confirm.
    static NSRegularExpression * number, * array, * dictionary;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        // Matches "[NSNumber numberWith..." and "[[NSNumber alloc] initWith..."
        number = RX(@"(NSNumber number|NSNumber alloc] init)With");
        array = RX(@"(NSArray array|NSArray alloc] init)(With|])");
        dictionary = RX(@"(NSDictionary dictionary|NSDictionary alloc] init)(]|WithObjects)");
    });

    // Check each line against the patterns; the first match wins and its
    // message is reported. Returning nil marks the line as clean.
    return [KNTRule validateLines:source rule:^NSString *(NSString *line, NSUInteger lineNumber, BOOL *stop) {
        if ([number hasAnyMatches:line])
        {
            return @"Use @() literal for creating NSNumber objects";
        }
        if ([array hasAnyMatches:line])
        {
            return @"Use @[] literal for creating NSArray objects";
        }
        if ([dictionary hasAnyMatches:line])
        {
            return @"Use @{:} literal for creating NSDictionary objects";
        }
        return nil;
    }];
}

@end
{ "content_hash": "3f085f461d6e5d7a232ea1b07f47d5e2", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 110, "avg_line_length": 37.592592592592595, "alnum_prop": 0.6246305418719211, "repo_name": "programmingthomas/Knyt", "id": "ae8696777ba79b0514c4676d672a1db4395b9590", "size": "1660", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Knyt/Source code validation/Rules/Literals/KNTLiteralsRule.m", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Objective-C", "bytes": "65525" } ], "symlink_target": "" }
package org.jmotor.sbt.service

import org.apache.maven.artifact.versioning.{ArtifactVersion, DefaultArtifactVersion}
import org.jmotor.artifact.Versions
import org.jmotor.artifact.exception.ArtifactNotFoundException
import org.jmotor.artifact.metadata.MetadataLoader
import org.jmotor.artifact.metadata.loader.{IvyPatternsMetadataLoader, MavenRepoMetadataLoader, MavenSearchMetadataLoader}
import org.jmotor.sbt.dto.{ModuleStatus, Status}
import org.jmotor.sbt.exception.MultiException
import org.jmotor.sbt.metadata.MetadataLoaderGroup
import sbt.Credentials
import sbt.librarymanagement.{MavenRepository, ModuleID, Resolver, URLRepository}
import sbt.util.Logger

import java.net.URL
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.control.NonFatal
import scala.util.{Failure, Success, Try}

/**
 * Resolver-backed implementation of the version service: for a given module
 * (or sbt plugin) it queries the configured resolvers, falling back to Maven
 * central search, and reports whether a newer release is available.
 *
 * Component: Description: Date: 2018/2/9
 *
 * @author
 *   AI
 */
class VersionServiceImpl(
    logger: Logger,
    scalaVersion: String,
    scalaBinaryVersion: String,
    resolvers: Seq[Resolver],
    credentials: Seq[Credentials]
) extends VersionService {

  // Loader groups built once from the project's resolvers; the second group
  // (Maven search) is only consulted when the first yields nothing.
  private[this] lazy val groups = getLoaderGroups(resolvers, credentials)

  override def checkForUpdates(module: ModuleID): Future[ModuleStatus] = check(module)

  override def checkPluginForUpdates(
      module: ModuleID,
      sbtBinaryVersion: String,
      sbtScalaBinaryVersion: String
  ): Future[ModuleStatus] = check(module, Option(sbtBinaryVersion -> sbtScalaBinaryVersion))

  /**
   * Queries each loader group in order until one returns versions. Errors are
   * accumulated across groups; if every group fails or finds nothing, the
   * result is Status.Error (with the collected messages) or Status.NotFound.
   */
  private[this] def check(module: ModuleID, sbtSettings: Option[(String, String)] = None): Future[ModuleStatus] = {
    val mv = new DefaultArtifactVersion(module.revision)
    val released = Versions.isReleaseVersion(mv)
    // The current version's qualifier is only honored for released versions.
    val qualifierOpt = if (released && Option(mv.getQualifier).isDefined) Option(mv.getQualifier) else None
    // Fold over the groups, threading (accumulated errors, optional status):
    // once a status is found, later groups are skipped.
    groups.foldLeft(Future.successful(Seq.empty[String] -> Option.empty[ModuleStatus])) { (future, group) =>
      future.flatMap {
        case (_, opt@Some(_)) => Future.successful(Seq.empty[String] -> opt)
        case (errors, _) =>
          group.getVersions(module, sbtSettings).map {
            case Nil => errors -> None
            case versions =>
              val (max: ArtifactVersion, status: Status.Value) = getModuleStatus(mv, released, qualifierOpt, versions)
              Seq.empty[String] -> Option(ModuleStatus(module, status, max.toString))
          } recover {
            // Not-found is silent; other failures contribute error messages.
            case NonFatal(_: ArtifactNotFoundException) => errors -> None
            case NonFatal(t: MultiException) => (errors ++ t.getMessages) -> None
            case NonFatal(t) => (errors :+ t.getLocalizedMessage) -> None
          }
      }
    } map {
      case (_, Some(status)) => status
      case (errors, _) if errors.nonEmpty => ModuleStatus(module, Status.Error, errors)
      case _ => ModuleStatus(module, Status.NotFound)
    }
  }

  /**
   * Picks the newest matching release from the candidate versions and grades
   * the current version against it (Success / Expired / Unreleased).
   *
   * NOTE(review): `matches.max` throws on an empty collection -- presumably
   * the candidate list always contains at least one suitable release when
   * this is reached; confirm against MetadataLoaderGroup.getVersions.
   */
  private def getModuleStatus(
      mv: DefaultArtifactVersion,
      released: Boolean,
      qualifierOpt: Option[String],
      versions: Seq[ArtifactVersion]
  ) = {
    val releases = versions.filter(Versions.isReleaseVersion)
    // With no qualifier on the current version, exclude JRE-specific
    // qualifiers; with a qualifier, only compare like-for-like.
    val matches = qualifierOpt match {
      case None =>
        releases.filter { av =>
          Option(av.getQualifier) match {
            case None => true
            case Some(qualifier) => !Versions.isJreQualifier(qualifier)
          }
        }
      case Some(q) => releases.filter(av => Option(av.getQualifier).isDefined && q == av.getQualifier)
    }
    val max = matches.max
    val status = if (!released) {
      Status.Unreleased
    } else {
      mv.compareTo(max) match {
        case 0 | 1 => Status.Success
        case _ => Status.Expired
      }
    }
    (max, status)
  }

  /**
   * Builds the loader groups from the project's resolvers. Only remote
   * (http/https) Maven repositories and ivy-pattern URL repositories are
   * considered; a Maven central search loader is always appended last.
   */
  private[this] def getLoaderGroups(
      resolvers: Seq[Resolver],
      credentials: Seq[Credentials]
  ): Seq[MetadataLoaderGroup] = {
    val loaders: Seq[MetadataLoader] = resolvers.map {
      case repo: MavenRepository =>
        val url = repo.root
        if (isRemote(url)) {
          Option(new MavenRepoMetadataLoader(url, getCredentials(url, credentials)))
        } else {
          None
        }
      case repo: URLRepository =>
        val patterns = repo.patterns.ivyPatterns
        if (patterns.forall(isRemote)) {
          Option(new IvyPatternsMetadataLoader(patterns, getCredentials(patterns.head, credentials)))
        } else {
          None
        }
      case _ => None
    } collect { case Some(loader) => loader }
    val mavenSearchMaxRows = 100
    Seq(
      MetadataLoaderGroup(scalaVersion, scalaBinaryVersion, loaders: _*),
      MetadataLoaderGroup(scalaVersion, scalaBinaryVersion, MavenSearchMetadataLoader(mavenSearchMaxRows))
    )
  }

  /**
   * Looks up HTTP basic-auth credentials for the URL's host, returning the
   * encoded Authorization value. Lookup failures are logged and ignored.
   */
  private[this] def getCredentials(url: String, credentials: Seq[Credentials]): Option[String] = {
    val host = new URL(url).getHost
    Try {
      Credentials.forHost(credentials, host).map { c =>
        okhttp3.Credentials.basic(c.userName, c.passwd)
      }
    } match {
      case Success(r) => r
      case Failure(t) =>
        logger.warn(t.getLocalizedMessage)
        None
    }
  }

  // A resolver location is "remote" when it is served over http(s).
  private[this] def isRemote(url: String): Boolean = {
    url.startsWith("http://") || url.startsWith("https://")
  }
}
{ "content_hash": "11ae08d28fdb3fca598f890271d2e153", "timestamp": "", "source": "github", "line_count": 147, "max_line_length": 122, "avg_line_length": 38.80952380952381, "alnum_prop": 0.6141980718667835, "repo_name": "aiyanbo/sbt-dependency-updates", "id": "f02d16086b0b27cdfd861b190e1e329774d1a409", "size": "5705", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/scala/org/jmotor/sbt/service/VersionServiceImpl.scala", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Scala", "bytes": "40387" } ], "symlink_target": "" }
package com.blstream.myhoard.biz.model;

import com.blstream.myhoard.db.model.UserDS;

import java.util.List;
import java.util.Set;

import javax.validation.constraints.NotNull;
import javax.validation.constraints.Pattern;
import javax.validation.constraints.Size;

import org.codehaus.jackson.annotate.JsonIgnore;
import org.codehaus.jackson.annotate.JsonProperty;
import org.codehaus.jackson.map.annotate.JsonSerialize;

/**
 * Data transfer object for a user account, carrying the credentials and
 * OAuth-style token fields exchanged with the API. Bean-validation messages
 * are in Polish, matching the rest of the application.
 *
 * Fix: the class overrode equals() without hashCode(), breaking the
 * equals/hashCode contract (e.g. for use in hash-based collections); an
 * id-based hashCode() is now provided.
 *
 * @author z0lfik
 */
public class UserDTO {

    private String id;

    @NotNull(message = "Adres e-mail jest wymagany")
    @Pattern(message = "Niepoprawny e-mail",
            regexp = "^[a-zA-Z0-9_-]+(\\.[a-zA-Z0-9_-]+)*@[a-zA-Z0-9]+(\\.[a-zA-Z0-9]+)+(\\.[a-zA-Z0-9]+)*$")
    private String email;

    @Pattern(regexp = "^[a-zA-Z0-9_]+", message = "Nazwa użytkownika może zawierać jedynie litery, cyfry oraz znak _")
    private String username;

    @NotNull(message = "Hasło jest wymagane")
    @Size(min = 4, message = "Hasło musi zawierać co najmniej 4 znaki")
    private String password;

    // OAuth-style request fields; never serialized back to clients.
    private String grantType;
    private String refreshToken;

    // Exposed in JSON under the name "public".
    private boolean visible;

    @JsonIgnore
    private List<CollectionDTO> favourites;

    public UserDTO() {
        id = "0";
    }

    public UserDTO(String id, String email, String username, String password, boolean visible) {
        this.id = id;
        this.email = email;
        this.username = username;
        this.password = password;
        this.visible = visible;
    }

    @JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
    public String getId() {
        return id;
    }

    // The id is assigned server-side and never read from client JSON.
    @JsonIgnore
    public void setId(String id) {
        this.id = id;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    // The password is accepted from client JSON but never serialized out.
    @JsonIgnore
    public String getPassword() {
        return password;
    }

    @JsonProperty("password")
    public void setPassword(String password) {
        this.password = password;
    }

    @JsonIgnore
    public String getGrantType() {
        return grantType;
    }

    @JsonProperty("grant_type")
    public void setGrantType(String grantType) {
        this.grantType = grantType;
    }

    @JsonIgnore
    public String getRefreshToken() {
        return refreshToken;
    }

    @JsonProperty("refresh_token")
    public void setRefreshToken(String refreshToken) {
        this.refreshToken = refreshToken;
    }

    public List<CollectionDTO> getFavourites() {
        return favourites;
    }

    public void setFavourites(List<CollectionDTO> favourites) {
        this.favourites = favourites;
    }

    /**
     * Copies fields from {@code object} onto this instance. A field is
     * overwritten when it is currently {@code null} here, or when the
     * incoming value is non-null and different from the current one.
     */
    public void updateObject(UserDTO object) {
        if (this == object || object == null) {
            return;
        }
        if (username == null || object.username != null && !username.equals(object.username)) {
            username = object.username;
        }
        if (email == null || object.email != null && !email.equals(object.email)) {
            email = object.email;
        }
        if (password == null || object.password != null && !password.equals(object.password)) {
            password = object.password;
        }
    }

    /**
     * Converts this DTO to its persistence counterpart.
     * Note: throws NumberFormatException when the id is not a valid integer.
     */
    public UserDS toUserDS() {
        return new UserDS(Integer.parseInt(id), email, username, password, visible);
    }

    /** Equality is based solely on {@link #id}. */
    @Override
    public boolean equals(Object obj) {
        if (obj == null || !(obj instanceof UserDTO))
            return false;
        UserDTO user = (UserDTO) obj;
        return id.equals(user.id);
    }

    /** Id-based hash, mirroring {@link #equals(Object)}. */
    @Override
    public int hashCode() {
        return id == null ? 0 : id.hashCode();
    }

    /**
     * @return the visible
     */
    @JsonIgnore
    public boolean getVisible() {
        return visible;
    }

    /**
     * @param visible the visible to set
     */
    @JsonProperty(value = "public")
    public void setVisible(boolean visible) {
        this.visible = visible;
    }
}
{ "content_hash": "7589d37075bdb4aae8aa302ecf1d1cbc", "timestamp": "", "source": "github", "line_count": 158, "max_line_length": 118, "avg_line_length": 25.22151898734177, "alnum_prop": 0.6155583437892095, "repo_name": "blstream/myHoard_Java2", "id": "6a9b6d3f0c309926ae56a050505e7d56ef28bcd1", "size": "3991", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/com/blstream/myhoard/biz/model/UserDTO.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "173629" }, { "name": "SQL", "bytes": "15443" } ], "symlink_target": "" }
package com.cyc.model.templates.web;

import com.cyc.model.templates.VelocityTemplateGroup;

/**
 * Base class for template groups that generate web assets (JS, HTML, CSS).
 *
 * @author nwinant
 */
abstract public class WebTemplateGroup extends VelocityTemplateGroup {

  /**
   * @param baseTargetDir base output directory (forwarded to VelocityTemplateGroup)
   * @param baseTemplateDir base template directory (forwarded to VelocityTemplateGroup)
   */
  public WebTemplateGroup(String baseTargetDir, String baseTemplateDir) {
    super(baseTargetDir, baseTemplateDir);
  }

  /** Uses the default web target and template directories from WebTemplate. */
  public WebTemplateGroup() {
    this(WebTemplate.WEB_BASE_TARGET_DIR, WebTemplate.DEFAULT_WEB_TEMPLATE_DIR);
  }


  // Protected helpers

  /**
   * Naive English pluralization: appends "es" to words already ending in
   * "s", otherwise appends "s". Not a general-purpose pluralizer.
   */
  protected String pluralize(String word) {
    if (word.endsWith("s")) {
      return word + "es";
    }
    return word + "s";
  }


  // Internal

  /** A LocalFile whose file name always carries the ".js" extension. */
  protected static class JSFile extends LocalFile {
    public JSFile(String dir, String filename) {
      super(dir, filename);
    }
    @Override
    public String getFileName() {
      return this.filename + ".js";
    }
  }

  /** A LocalFile whose file name always carries the ".html" extension. */
  protected static class HTMLFile extends LocalFile {
    public HTMLFile(String dir, String filename) {
      super(dir, filename);
    }
    @Override
    public String getFileName() {
      return this.filename + ".html";
    }
  }

  /** A LocalFile whose file name always carries the ".css" extension. */
  protected static class CSSFile extends LocalFile {
    public CSSFile(String dir, String filename) {
      super(dir, filename);
    }
    @Override
    public String getFileName() {
      return this.filename + ".css";
    }
  }
}
{ "content_hash": "e7054e4958af3a4de5f57a39bdf474ee", "timestamp": "", "source": "github", "line_count": 52, "max_line_length": 80, "avg_line_length": 24.73076923076923, "alnum_prop": 0.6757387247278382, "repo_name": "cycorp/model-generator-suite", "id": "ccc96ca70fa287c1da23fc6c0fb9b35555237eb2", "size": "1286", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "model-generator/src/main/java/com/cyc/model/templates/web/WebTemplateGroup.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "618" }, { "name": "Java", "bytes": "235173" } ], "symlink_target": "" }
package pivotal.au.se.gemfirexdweb.dao;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;

import pivotal.au.se.gemfirexdweb.main.Result;
import pivotal.au.se.gemfirexdweb.main.SqlFireException;
import pivotal.au.se.gemfirexdweb.utils.AdminUtil;
import pivotal.au.se.gemfirexdweb.utils.JDBCUtil;

/**
 * DAO helpers that execute ad-hoc SQL/DDL commands and catalog queries against
 * a GemFire XD connection obtained from {@link AdminUtil} for a user session.
 *
 * NOTE(review): connections returned by AdminUtil are intentionally not closed
 * here — presumably they are session-scoped and managed by AdminUtil; confirm.
 */
public class GemFireXDWebDAOUtil
{
    /**
     * Execute a single SQL/DDL command on the connection associated with the
     * given user session key.
     *
     * @param command SQL/DDL text to execute
     * @param userKey session key used to look up the connection
     * @return a Result whose message is "SUCCESS" or the SQLException text
     * @throws SqlFireException on any non-SQL failure (e.g. connection lookup)
     */
    static public Result runCommand (String command, String userKey) throws SqlFireException
    {
        Result res = new Result();
        Connection conn = null;
        Statement stmt = null;

        res.setCommand(command);
        try
        {
            conn = AdminUtil.getConnection(userKey);
            stmt = conn.createStatement();
            stmt.execute(command);
            // no need to commit it's auto commit already as it's DDL statement.
            res.setCommand(command);
            res.setMessage("SUCCESS");
        }
        catch (SQLException se)
        {
            // we don't want to stop it running we just need the error
            res.setMessage(se.getMessage());
        }
        catch (Exception ex)
        {
            throw new SqlFireException(ex);
        }
        finally
        {
            JDBCUtil.close(stmt);
        }

        return res;
    }

    /**
     * Execute a stored procedure / callable command on the connection
     * associated with the given user session key.
     *
     * @param command callable SQL text, e.g. "{call proc(...)}"
     * @param userKey session key used to look up the connection
     * @return a Result whose message is "SUCCESS" or the SQLException text
     * @throws SqlFireException on any non-SQL failure
     */
    static public Result runStoredCommand (String command, String userKey) throws SqlFireException
    {
        Result res = new Result();
        Connection conn = null;
        PreparedStatement stmt = null;

        res.setCommand(command);
        try
        {
            conn = AdminUtil.getConnection(userKey);
            stmt = conn.prepareCall(command);
            stmt.execute();
            // no need to commit it's auto commit already as it's DDL statement.
            res.setCommand(command);
            res.setMessage("SUCCESS");
        }
        catch (SQLException se)
        {
            // we don't want to stop it running we just need the error
            res.setMessage(se.getMessage());
        }
        catch (Exception ex)
        {
            throw new SqlFireException(ex);
        }
        finally
        {
            JDBCUtil.close(stmt);
        }

        return res;
    }

    /**
     * Return the names of all schemas in the catalog, ordered alphabetically.
     *
     * @param userKey session key used to look up the connection
     * @return list of schema names (possibly empty)
     * @throws SqlFireException if the query fails
     */
    static public List<String> getAllSchemas (String userKey) throws SqlFireException
    {
        List<String> schemas = new ArrayList<String>();
        Connection conn = null;
        Statement stmt = null;
        ResultSet rset = null;
        String sql = "select schemaname from sys.sysschemas order by 1";

        try
        {
            conn = AdminUtil.getConnection(userKey);
            stmt = conn.createStatement();
            rset = stmt.executeQuery(sql);
            while (rset.next())
            {
                schemas.add(rset.getString(1));
            }
        }
        catch (Exception ex)
        {
            throw new SqlFireException(ex);
        }
        finally
        {
            // BUGFIX: the ResultSet was previously leaked; close it before the Statement.
            if (rset != null)
            {
                try
                {
                    rset.close();
                }
                catch (SQLException ignore)
                {
                    // best-effort cleanup; nothing useful to do here
                }
            }
            JDBCUtil.close(stmt);
        }

        return schemas;
    }
}
{ "content_hash": "2b5111ed2958548b79aaf291611c1903", "timestamp": "", "source": "github", "line_count": 119, "max_line_length": 97, "avg_line_length": 24.873949579831933, "alnum_prop": 0.5986486486486486, "repo_name": "papicella/snappy-store", "id": "e0cede1911d033fb488885f88eb49a2783f35f79", "size": "3634", "binary": false, "copies": "1", "ref": "refs/heads/snappy/master", "path": "Snappy-Web/src/main/java/pivotal/au/se/gemfirexdweb/dao/GemFireXDWebDAOUtil.java", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "AGS Script", "bytes": "90653" }, { "name": "Assembly", "bytes": "962433" }, { "name": "Batchfile", "bytes": "30248" }, { "name": "C", "bytes": "311620" }, { "name": "C#", "bytes": "1352292" }, { "name": "C++", "bytes": "2029050" }, { "name": "CSS", "bytes": "81939" }, { "name": "Gnuplot", "bytes": "3125" }, { "name": "HTML", "bytes": "8610280" }, { "name": "Java", "bytes": "119311633" }, { "name": "JavaScript", "bytes": "36789" }, { "name": "Makefile", "bytes": "18443" }, { "name": "Mathematica", "bytes": "92588" }, { "name": "Objective-C", "bytes": "1069" }, { "name": "PHP", "bytes": "581417" }, { "name": "PLSQL", "bytes": "86549" }, { "name": "PLpgSQL", "bytes": "33847" }, { "name": "Pascal", "bytes": "808" }, { "name": "Perl", "bytes": "196843" }, { "name": "Python", "bytes": "12796" }, { "name": "Ruby", "bytes": "1380" }, { "name": "SQLPL", "bytes": "219147" }, { "name": "Shell", "bytes": "547363" }, { "name": "SourcePawn", "bytes": "29958" }, { "name": "Thrift", "bytes": "31125" }, { "name": "XSLT", "bytes": "77109" } ], "symlink_target": "" }
import logging
import re

# Patterns matching names commonly used by spam accounts.
invite_pattern = re.compile(r"discord\.gg/\w{3}")
tag_pattern = re.compile(r"add.*tag.*\d{4}")
twitch_pattern = re.compile(r"twitch\.tv")
twitter_pattern = re.compile(r"twitter\.com")
debugging_pattern = re.compile(r"elmerbot_spam_name_debugging")
name_patterns = [invite_pattern, tag_pattern, twitch_pattern, twitter_pattern, debugging_pattern]


def check_name(name):
    """Return True if ``name`` matches a known spammer naming pattern.

    This aims to catch a few common types of spammers. Right now it includes:
       @discord.gg/abcdefg
       @pls add blahblah (tag) 1234
    plus twitch.tv / twitter.com link names and a debugging sentinel.

    Note: matching is case-sensitive, as the patterns are compiled without
    re.IGNORECASE.
    """
    logger = logging.getLogger("elmerbot.namecheck")
    for pattern in name_patterns:
        if pattern.search(name):
            # Logger.warn is deprecated since Python 3.3; use warning() instead.
            logger.warning(f'Found spammer with name "{name}" using pattern "{pattern.pattern}"')
            return True
    return False
{ "content_hash": "8523b50caa4c7678d899c3b4d42ba9f1", "timestamp": "", "source": "github", "line_count": 24, "max_line_length": 97, "avg_line_length": 33.458333333333336, "alnum_prop": 0.6874221668742216, "repo_name": "scott-hand/elmerbot", "id": "b441466f56eecfaa447749fd64261482d6c8e5aa", "size": "803", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "elmerbot/antispam.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "27490" } ], "symlink_target": "" }
package burai.atoms.visible; import burai.atoms.design.Design; import burai.com.env.Environments; import burai.com.font.FontTools; import javafx.geometry.Point3D; import javafx.scene.Group; import javafx.scene.paint.Color; import javafx.scene.paint.PhongMaterial; import javafx.scene.shape.Cylinder; import javafx.scene.text.Font; import javafx.scene.text.Text; import javafx.scene.transform.Affine; import javafx.scene.transform.Rotate; public class XYZAxis extends Group { private static final double CYLINDER_RADIUS = 0.03; private static final double CYLINDER_HEIGHT = 1.00; private static final double TEXT_SIZE = Environments.isLinux() ? 1.0 : 0.5; private static final double TEXT_SCALE = Environments.isLinux() ? 0.5 : 1.0; private static final String TEXT_FONT = FontTools.getRomanFont(); private Design design; public XYZAxis() { this(null); } public XYZAxis(Design design) { super(); this.design = design; this.creatAx(0, "X", Color.RED); this.creatAx(1, "Y", Color.BLUE); this.creatAx(2, "Z", Color.GREEN); } private void creatAx(int index, String label, Color color) { PhongMaterial material = new PhongMaterial(); material.setDiffuseColor(color); material.setSpecularColor(Color.SILVER); Cylinder cylinder = new Cylinder(CYLINDER_RADIUS, CYLINDER_HEIGHT); cylinder.setMaterial(material); Text text = new Text(label); text.setFont(Font.font(TEXT_FONT, TEXT_SIZE)); text.setTranslateX(-0.33 * TEXT_SIZE); text.setTranslateY(0.10 * TEXT_SIZE + CYLINDER_HEIGHT); text.setRotationAxis(Rotate.Z_AXIS); text.setRotate(180.0); text.setScaleX(TEXT_SCALE); text.setScaleY(TEXT_SCALE); if (this.design != null) { Color fontColor = this.design.getFontColor(); if (fontColor != null) { text.setFill(fontColor); } this.design.addOnFontColorChanged(fontColor_ -> { if (fontColor_ != null) { text.setFill(fontColor_); } }); } Group group = new Group(); group.getChildren().add(cylinder); group.getChildren().add(text); Affine affine = new Affine(); affine.prependRotation(180.0, Point3D.ZERO, 
Rotate.Y_AXIS); affine.prependTranslation(0.0, 0.5 * CYLINDER_HEIGHT, 0.0); if (index == 0) { affine.prependRotation(-90.0, Point3D.ZERO, Rotate.Z_AXIS); affine.prependRotation(-90.0, Point3D.ZERO, Rotate.X_AXIS); affine.prependRotation(45.0, Point3D.ZERO, Rotate.X_AXIS); } else if (index == 1) { affine.prependRotation(45.0, Point3D.ZERO, Rotate.Y_AXIS); } else if (index == 2) { affine.prependRotation(90.0, Point3D.ZERO, Rotate.X_AXIS); affine.prependRotation(90.0, Point3D.ZERO, Rotate.Z_AXIS); affine.prependRotation(45.0, Point3D.ZERO, Rotate.Z_AXIS); } group.getTransforms().add(affine); this.getChildren().add(group); } }
{ "content_hash": "23adab0afedd95eb07536297dedc1db4", "timestamp": "", "source": "github", "line_count": 94, "max_line_length": 80, "avg_line_length": 34.659574468085104, "alnum_prop": 0.6083486801718846, "repo_name": "BURAI-team/burai", "id": "839cafb8216a3ae47cd97cace96a5faba80174d6", "size": "3878", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/burai/atoms/visible/XYZAxis.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "35888" }, { "name": "Java", "bytes": "2874039" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?>

<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied.  See the License for the
specific language governing permissions and limitations
under the License.
-->

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <parent>
    <groupId>org.apache.maven.plugins.pmd.its</groupId>
    <artifactId>mpmd-138-parent</artifactId>
    <version>1.0-SNAPSHOT</version>
  </parent>

  <artifactId>mpmd-138-mod-1</artifactId>

  <name>Module 1</name>

  <properties>
    <!-- NOTE(review): deliberately overrides the report output encoding to a
         non-UTF-8 charset for this module - presumably what the MPMD-138
         integration test exercises; confirm before changing. -->
    <project.reporting.outputEncoding>ISO-8859-1</project.reporting.outputEncoding>
  </properties>
</project>
{ "content_hash": "f2712391953662409211d437b1310c7f", "timestamp": "", "source": "github", "line_count": 39, "max_line_length": 204, "avg_line_length": 36.12820512820513, "alnum_prop": 0.751596877217885, "repo_name": "apache/maven-plugins", "id": "0704be59b4d24f562dcec101e08cc3f44bb64708", "size": "1409", "binary": false, "copies": "6", "ref": "refs/heads/trunk", "path": "maven-pmd-plugin/src/it/mpmd-138/mod-1/pom.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C++", "bytes": "2714" }, { "name": "CSS", "bytes": "15875" }, { "name": "GAP", "bytes": "1429" }, { "name": "Groovy", "bytes": "347385" }, { "name": "HTML", "bytes": "87959" }, { "name": "Java", "bytes": "7686955" }, { "name": "JavaScript", "bytes": "4347" }, { "name": "Shell", "bytes": "58647" } ], "symlink_target": "" }
package org.apache.spark.deploy.yarn

import java.io.{FileSystem => _, _}
import java.net.{InetAddress, UnknownHostException, URI}
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import java.security.PrivilegedExceptionAction
import java.util.{Locale, Properties, UUID}
import java.util.zip.{ZipEntry, ZipOutputStream}

import scala.collection.JavaConverters._
import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet, ListBuffer, Map}
import scala.util.control.NonFatal

import com.google.common.base.Objects
import com.google.common.io.Files
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs._
import org.apache.hadoop.fs.permission.FsPermission
import org.apache.hadoop.io.DataOutputBuffer
import org.apache.hadoop.mapreduce.MRJobConfig
import org.apache.hadoop.security.{Credentials, UserGroupInformation}
import org.apache.hadoop.util.StringUtils
import org.apache.hadoop.yarn.api._
import org.apache.hadoop.yarn.api.ApplicationConstants.Environment
import org.apache.hadoop.yarn.api.protocolrecords._
import org.apache.hadoop.yarn.api.records._
import org.apache.hadoop.yarn.client.api.{YarnClient, YarnClientApplication}
import org.apache.hadoop.yarn.conf.YarnConfiguration
import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException
import org.apache.hadoop.yarn.util.Records

import org.apache.spark.{SecurityManager, SparkConf, SparkException}
import org.apache.spark.deploy.{SparkApplication, SparkHadoopUtil}
import org.apache.spark.deploy.yarn.config._
import org.apache.spark.deploy.yarn.security.YARNHadoopDelegationTokenManager
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config._
import org.apache.spark.launcher.{LauncherBackend, SparkAppHandle, YarnCommandBuilderUtils}
import org.apache.spark.util.{CallerContext, Utils}

/**
 * Client that prepares resources for, and submits, a Spark application
 * (its ApplicationMaster container) to a YARN ResourceManager.
 */
private[spark] class Client(
    val args: ClientArguments,
    val sparkConf: SparkConf)
  extends Logging {

  import Client._
  import YarnSparkHadoopUtil._

  // Handle to the YARN RM, and the Hadoop/YARN configuration derived from the Spark conf.
  private val yarnClient = YarnClient.createYarnClient
  private val hadoopConf = new YarnConfiguration(SparkHadoopUtil.newConfiguration(sparkConf))

  // True when the driver runs inside the YARN AM ("cluster" deploy mode).
  private val isClusterMode = sparkConf.get("spark.submit.deployMode", "client") == "cluster"

  // AM related configurations
  // In cluster mode the AM hosts the driver, so driver settings apply; otherwise AM settings.
  private val amMemory = if (isClusterMode) {
    sparkConf.get(DRIVER_MEMORY).toInt
  } else {
    sparkConf.get(AM_MEMORY).toInt
  }
  private val amMemoryOverhead = {
    val amMemoryOverheadEntry = if (isClusterMode) DRIVER_MEMORY_OVERHEAD else AM_MEMORY_OVERHEAD
    sparkConf.get(amMemoryOverheadEntry).getOrElse(
      math.max((MEMORY_OVERHEAD_FACTOR * amMemory).toLong, MEMORY_OVERHEAD_MIN)).toInt
  }
  private val amCores = if (isClusterMode) {
    sparkConf.get(DRIVER_CORES)
  } else {
    sparkConf.get(AM_CORES)
  }

  // Executor related configurations
  private val executorMemory = sparkConf.get(EXECUTOR_MEMORY)
  private val executorMemoryOverhead = sparkConf.get(EXECUTOR_MEMORY_OVERHEAD).getOrElse(
    math.max((MEMORY_OVERHEAD_FACTOR * executorMemory).toLong, MEMORY_OVERHEAD_MIN)).toInt

  private val distCacheMgr = new ClientDistributedCacheManager()

  // Kerberos settings; principal and keytab must be set together (enforced below).
  private val principal = sparkConf.get(PRINCIPAL).orNull
  private val keytab = sparkConf.get(KEYTAB).orNull
  private val loginFromKeytab = principal != null
  private val amKeytabFileName: String = {
    require((principal == null) == (keytab == null),
      "Both principal and keytab must be defined, or neither.")
    if (loginFromKeytab) {
      logInfo(s"Kerberos credentials: principal = $principal, keytab = $keytab")
      // Generate a file name that can be used for the keytab file, that does not conflict
      // with any user file.
      new File(keytab).getName() + "-" + UUID.randomUUID().toString
    } else {
      null
    }
  }

  // Bridge back to the process that launched us (spark-submit launcher), so it can
  // observe state changes and request that the application be stopped/killed.
  private val launcherBackend = new LauncherBackend() {
    override protected def conf: SparkConf = sparkConf

    override def onStopRequest(): Unit = {
      if (isClusterMode && appId != null) {
        yarnClient.killApplication(appId)
      } else {
        setState(SparkAppHandle.State.KILLED)
        stop()
      }
    }
  }
  // Return right after submission (cluster mode with WAIT_FOR_APP_COMPLETION disabled).
  private val fireAndForget = isClusterMode && !sparkConf.get(WAIT_FOR_APP_COMPLETION)

  // Assigned by the RM during submitApplication(); null until then.
  private var appId: ApplicationId = null

  // The app staging dir based on the STAGING_DIR configuration if configured
  // otherwise based on the users home directory.
  private val appStagingBaseDir = sparkConf.get(STAGING_DIR).map { new Path(_) }
    .getOrElse(FileSystem.get(hadoopConf).getHomeDirectory())

  /** Forward an application state change to the launcher process. */
  def reportLauncherState(state: SparkAppHandle.State): Unit = {
    launcherBackend.setState(state)
  }

  /** Release the launcher connection and the YARN client. */
  def stop(): Unit = {
    launcherBackend.close()
    yarnClient.stop()
  }

  /**
   * Submit an application running our ApplicationMaster to the ResourceManager.
   *
   * The stable Yarn API provides a convenience method (YarnClient#createApplication) for
   * creating applications and setting up the application submission context. This was not
   * available in the alpha API.
   */
  def submitApplication(): ApplicationId = {
    var appId: ApplicationId = null
    try {
      launcherBackend.connect()
      yarnClient.init(hadoopConf)
      yarnClient.start()

      logInfo("Requesting a new application from cluster with %d NodeManagers"
        .format(yarnClient.getYarnClusterMetrics.getNumNodeManagers))

      // Get a new application from our RM
      val newApp = yarnClient.createApplication()
      val newAppResponse = newApp.getNewApplicationResponse()
      appId = newAppResponse.getApplicationId()

      new CallerContext("CLIENT", sparkConf.get(APP_CALLER_CONTEXT),
        Option(appId.toString)).setCurrentContext()

      // Verify whether the cluster has enough resources for our AM
      verifyClusterResources(newAppResponse)

      // Set up the appropriate contexts to launch our AM
      val containerContext = createContainerLaunchContext(newAppResponse)
      val appContext = createApplicationSubmissionContext(newApp, containerContext)

      // Finally, submit and monitor the application
      logInfo(s"Submitting application $appId to ResourceManager")
      yarnClient.submitApplication(appContext)
      launcherBackend.setAppId(appId.toString)
      reportLauncherState(SparkAppHandle.State.SUBMITTED)

      appId
    } catch {
      case e: Throwable =>
        // If anything failed after the app id was assigned, remove the staging
        // directory before rethrowing, so partial uploads are not left behind.
        if (appId != null) {
          cleanupStagingDir(appId)
        }
        throw e
    }
  }

  /**
   * Cleanup application staging directory.
   */
  private def cleanupStagingDir(appId: ApplicationId): Unit = {
    // Users may ask to keep staging files around for debugging.
    if (sparkConf.get(PRESERVE_STAGING_FILES)) {
      return
    }

    def cleanupStagingDirInternal(): Unit = {
      val stagingDirPath = new Path(appStagingBaseDir, getAppStagingDir(appId))
      try {
        val fs = stagingDirPath.getFileSystem(hadoopConf)
        if (fs.delete(stagingDirPath, true)) {
          logInfo(s"Deleted staging directory $stagingDirPath")
        }
      } catch {
        case ioe: IOException =>
          logWarning("Failed to cleanup staging dir " + stagingDirPath, ioe)
      }
    }

    // With Kerberos in cluster mode, delete as the keytab principal so we have
    // permission on the staging directory.
    if (isClusterMode && principal != null && keytab != null) {
      val newUgi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keytab)
      newUgi.doAs(new PrivilegedExceptionAction[Unit] {
        override def run(): Unit = {
          cleanupStagingDirInternal()
        }
      })
    } else {
      cleanupStagingDirInternal()
    }
  }

  /**
   * Set up the context for submitting our ApplicationMaster.
   * This uses the YarnClientApplication not available in the Yarn alpha API.
   */
  def createApplicationSubmissionContext(
      newApp: YarnClientApplication,
      containerContext: ContainerLaunchContext): ApplicationSubmissionContext = {
    val appContext = newApp.getApplicationSubmissionContext
    appContext.setApplicationName(sparkConf.get("spark.app.name", "Spark"))
    appContext.setQueue(sparkConf.get(QUEUE_NAME))
    appContext.setAMContainerSpec(containerContext)
    appContext.setApplicationType("SPARK")

    sparkConf.get(APPLICATION_TAGS).foreach { tags =>
      appContext.setApplicationTags(new java.util.HashSet[String](tags.asJava))
    }
    sparkConf.get(MAX_APP_ATTEMPTS) match {
      case Some(v) => appContext.setMaxAppAttempts(v)
      case None => logDebug(s"${MAX_APP_ATTEMPTS.key} is not set. " +
          "Cluster's default value will be used.")
    }

    sparkConf.get(AM_ATTEMPT_FAILURE_VALIDITY_INTERVAL_MS).foreach { interval =>
      appContext.setAttemptFailuresValidityInterval(interval)
    }

    // Resource ask for the AM container: memory (incl. overhead) and cores.
    val capability = Records.newRecord(classOf[Resource])
    capability.setMemory(amMemory + amMemoryOverhead)
    capability.setVirtualCores(amCores)

    sparkConf.get(AM_NODE_LABEL_EXPRESSION) match {
      case Some(expr) =>
        val amRequest = Records.newRecord(classOf[ResourceRequest])
        amRequest.setResourceName(ResourceRequest.ANY)
        amRequest.setPriority(Priority.newInstance(0))
        amRequest.setCapability(capability)
        amRequest.setNumContainers(1)
        amRequest.setNodeLabelExpression(expr)
        appContext.setAMContainerResourceRequest(amRequest)
      case None =>
        appContext.setResource(capability)
    }

    sparkConf.get(ROLLED_LOG_INCLUDE_PATTERN).foreach { includePattern =>
      try {
        val logAggregationContext = Records.newRecord(classOf[LogAggregationContext])

        // These two methods were added in Hadoop 2.6.4, so we still need to use reflection to
        // avoid compile error when building against Hadoop 2.6.0 ~ 2.6.3.
        val setRolledLogsIncludePatternMethod =
          logAggregationContext.getClass.getMethod("setRolledLogsIncludePattern", classOf[String])
        setRolledLogsIncludePatternMethod.invoke(logAggregationContext, includePattern)

        sparkConf.get(ROLLED_LOG_EXCLUDE_PATTERN).foreach { excludePattern =>
          val setRolledLogsExcludePatternMethod =
            logAggregationContext.getClass.getMethod("setRolledLogsExcludePattern", classOf[String])
          setRolledLogsExcludePatternMethod.invoke(logAggregationContext, excludePattern)
        }

        appContext.setLogAggregationContext(logAggregationContext)
      } catch {
        case NonFatal(e) =>
          logWarning(s"Ignoring ${ROLLED_LOG_INCLUDE_PATTERN.key} because the version of YARN " +
            "does not support it", e)
      }
    }

    appContext
  }

  /**
   * Set up security tokens for launching our ApplicationMaster container.
   *
   * This method will obtain delegation tokens from all the registered providers, and set them in
   * the AM's launch context.
   */
  private def setupSecurityToken(amContainer: ContainerLaunchContext): Unit = {
    val credentials = UserGroupInformation.getCurrentUser().getCredentials()
    val credentialManager = new YARNHadoopDelegationTokenManager(sparkConf, hadoopConf)
    credentialManager.obtainDelegationTokens(hadoopConf, credentials)

    // When using a proxy user, copy the delegation tokens to the user's credentials. Avoid
    // that for regular users, since in those case the user already has access to the TGT,
    // and adding delegation tokens could lead to expired or cancelled tokens being used
    // later, as reported in SPARK-15754.
    val currentUser = UserGroupInformation.getCurrentUser()
    if (SparkHadoopUtil.get.isProxyUser(currentUser)) {
      currentUser.addCredentials(credentials)
    }

    // Serialize the tokens into the AM container's launch context.
    val dob = new DataOutputBuffer
    credentials.writeTokenStorageToStream(dob)
    amContainer.setTokens(ByteBuffer.wrap(dob.getData))
  }

  /** Get the application report from the ResourceManager for an application we have submitted. */
  def getApplicationReport(appId: ApplicationId): ApplicationReport =
    yarnClient.getApplicationReport(appId)

  /**
   * Return the security token used by this client to communicate with the ApplicationMaster.
   * If no security is enabled, the token returned by the report is null.
   */
  private def getClientToken(report: ApplicationReport): String =
    Option(report.getClientToAMToken).map(_.toString).getOrElse("")

  /**
   * Fail fast if we have requested more resources per container than is available in the cluster.
   */
  private def verifyClusterResources(newAppResponse: GetNewApplicationResponse): Unit = {
    val maxMem = newAppResponse.getMaximumResourceCapability().getMemory()
    logInfo("Verifying our application has not requested more than the maximum " +
      s"memory capability of the cluster ($maxMem MB per container)")
    val executorMem = executorMemory + executorMemoryOverhead
    if (executorMem > maxMem) {
      throw new IllegalArgumentException(s"Required executor memory ($executorMemory" +
        s"+$executorMemoryOverhead MB) is above the max threshold ($maxMem MB) of this cluster! " +
        "Please check the values of 'yarn.scheduler.maximum-allocation-mb' and/or " +
        "'yarn.nodemanager.resource.memory-mb'.")
    }
    val amMem = amMemory + amMemoryOverhead
    if (amMem > maxMem) {
      throw new IllegalArgumentException(s"Required AM memory ($amMemory" +
        s"+$amMemoryOverhead MB) is above the max threshold ($maxMem MB) of this cluster! " +
        "Please increase the value of 'yarn.scheduler.maximum-allocation-mb'.")
    }
    logInfo("Will allocate AM container, with %d MB memory including %d MB overhead".format(
      amMem,
      amMemoryOverhead))

    // We could add checks to make sure the entire cluster has enough resources but that involves
    // getting all the node reports and computing ourselves.
  }

  /**
   * Copy the given file to a remote file system (e.g. HDFS) if needed.
   * The file is only copied if the source and destination file systems are different or the source
   * scheme is "file". This is used for preparing resources for launching the ApplicationMaster
   * container. Exposed for testing.
   */
  private[yarn] def copyFileToRemote(
      destDir: Path,
      srcPath: Path,
      replication: Short,
      symlinkCache: Map[URI, Path],
      force: Boolean = false,
      destName: Option[String] = None): Path = {
    val destFs = destDir.getFileSystem(hadoopConf)
    val srcFs = srcPath.getFileSystem(hadoopConf)
    var destPath = srcPath
    // Local "file" URIs are always uploaded (presumably so cluster nodes can localize them).
    if (force || !compareFs(srcFs, destFs) || "file".equals(srcFs.getScheme)) {
      destPath = new Path(destDir, destName.getOrElse(srcPath.getName()))
      logInfo(s"Uploading resource $srcPath -> $destPath")
      FileUtil.copy(srcFs, srcPath, destFs, destPath, false, hadoopConf)
      destFs.setReplication(destPath, replication)
      destFs.setPermission(destPath, new FsPermission(APP_FILE_PERMISSION))
    } else {
      logInfo(s"Source and destination file systems are the same. Not copying $srcPath")
    }
    // Resolve any symlinks in the URI path so using a "current" symlink to point to a specific
    // version shows the specific version in the distributed cache configuration
    val qualifiedDestPath = destFs.makeQualified(destPath)
    val qualifiedDestDir = qualifiedDestPath.getParent
    val resolvedDestDir = symlinkCache.getOrElseUpdate(qualifiedDestDir.toUri(), {
      val fc = FileContext.getFileContext(qualifiedDestDir.toUri(), hadoopConf)
      fc.resolvePath(qualifiedDestDir)
    })
    new Path(resolvedDestDir, qualifiedDestPath.getName())
  }

  /**
   * Upload any resources to the distributed cache if needed. If a resource is intended to be
   * consumed locally, set up the appropriate config for downstream code to handle it properly.
   * This is used for setting up a container launch context for our ApplicationMaster.
   * Exposed for testing.
   */
  def prepareLocalResources(
      destDir: Path,
      pySparkArchives: Seq[String]): HashMap[String, LocalResource] = {
    logInfo("Preparing resources for our AM container")
    // Upload Spark and the application JAR to the remote file system if necessary,
    // and add them as local resources to the application master.
    val fs = destDir.getFileSystem(hadoopConf)

    // Used to keep track of URIs added to the distributed cache. If the same URI is added
    // multiple times, YARN will fail to launch containers for the app with an internal
    // error.
    val distributedUris = new HashSet[String]
    // Used to keep track of URIs(files) added to the distribute cache have the same name. If
    // same name but different path files are added multiple time, YARN will fail to launch
    // containers for the app with an internal error.
    val distributedNames = new HashSet[String]

    val replication = sparkConf.get(STAGING_FILE_REPLICATION).map(_.toShort)
      .getOrElse(fs.getDefaultReplication(destDir))
    val localResources = HashMap[String, LocalResource]()
    FileSystem.mkdirs(fs, destDir, new FsPermission(STAGING_DIR_PERMISSION))

    val statCache: Map[URI, FileStatus] = HashMap[URI, FileStatus]()
    val symlinkCache: Map[URI, Path] = HashMap[URI, Path]()

    // Returns false (and logs) when the URI or its file name was already registered.
    def addDistributedUri(uri: URI): Boolean = {
      val uriStr = uri.toString()
      val fileName = new File(uri.getPath).getName
      if (distributedUris.contains(uriStr)) {
        logWarning(s"Same path resource $uri added multiple times to distributed cache.")
        false
      } else if (distributedNames.contains(fileName)) {
        logWarning(s"Same name resource $uri added multiple times to distributed cache")
        false
      } else {
        distributedUris += uriStr
        distributedNames += fileName
        true
      }
    }

    /**
     * Distribute a file to the cluster.
     *
     * If the file's path is a "local:" URI, it's actually not distributed. Other files are copied
     * to HDFS (if not already there) and added to the application's distributed cache.
     *
     * @param path URI of the file to distribute.
     * @param resType Type of resource being distributed.
     * @param destName Name of the file in the distributed cache.
     * @param targetDir Subdirectory where to place the file.
     * @param appMasterOnly Whether to distribute only to the AM.
     * @return A 2-tuple. First item is whether the file is a "local:" URI. Second item is the
     *         localized path for non-local paths, or the input `path` for local paths.
     *         The localized path will be null if the URI has already been added to the cache.
     */
    def distribute(
        path: String,
        resType: LocalResourceType = LocalResourceType.FILE,
        destName: Option[String] = None,
        targetDir: Option[String] = None,
        appMasterOnly: Boolean = false): (Boolean, String) = {
      val trimmedPath = path.trim()
      val localURI = Utils.resolveURI(trimmedPath)
      if (localURI.getScheme != LOCAL_SCHEME) {
        if (addDistributedUri(localURI)) {
          val localPath = getQualifiedLocalPath(localURI, hadoopConf)
          val linkname = targetDir.map(_ + "/").getOrElse("") +
            destName.orElse(Option(localURI.getFragment())).getOrElse(localPath.getName())
          val destPath = copyFileToRemote(destDir, localPath, replication, symlinkCache)
          val destFs = FileSystem.get(destPath.toUri(), hadoopConf)
          distCacheMgr.addResource(
            destFs, hadoopConf, destPath, localResources, resType, linkname, statCache,
            appMasterOnly = appMasterOnly)
          (false, linkname)
        } else {
          (false, null)
        }
      } else {
        (true, trimmedPath)
      }
    }

    // If we passed in a keytab, make sure we copy the keytab to the staging directory on
    // HDFS, and setup the relevant environment vars, so the AM can login again.
    if (loginFromKeytab) {
      logInfo("To enable the AM to login from keytab, credentials are being copied over to the AM" +
        " via the YARN Secure Distributed Cache.")
      val (_, localizedPath) = distribute(keytab,
        destName = Some(amKeytabFileName),
        appMasterOnly = true)
      require(localizedPath != null, "Keytab file already distributed.")
    }

    /**
     * Add Spark to the cache. There are two settings that control what files to add to the cache:
     * - if a Spark archive is defined, use the archive. The archive is expected to contain
     *   jar files at its root directory.
     * - if a list of jars is provided, filter the non-local ones, resolve globs, and
     *   add the found files to the cache.
     *
     * Note that the archive cannot be a "local" URI. If none of the above settings are found,
     * then upload all files found in $SPARK_HOME/jars.
     */
    val sparkArchive = sparkConf.get(SPARK_ARCHIVE)
    if (sparkArchive.isDefined) {
      val archive = sparkArchive.get
      require(!isLocalUri(archive), s"${SPARK_ARCHIVE.key} cannot be a local URI.")
      distribute(Utils.resolveURI(archive).toString,
        resType = LocalResourceType.ARCHIVE,
        destName = Some(LOCALIZED_LIB_DIR))
    } else {
      sparkConf.get(SPARK_JARS) match {
        case Some(jars) =>
          // Break the list of jars to upload, and resolve globs.
          val localJars = new ArrayBuffer[String]()
          jars.foreach { jar =>
            if (!isLocalUri(jar)) {
              val path = getQualifiedLocalPath(Utils.resolveURI(jar), hadoopConf)
              val pathFs = FileSystem.get(path.toUri(), hadoopConf)
              pathFs.globStatus(path).filter(_.isFile()).foreach { entry =>
                val uri = entry.getPath().toUri()
                statCache.update(uri, entry)
                distribute(uri.toString(), targetDir = Some(LOCALIZED_LIB_DIR))
              }
            } else {
              localJars += jar
            }
          }

          // Propagate the local URIs to the containers using the configuration.
          sparkConf.set(SPARK_JARS, localJars)

        case None =>
          // No configuration, so fall back to uploading local jar files.
          logWarning(s"Neither ${SPARK_JARS.key} nor ${SPARK_ARCHIVE.key} is set, falling back " +
            "to uploading libraries under SPARK_HOME.")
          val jarsDir = new File(YarnCommandBuilderUtils.findJarsDir(
            sparkConf.getenv("SPARK_HOME")))
          val jarsArchive = File.createTempFile(LOCALIZED_LIB_DIR, ".zip",
            new File(Utils.getLocalDir(sparkConf)))
          val jarsStream = new ZipOutputStream(new FileOutputStream(jarsArchive))

          try {
            // Level 0 = store without compression; the jars are already compressed.
            jarsStream.setLevel(0)
            jarsDir.listFiles().foreach { f =>
              if (f.isFile && f.getName.toLowerCase(Locale.ROOT).endsWith(".jar") && f.canRead) {
                jarsStream.putNextEntry(new ZipEntry(f.getName))
                Files.copy(f, jarsStream)
                jarsStream.closeEntry()
              }
            }
          } finally {
            jarsStream.close()
          }

          distribute(jarsArchive.toURI.getPath,
            resType = LocalResourceType.ARCHIVE,
            destName = Some(LOCALIZED_LIB_DIR))
          jarsArchive.delete()
      }
    }

    /**
     * Copy user jar to the distributed cache if their scheme is not "local".
     * Otherwise, set the corresponding key in our SparkConf to handle it downstream.
     */
    Option(args.userJar).filter(_.trim.nonEmpty).foreach { jar =>
      val (isLocal, localizedPath) = distribute(jar, destName = Some(APP_JAR_NAME))
      if (isLocal) {
        require(localizedPath != null, s"Path $jar already distributed")
        // If the resource is intended for local use only, handle this downstream
        // by setting the appropriate property
        sparkConf.set(APP_JAR, localizedPath)
      }
    }

    /**
     * Do the same for any additional resources passed in through ClientArguments.
     * Each resource category is represented by a 3-tuple of:
     * (1) comma separated list of resources in this category,
     * (2) resource type, and
     * (3) whether to add these resources to the classpath
     */
    val cachedSecondaryJarLinks = ListBuffer.empty[String]
    List(
      (sparkConf.get(JARS_TO_DISTRIBUTE), LocalResourceType.FILE, true),
      (sparkConf.get(FILES_TO_DISTRIBUTE), LocalResourceType.FILE, false),
      (sparkConf.get(ARCHIVES_TO_DISTRIBUTE), LocalResourceType.ARCHIVE, false)
    ).foreach { case (flist, resType, addToClasspath) =>
      flist.foreach { file =>
        val (_, localizedPath) = distribute(file, resType = resType)
        // If addToClassPath, we ignore adding jar multiple times to distributed cache.
        if (addToClasspath) {
          if (localizedPath != null) {
            cachedSecondaryJarLinks += localizedPath
          }
        } else {
          if (localizedPath == null) {
            throw new IllegalArgumentException(s"Attempt to add ($file) multiple times" +
              " to the distributed cache.")
          }
        }
      }
    }
    if (cachedSecondaryJarLinks.nonEmpty) {
      sparkConf.set(SECONDARY_JARS, cachedSecondaryJarLinks)
    }

    if (isClusterMode && args.primaryPyFile != null) {
      distribute(args.primaryPyFile, appMasterOnly = true)
    }

    pySparkArchives.foreach { f => distribute(f) }

    // The python files list needs to be treated especially. All files that are not an
    // archive need to be placed in a subdirectory that will be added to PYTHONPATH.
    sparkConf.get(PY_FILES).foreach { f =>
      val targetDir = if (f.endsWith(".py")) Some(LOCALIZED_PYTHON_DIR) else None
      distribute(f, targetDir = targetDir)
    }

    // Update the configuration with all the distributed files, minus the conf archive. The
    // conf archive will be handled by the AM differently so that we avoid having to send
    // this configuration by other means. See SPARK-14602 for one reason of why this is needed.
    distCacheMgr.updateConfiguration(sparkConf)

    // Upload the conf archive to HDFS manually, and record its location in the configuration.
    // This will allow the AM to know where the conf archive is in HDFS, so that it can be
    // distributed to the containers.
    //
    // This code forces the archive to be copied, so that unit tests pass (since in that case both
    // file systems are the same and the archive wouldn't normally be copied). In most (all?)
    // deployments, the archive would be copied anyway, since it's a temp file in the local file
    // system.
    val remoteConfArchivePath = new Path(destDir, LOCALIZED_CONF_ARCHIVE)
    val remoteFs = FileSystem.get(remoteConfArchivePath.toUri(), hadoopConf)
    sparkConf.set(CACHED_CONF_ARCHIVE, remoteConfArchivePath.toString())

    val localConfArchive = new Path(createConfArchive().toURI())
    copyFileToRemote(destDir, localConfArchive, replication, symlinkCache, force = true,
      destName = Some(LOCALIZED_CONF_ARCHIVE))

    // Manually add the config archive to the cache manager so that the AM is launched with
    // the proper files set up.
    distCacheMgr.addResource(
      remoteFs, hadoopConf, remoteConfArchivePath, localResources, LocalResourceType.ARCHIVE,
      LOCALIZED_CONF_DIR, statCache, appMasterOnly = false)

    // Clear the cache-related entries from the configuration to avoid them polluting the
    // UI's environment page. This works for client mode; for cluster mode, this is handled
    // by the AM.
    CACHE_CONFIGS.foreach(sparkConf.remove)

    localResources
  }

  /**
   * Create an archive with the config files for distribution.
   *
   * These will be used by AM and executors. The files are zipped and added to the job as an
   * archive, so that YARN will explode it when distributing to AM and executors. This directory
   * is then added to the classpath of AM and executor process, just to make sure that everybody
   * is using the same default config.
   *
   * This follows the order of precedence set by the startup scripts, in which HADOOP_CONF_DIR
   * shows up in the classpath before YARN_CONF_DIR.
   *
   * Currently this makes a shallow copy of the conf directory.
If there are cases where a * Hadoop config directory contains subdirectories, this code will have to be fixed. * * The archive also contains some Spark configuration. Namely, it saves the contents of * SparkConf in a file to be loaded by the AM process. */ private def createConfArchive(): File = { val hadoopConfFiles = new HashMap[String, File]() // SPARK_CONF_DIR shows up in the classpath before HADOOP_CONF_DIR/YARN_CONF_DIR sys.env.get("SPARK_CONF_DIR").foreach { localConfDir => val dir = new File(localConfDir) if (dir.isDirectory) { val files = dir.listFiles(new FileFilter { override def accept(pathname: File): Boolean = { pathname.isFile && pathname.getName.endsWith(".xml") } }) files.foreach { f => hadoopConfFiles(f.getName) = f } } } // SPARK-23630: during testing, Spark scripts filter out hadoop conf dirs so that user's // environments do not interfere with tests. This allows a special env variable during // tests so that custom conf dirs can be used by unit tests. val confDirs = Seq("HADOOP_CONF_DIR", "YARN_CONF_DIR") ++ (if (Utils.isTesting) Seq("SPARK_TEST_HADOOP_CONF_DIR") else Nil) confDirs.foreach { envKey => sys.env.get(envKey).foreach { path => val dir = new File(path) if (dir.isDirectory()) { val files = dir.listFiles() if (files == null) { logWarning("Failed to list files under directory " + dir) } else { files.foreach { file => if (file.isFile && !hadoopConfFiles.contains(file.getName())) { hadoopConfFiles(file.getName()) = file } } } } } } val confArchive = File.createTempFile(LOCALIZED_CONF_DIR, ".zip", new File(Utils.getLocalDir(sparkConf))) val confStream = new ZipOutputStream(new FileOutputStream(confArchive)) try { confStream.setLevel(0) // Upload $SPARK_CONF_DIR/log4j.properties file to the distributed cache to make sure that // the executors will use the latest configurations instead of the default values. This is // required when user changes log4j.properties directly to set the log configurations. 
If // configuration file is provided through --files then executors will be taking configurations // from --files instead of $SPARK_CONF_DIR/log4j.properties. // Also upload metrics.properties to distributed cache if exists in classpath. // If user specify this file using --files then executors will use the one // from --files instead. for { prop <- Seq("log4j.properties", "metrics.properties") url <- Option(Utils.getContextOrSparkClassLoader.getResource(prop)) if url.getProtocol == "file" } { val file = new File(url.getPath()) confStream.putNextEntry(new ZipEntry(file.getName())) Files.copy(file, confStream) confStream.closeEntry() } // Save the Hadoop config files under a separate directory in the archive. This directory // is appended to the classpath so that the cluster-provided configuration takes precedence. confStream.putNextEntry(new ZipEntry(s"$LOCALIZED_HADOOP_CONF_DIR/")) confStream.closeEntry() hadoopConfFiles.foreach { case (name, file) => if (file.canRead()) { confStream.putNextEntry(new ZipEntry(s"$LOCALIZED_HADOOP_CONF_DIR/$name")) Files.copy(file, confStream) confStream.closeEntry() } } // Save the YARN configuration into a separate file that will be overlayed on top of the // cluster's Hadoop conf. confStream.putNextEntry(new ZipEntry(SparkHadoopUtil.SPARK_HADOOP_CONF_FILE)) hadoopConf.writeXml(confStream) confStream.closeEntry() // Save Spark configuration to a file in the archive, but filter out the app's secret. val props = new Properties() sparkConf.getAll.foreach { case (k, v) => props.setProperty(k, v) } // Override spark.yarn.key to point to the location in distributed cache which will be used // by AM. 
Option(amKeytabFileName).foreach { k => props.setProperty(KEYTAB.key, k) } confStream.putNextEntry(new ZipEntry(SPARK_CONF_FILE)) val writer = new OutputStreamWriter(confStream, StandardCharsets.UTF_8) props.store(writer, "Spark configuration.") writer.flush() confStream.closeEntry() } finally { confStream.close() } confArchive } /** * Set up the environment for launching our ApplicationMaster container. */ private def setupLaunchEnv( stagingDirPath: Path, pySparkArchives: Seq[String]): HashMap[String, String] = { logInfo("Setting up the launch environment for our AM container") val env = new HashMap[String, String]() populateClasspath(args, hadoopConf, sparkConf, env, sparkConf.get(DRIVER_CLASS_PATH)) env("SPARK_YARN_STAGING_DIR") = stagingDirPath.toString env("SPARK_USER") = UserGroupInformation.getCurrentUser().getShortUserName() // Pick up any environment variables for the AM provided through spark.yarn.appMasterEnv.* val amEnvPrefix = "spark.yarn.appMasterEnv." sparkConf.getAll .filter { case (k, v) => k.startsWith(amEnvPrefix) } .map { case (k, v) => (k.substring(amEnvPrefix.length), v) } .foreach { case (k, v) => YarnSparkHadoopUtil.addPathToEnvironment(env, k, v) } // If pyFiles contains any .py files, we need to add LOCALIZED_PYTHON_DIR to the PYTHONPATH // of the container processes too. Add all non-.py files directly to PYTHONPATH. // // NOTE: the code currently does not handle .py files defined with a "local:" scheme. val pythonPath = new ListBuffer[String]() val (pyFiles, pyArchives) = sparkConf.get(PY_FILES).partition(_.endsWith(".py")) if (pyFiles.nonEmpty) { pythonPath += buildPath(Environment.PWD.$$(), LOCALIZED_PYTHON_DIR) } (pySparkArchives ++ pyArchives).foreach { path => val uri = Utils.resolveURI(path) if (uri.getScheme != LOCAL_SCHEME) { pythonPath += buildPath(Environment.PWD.$$(), new Path(uri).getName()) } else { pythonPath += uri.getPath() } } // Finally, update the Spark config to propagate PYTHONPATH to the AM and executors. 
if (pythonPath.nonEmpty) { val pythonPathStr = (sys.env.get("PYTHONPATH") ++ pythonPath) .mkString(ApplicationConstants.CLASS_PATH_SEPARATOR) env("PYTHONPATH") = pythonPathStr sparkConf.setExecutorEnv("PYTHONPATH", pythonPathStr) } if (isClusterMode) { // propagate PYSPARK_DRIVER_PYTHON and PYSPARK_PYTHON to driver in cluster mode Seq("PYSPARK_DRIVER_PYTHON", "PYSPARK_PYTHON").foreach { envname => if (!env.contains(envname)) { sys.env.get(envname).foreach(env(envname) = _) } } sys.env.get("PYTHONHASHSEED").foreach(env.put("PYTHONHASHSEED", _)) } sys.env.get(ENV_DIST_CLASSPATH).foreach { dcp => env(ENV_DIST_CLASSPATH) = dcp } env } /** * Set up a ContainerLaunchContext to launch our ApplicationMaster container. * This sets up the launch environment, java options, and the command for launching the AM. */ private def createContainerLaunchContext(newAppResponse: GetNewApplicationResponse) : ContainerLaunchContext = { logInfo("Setting up container launch context for our AM") val appId = newAppResponse.getApplicationId val appStagingDirPath = new Path(appStagingBaseDir, getAppStagingDir(appId)) val pySparkArchives = if (sparkConf.get(IS_PYTHON_APP)) { findPySparkArchives() } else { Nil } val launchEnv = setupLaunchEnv(appStagingDirPath, pySparkArchives) val localResources = prepareLocalResources(appStagingDirPath, pySparkArchives) val amContainer = Records.newRecord(classOf[ContainerLaunchContext]) amContainer.setLocalResources(localResources.asJava) amContainer.setEnvironment(launchEnv.asJava) val javaOpts = ListBuffer[String]() // Set the environment variable through a command prefix // to append to the existing value of the variable var prefixEnv: Option[String] = None // Add Xmx for AM memory javaOpts += "-Xmx" + amMemory + "m" val tmpDir = new Path(Environment.PWD.$$(), YarnConfiguration.DEFAULT_CONTAINER_TEMP_DIR) javaOpts += "-Djava.io.tmpdir=" + tmpDir // TODO: Remove once cpuset version is pushed out. 
// The context is, default gc for server class machines ends up using all cores to do gc - // hence if there are multiple containers in same node, Spark GC affects all other containers' // performance (which can be that of other Spark containers) // Instead of using this, rely on cpusets by YARN to enforce "proper" Spark behavior in // multi-tenant environments. Not sure how default Java GC behaves if it is limited to subset // of cores on a node. val useConcurrentAndIncrementalGC = launchEnv.get("SPARK_USE_CONC_INCR_GC").exists(_.toBoolean) if (useConcurrentAndIncrementalGC) { // In our expts, using (default) throughput collector has severe perf ramifications in // multi-tenant machines javaOpts += "-XX:+UseConcMarkSweepGC" javaOpts += "-XX:MaxTenuringThreshold=31" javaOpts += "-XX:SurvivorRatio=8" javaOpts += "-XX:+CMSIncrementalMode" javaOpts += "-XX:+CMSIncrementalPacing" javaOpts += "-XX:CMSIncrementalDutyCycleMin=0" javaOpts += "-XX:CMSIncrementalDutyCycle=10" } // Include driver-specific java options if we are launching a driver if (isClusterMode) { sparkConf.get(DRIVER_JAVA_OPTIONS).foreach { opts => javaOpts ++= Utils.splitCommandString(opts) .map(Utils.substituteAppId(_, appId.toString)) .map(YarnSparkHadoopUtil.escapeForShell) } val libraryPaths = Seq(sparkConf.get(DRIVER_LIBRARY_PATH), sys.props.get("spark.driver.libraryPath")).flatten if (libraryPaths.nonEmpty) { prefixEnv = Some(getClusterPath(sparkConf, Utils.libraryPathEnvPrefix(libraryPaths))) } if (sparkConf.get(AM_JAVA_OPTIONS).isDefined) { logWarning(s"${AM_JAVA_OPTIONS.key} will not take effect in cluster mode") } } else { // Validate and include yarn am specific java options in yarn-client mode. sparkConf.get(AM_JAVA_OPTIONS).foreach { opts => if (opts.contains("-Dspark")) { val msg = s"${AM_JAVA_OPTIONS.key} is not allowed to set Spark options (was '$opts')." 
throw new SparkException(msg) } if (opts.contains("-Xmx")) { val msg = s"${AM_JAVA_OPTIONS.key} is not allowed to specify max heap memory settings " + s"(was '$opts'). Use spark.yarn.am.memory instead." throw new SparkException(msg) } javaOpts ++= Utils.splitCommandString(opts) .map(Utils.substituteAppId(_, appId.toString)) .map(YarnSparkHadoopUtil.escapeForShell) } sparkConf.get(AM_LIBRARY_PATH).foreach { paths => prefixEnv = Some(getClusterPath(sparkConf, Utils.libraryPathEnvPrefix(Seq(paths)))) } } // For log4j configuration to reference javaOpts += ("-Dspark.yarn.app.container.log.dir=" + ApplicationConstants.LOG_DIR_EXPANSION_VAR) val userClass = if (isClusterMode) { Seq("--class", YarnSparkHadoopUtil.escapeForShell(args.userClass)) } else { Nil } val userJar = if (args.userJar != null) { Seq("--jar", args.userJar) } else { Nil } val primaryPyFile = if (isClusterMode && args.primaryPyFile != null) { Seq("--primary-py-file", new Path(args.primaryPyFile).getName()) } else { Nil } val primaryRFile = if (args.primaryRFile != null) { Seq("--primary-r-file", args.primaryRFile) } else { Nil } val amClass = if (isClusterMode) { Utils.classForName("org.apache.spark.deploy.yarn.ApplicationMaster").getName } else { Utils.classForName("org.apache.spark.deploy.yarn.ExecutorLauncher").getName } if (args.primaryRFile != null && args.primaryRFile.endsWith(".R")) { args.userArgs = ArrayBuffer(args.primaryRFile) ++ args.userArgs } val userArgs = args.userArgs.flatMap { arg => Seq("--arg", YarnSparkHadoopUtil.escapeForShell(arg)) } val amArgs = Seq(amClass) ++ userClass ++ userJar ++ primaryPyFile ++ primaryRFile ++ userArgs ++ Seq("--properties-file", buildPath(Environment.PWD.$$(), LOCALIZED_CONF_DIR, SPARK_CONF_FILE)) // Command for the ApplicationMaster val commands = prefixEnv ++ Seq(Environment.JAVA_HOME.$$() + "/bin/java", "-server") ++ javaOpts ++ amArgs ++ Seq( "1>", ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stdout", "2>", ApplicationConstants.LOG_DIR_EXPANSION_VAR 
+ "/stderr") // TODO: it would be nicer to just make sure there are no null commands here val printableCommands = commands.map(s => if (s == null) "null" else s).toList amContainer.setCommands(printableCommands.asJava) logDebug("===============================================================================") logDebug("YARN AM launch context:") logDebug(s" user class: ${Option(args.userClass).getOrElse("N/A")}") logDebug(" env:") if (log.isDebugEnabled) { Utils.redact(sparkConf, launchEnv.toSeq).foreach { case (k, v) => logDebug(s" $k -> $v") } } logDebug(" resources:") localResources.foreach { case (k, v) => logDebug(s" $k -> $v")} logDebug(" command:") logDebug(s" ${printableCommands.mkString(" ")}") logDebug("===============================================================================") // send the acl settings into YARN to control who has access via YARN interfaces val securityManager = new SecurityManager(sparkConf) amContainer.setApplicationACLs( YarnSparkHadoopUtil.getApplicationAclsForYarn(securityManager).asJava) setupSecurityToken(amContainer) amContainer } /** * Report the state of an application until it has exited, either successfully or * due to some failure, then return a pair of the yarn application state (FINISHED, FAILED, * KILLED, or RUNNING) and the final application state (UNDEFINED, SUCCEEDED, FAILED, * or KILLED). * * @param appId ID of the application to monitor. * @param returnOnRunning Whether to also return the application state when it is RUNNING. * @param logApplicationReport Whether to log details of the application report every iteration. * @param interval How often to poll the YARN RM for application status (in ms). * @return A pair of the yarn application state and the final application state. 
*/ def monitorApplication( appId: ApplicationId, returnOnRunning: Boolean = false, logApplicationReport: Boolean = true, interval: Long = sparkConf.get(REPORT_INTERVAL)): YarnAppReport = { var lastState: YarnApplicationState = null while (true) { Thread.sleep(interval) val report: ApplicationReport = try { getApplicationReport(appId) } catch { case e: ApplicationNotFoundException => logError(s"Application $appId not found.") cleanupStagingDir(appId) return YarnAppReport(YarnApplicationState.KILLED, FinalApplicationStatus.KILLED, None) case NonFatal(e) => val msg = s"Failed to contact YARN for application $appId." logError(msg, e) // Don't necessarily clean up staging dir because status is unknown return YarnAppReport(YarnApplicationState.FAILED, FinalApplicationStatus.FAILED, Some(msg)) } val state = report.getYarnApplicationState if (logApplicationReport) { logInfo(s"Application report for $appId (state: $state)") // If DEBUG is enabled, log report details every iteration // Otherwise, log them every time the application changes state if (log.isDebugEnabled) { logDebug(formatReportDetails(report)) } else if (lastState != state) { logInfo(formatReportDetails(report)) } } if (lastState != state) { state match { case YarnApplicationState.RUNNING => reportLauncherState(SparkAppHandle.State.RUNNING) case YarnApplicationState.FINISHED => report.getFinalApplicationStatus match { case FinalApplicationStatus.FAILED => reportLauncherState(SparkAppHandle.State.FAILED) case FinalApplicationStatus.KILLED => reportLauncherState(SparkAppHandle.State.KILLED) case _ => reportLauncherState(SparkAppHandle.State.FINISHED) } case YarnApplicationState.FAILED => reportLauncherState(SparkAppHandle.State.FAILED) case YarnApplicationState.KILLED => reportLauncherState(SparkAppHandle.State.KILLED) case _ => } } if (state == YarnApplicationState.FINISHED || state == YarnApplicationState.FAILED || state == YarnApplicationState.KILLED) { cleanupStagingDir(appId) return createAppReport(report) 
} if (returnOnRunning && state == YarnApplicationState.RUNNING) { return createAppReport(report) } lastState = state } // Never reached, but keeps compiler happy throw new SparkException("While loop is depleted! This should never happen...") } private def formatReportDetails(report: ApplicationReport): String = { val details = Seq[(String, String)]( ("client token", getClientToken(report)), ("diagnostics", report.getDiagnostics), ("ApplicationMaster host", report.getHost), ("ApplicationMaster RPC port", report.getRpcPort.toString), ("queue", report.getQueue), ("start time", report.getStartTime.toString), ("final status", report.getFinalApplicationStatus.toString), ("tracking URL", report.getTrackingUrl), ("user", report.getUser) ) // Use more loggable format if value is null or empty details.map { case (k, v) => val newValue = Option(v).filter(_.nonEmpty).getOrElse("N/A") s"\n\t $k: $newValue" }.mkString("") } /** * Submit an application to the ResourceManager. * If set spark.yarn.submit.waitAppCompletion to true, it will stay alive * reporting the application's status until the application has exited for any reason. * Otherwise, the client process will exit after submission. * If the application finishes with a failed, killed, or undefined status, * throw an appropriate SparkException. 
*/ def run(): Unit = { this.appId = submitApplication() if (!launcherBackend.isConnected() && fireAndForget) { val report = getApplicationReport(appId) val state = report.getYarnApplicationState logInfo(s"Application report for $appId (state: $state)") logInfo(formatReportDetails(report)) if (state == YarnApplicationState.FAILED || state == YarnApplicationState.KILLED) { throw new SparkException(s"Application $appId finished with status: $state") } } else { val YarnAppReport(appState, finalState, diags) = monitorApplication(appId) if (appState == YarnApplicationState.FAILED || finalState == FinalApplicationStatus.FAILED) { diags.foreach { err => logError(s"Application diagnostics message: $err") } throw new SparkException(s"Application $appId finished with failed status") } if (appState == YarnApplicationState.KILLED || finalState == FinalApplicationStatus.KILLED) { throw new SparkException(s"Application $appId is killed") } if (finalState == FinalApplicationStatus.UNDEFINED) { throw new SparkException(s"The final status of application $appId is undefined") } } } private def findPySparkArchives(): Seq[String] = { sys.env.get("PYSPARK_ARCHIVES_PATH") .map(_.split(",").toSeq) .getOrElse { val pyLibPath = Seq(sys.env("SPARK_HOME"), "python", "lib").mkString(File.separator) val pyArchivesFile = new File(pyLibPath, "pyspark.zip") require(pyArchivesFile.exists(), s"$pyArchivesFile not found; cannot run pyspark application in YARN mode.") val py4jFile = new File(pyLibPath, "py4j-0.10.7-src.zip") require(py4jFile.exists(), s"$py4jFile not found; cannot run pyspark application in YARN mode.") Seq(pyArchivesFile.getAbsolutePath(), py4jFile.getAbsolutePath()) } } } private object Client extends Logging { // Alias for the user jar val APP_JAR_NAME: String = "__app__.jar" // URI scheme that identifies local resources val LOCAL_SCHEME = "local" // Staging directory for any temporary jars or files val SPARK_STAGING: String = ".sparkStaging" // Staging directory is private! 
-> rwx-------- val STAGING_DIR_PERMISSION: FsPermission = FsPermission.createImmutable(Integer.parseInt("700", 8).toShort) // App files are world-wide readable and owner writable -> rw-r--r-- val APP_FILE_PERMISSION: FsPermission = FsPermission.createImmutable(Integer.parseInt("644", 8).toShort) // Distribution-defined classpath to add to processes val ENV_DIST_CLASSPATH = "SPARK_DIST_CLASSPATH" // Subdirectory where the user's Spark and Hadoop config files will be placed. val LOCALIZED_CONF_DIR = "__spark_conf__" // Subdirectory in the conf directory containing Hadoop config files. val LOCALIZED_HADOOP_CONF_DIR = "__hadoop_conf__" // File containing the conf archive in the AM. See prepareLocalResources(). val LOCALIZED_CONF_ARCHIVE = LOCALIZED_CONF_DIR + ".zip" // Name of the file in the conf archive containing Spark configuration. val SPARK_CONF_FILE = "__spark_conf__.properties" // Subdirectory where the user's python files (not archives) will be placed. val LOCALIZED_PYTHON_DIR = "__pyfiles__" // Subdirectory where Spark libraries will be placed. val LOCALIZED_LIB_DIR = "__spark_libs__" /** * Return the path to the given application's staging directory. */ private def getAppStagingDir(appId: ApplicationId): String = { buildPath(SPARK_STAGING, appId.toString()) } /** * Populate the classpath entry in the given environment map with any application * classpath specified through the Hadoop and Yarn configurations. 
*/ private[yarn] def populateHadoopClasspath(conf: Configuration, env: HashMap[String, String]) : Unit = { val classPathElementsToAdd = getYarnAppClasspath(conf) ++ getMRAppClasspath(conf) classPathElementsToAdd.foreach { c => YarnSparkHadoopUtil.addPathToEnvironment(env, Environment.CLASSPATH.name, c.trim) } } private def getYarnAppClasspath(conf: Configuration): Seq[String] = Option(conf.getStrings(YarnConfiguration.YARN_APPLICATION_CLASSPATH)) match { case Some(s) => s.toSeq case None => getDefaultYarnApplicationClasspath } private def getMRAppClasspath(conf: Configuration): Seq[String] = Option(conf.getStrings("mapreduce.application.classpath")) match { case Some(s) => s.toSeq case None => getDefaultMRApplicationClasspath } private[yarn] def getDefaultYarnApplicationClasspath: Seq[String] = YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH.toSeq private[yarn] def getDefaultMRApplicationClasspath: Seq[String] = StringUtils.getStrings(MRJobConfig.DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH).toSeq /** * Populate the classpath entry in the given environment map. * * User jars are generally not added to the JVM's system classpath; those are handled by the AM * and executor backend. When the deprecated `spark.yarn.user.classpath.first` is used, user jars * are included in the system classpath, though. The extra class path and other uploaded files are * always made available through the system class path. * * @param args Client arguments (when starting the AM) or null (when starting executors). 
*/ private[yarn] def populateClasspath( args: ClientArguments, conf: Configuration, sparkConf: SparkConf, env: HashMap[String, String], extraClassPath: Option[String] = None): Unit = { extraClassPath.foreach { cp => addClasspathEntry(getClusterPath(sparkConf, cp), env) } addClasspathEntry(Environment.PWD.$$(), env) addClasspathEntry(Environment.PWD.$$() + Path.SEPARATOR + LOCALIZED_CONF_DIR, env) if (sparkConf.get(USER_CLASS_PATH_FIRST)) { // in order to properly add the app jar when user classpath is first // we have to do the mainJar separate in order to send the right thing // into addFileToClasspath val mainJar = if (args != null) { getMainJarUri(Option(args.userJar)) } else { getMainJarUri(sparkConf.get(APP_JAR)) } mainJar.foreach(addFileToClasspath(sparkConf, conf, _, APP_JAR_NAME, env)) val secondaryJars = if (args != null) { getSecondaryJarUris(Option(sparkConf.get(JARS_TO_DISTRIBUTE))) } else { getSecondaryJarUris(sparkConf.get(SECONDARY_JARS)) } secondaryJars.foreach { x => addFileToClasspath(sparkConf, conf, x, null, env) } } // Add the Spark jars to the classpath, depending on how they were distributed. addClasspathEntry(buildPath(Environment.PWD.$$(), LOCALIZED_LIB_DIR, "*"), env) if (sparkConf.get(SPARK_ARCHIVE).isEmpty) { sparkConf.get(SPARK_JARS).foreach { jars => jars.filter(isLocalUri).foreach { jar => val uri = new URI(jar) addClasspathEntry(getClusterPath(sparkConf, uri.getPath()), env) } } } populateHadoopClasspath(conf, env) sys.env.get(ENV_DIST_CLASSPATH).foreach { cp => addClasspathEntry(getClusterPath(sparkConf, cp), env) } // Add the localized Hadoop config at the end of the classpath, in case it contains other // files (such as configuration files for different services) that are not part of the // YARN cluster's config. addClasspathEntry( buildPath(Environment.PWD.$$(), LOCALIZED_CONF_DIR, LOCALIZED_HADOOP_CONF_DIR), env) } /** * Returns a list of URIs representing the user classpath. * * @param conf Spark configuration. 
*/ def getUserClasspath(conf: SparkConf): Array[URI] = { val mainUri = getMainJarUri(conf.get(APP_JAR)) val secondaryUris = getSecondaryJarUris(conf.get(SECONDARY_JARS)) (mainUri ++ secondaryUris).toArray } private def getMainJarUri(mainJar: Option[String]): Option[URI] = { mainJar.flatMap { path => val uri = Utils.resolveURI(path) if (uri.getScheme == LOCAL_SCHEME) Some(uri) else None }.orElse(Some(new URI(APP_JAR_NAME))) } private def getSecondaryJarUris(secondaryJars: Option[Seq[String]]): Seq[URI] = { secondaryJars.getOrElse(Nil).map(new URI(_)) } /** * Adds the given path to the classpath, handling "local:" URIs correctly. * * If an alternate name for the file is given, and it's not a "local:" file, the alternate * name will be added to the classpath (relative to the job's work directory). * * If not a "local:" file and no alternate name, the linkName will be added to the classpath. * * @param conf Spark configuration. * @param hadoopConf Hadoop configuration. * @param uri URI to add to classpath (optional). * @param fileName Alternate name for the file (optional). * @param env Map holding the environment variables. */ private def addFileToClasspath( conf: SparkConf, hadoopConf: Configuration, uri: URI, fileName: String, env: HashMap[String, String]): Unit = { if (uri != null && uri.getScheme == LOCAL_SCHEME) { addClasspathEntry(getClusterPath(conf, uri.getPath), env) } else if (fileName != null) { addClasspathEntry(buildPath(Environment.PWD.$$(), fileName), env) } else if (uri != null) { val localPath = getQualifiedLocalPath(uri, hadoopConf) val linkName = Option(uri.getFragment()).getOrElse(localPath.getName()) addClasspathEntry(buildPath(Environment.PWD.$$(), linkName), env) } } /** * Add the given path to the classpath entry of the given environment map. * If the classpath is already set, this appends the new path to the existing classpath. 
*/ private def addClasspathEntry(path: String, env: HashMap[String, String]): Unit = YarnSparkHadoopUtil.addPathToEnvironment(env, Environment.CLASSPATH.name, path) /** * Returns the path to be sent to the NM for a path that is valid on the gateway. * * This method uses two configuration values: * * - spark.yarn.config.gatewayPath: a string that identifies a portion of the input path that may * only be valid in the gateway node. * - spark.yarn.config.replacementPath: a string with which to replace the gateway path. This may * contain, for example, env variable references, which will be expanded by the NMs when * starting containers. * * If either config is not available, the input path is returned. */ def getClusterPath(conf: SparkConf, path: String): String = { val localPath = conf.get(GATEWAY_ROOT_PATH) val clusterPath = conf.get(REPLACEMENT_ROOT_PATH) if (localPath != null && clusterPath != null) { path.replace(localPath, clusterPath) } else { path } } /** * Return whether two URI represent file system are the same */ private[spark] def compareUri(srcUri: URI, dstUri: URI): Boolean = { if (srcUri.getScheme() == null || srcUri.getScheme() != dstUri.getScheme()) { return false } val srcAuthority = srcUri.getAuthority() val dstAuthority = dstUri.getAuthority() if (srcAuthority != null && !srcAuthority.equalsIgnoreCase(dstAuthority)) { return false } var srcHost = srcUri.getHost() var dstHost = dstUri.getHost() // In HA or when using viewfs, the host part of the URI may not actually be a host, but the // name of the HDFS namespace. Those names won't resolve, so avoid even trying if they // match. 
if (srcHost != null && dstHost != null && srcHost != dstHost) { try { srcHost = InetAddress.getByName(srcHost).getCanonicalHostName() dstHost = InetAddress.getByName(dstHost).getCanonicalHostName() } catch { case e: UnknownHostException => return false } } Objects.equal(srcHost, dstHost) && srcUri.getPort() == dstUri.getPort() } /** * Return whether the two file systems are the same. */ protected def compareFs(srcFs: FileSystem, destFs: FileSystem): Boolean = { val srcUri = srcFs.getUri() val dstUri = destFs.getUri() compareUri(srcUri, dstUri) } /** * Given a local URI, resolve it and return a qualified local path that corresponds to the URI. * This is used for preparing local resources to be included in the container launch context. */ private def getQualifiedLocalPath(localURI: URI, hadoopConf: Configuration): Path = { val qualifiedURI = if (localURI.getScheme == null) { // If not specified, assume this is in the local filesystem to keep the behavior // consistent with that of Hadoop new URI(FileSystem.getLocal(hadoopConf).makeQualified(new Path(localURI)).toString) } else { localURI } new Path(qualifiedURI) } /** * Whether to consider jars provided by the user to have precedence over the Spark jars when * loading user classes. */ def isUserClassPathFirst(conf: SparkConf, isDriver: Boolean): Boolean = { if (isDriver) { conf.get(DRIVER_USER_CLASS_PATH_FIRST) } else { conf.get(EXECUTOR_USER_CLASS_PATH_FIRST) } } /** * Joins all the path components using Path.SEPARATOR. */ def buildPath(components: String*): String = { components.mkString(Path.SEPARATOR) } /** Returns whether the URI is a "local:" URI. 
*/ def isLocalUri(uri: String): Boolean = { uri.startsWith(s"$LOCAL_SCHEME:") } def createAppReport(report: ApplicationReport): YarnAppReport = { val diags = report.getDiagnostics() val diagsOpt = if (diags != null && diags.nonEmpty) Some(diags) else None YarnAppReport(report.getYarnApplicationState(), report.getFinalApplicationStatus(), diagsOpt) } } private[spark] class YarnClusterApplication extends SparkApplication { override def start(args: Array[String], conf: SparkConf): Unit = { // SparkSubmit would use yarn cache to distribute files & jars in yarn mode, // so remove them from sparkConf here for yarn mode. conf.remove("spark.jars") conf.remove("spark.files") new Client(new ClientArguments(args), conf).run() } } private[spark] case class YarnAppReport( appState: YarnApplicationState, finalState: FinalApplicationStatus, diagnostics: Option[String])
{ "content_hash": "7af5adb3aba4f946b92a0528828aa95a", "timestamp": "", "source": "github", "line_count": 1491, "max_line_length": 100, "avg_line_length": 41.201877934272304, "alnum_prop": 0.6802969136606329, "repo_name": "lxsmnv/spark", "id": "7225ff03dc34e4a27402af11a3d5a124ed686ff8", "size": "62232", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "33839" }, { "name": "Batchfile", "bytes": "30285" }, { "name": "C", "bytes": "1493" }, { "name": "CSS", "bytes": "23956" }, { "name": "HTML", "bytes": "10056" }, { "name": "Java", "bytes": "3287204" }, { "name": "JavaScript", "bytes": "144886" }, { "name": "Makefile", "bytes": "7774" }, { "name": "PLpgSQL", "bytes": "163236" }, { "name": "PowerShell", "bytes": "3756" }, { "name": "Python", "bytes": "2559106" }, { "name": "R", "bytes": "1104088" }, { "name": "Roff", "bytes": "15276" }, { "name": "SQLPL", "bytes": "29964" }, { "name": "Scala", "bytes": "25268053" }, { "name": "Shell", "bytes": "166580" }, { "name": "Thrift", "bytes": "33605" } ], "symlink_target": "" }
import { PreparedCommits } from '../interfaces/prepared-commits.interface';

/**
 * Prepared commits
 *
 * Test fixture mapping each release-bump level (major / minor / patch /
 * none) to a list of file changes plus the conventional-commit metadata
 * that should trigger that level.
 */
export const preparedCommits: PreparedCommits = {
  // A "BREAKING CHANGE:" footer in the commit body drives a major bump.
  major: [
    {
      fileName: 'textarea.js',
      fileContent: '// Textarea',
      commit: {
        type: 'refactor',
        scope: 'textarea',
        message: 'Rename showCounter attribute to counter',
        body: 'BREAKING CHANGE: The textarea showCounter attribute is now called counter'
      }
    }
  ],
  // "feat" commits drive a minor bump.
  minor: [
    {
      fileName: 'input.js',
      fileContent: '// Input',
      commit: {
        type: 'feat',
        scope: 'input',
        message: 'Add input component'
      }
    },
    {
      fileName: 'checkbox.js',
      // NOTE(review): 'Chckbox' looks like a typo, but it is fixture content
      // that tests may compare verbatim — preserved as-is.
      fileContent: '// Chckbox',
      commit: {
        type: 'feat',
        scope: 'checkbox',
        message: 'Add checkbox & checkbox group component'
      }
    }
  ],
  // Commits without a conventional type should not trigger any release.
  none: [
    {
      fileName: 'CONTRIBUTORS.md',
      fileContent: '# Contributors',
      commit: {
        message: 'Add contributors file'
      }
    }
  ],
  // "perf" and "fix" commits drive a patch bump.
  patch: [
    {
      fileName: 'select.js',
      fileContent: '// Select',
      commit: {
        type: 'perf',
        scope: 'select',
        message: 'Improve rendering performance for select options loop'
      }
    },
    {
      fileName: 'button.js',
      fileContent: '// Button',
      commit: {
        type: 'fix',
        scope: 'button',
        message: 'Add button type to prevent possible rendering issues'
      }
    }
  ]
};
{ "content_hash": "02ac59fc1b5ab84b278e146bea8e6136", "timestamp": "", "source": "github", "line_count": 70, "max_line_length": 97, "avg_line_length": 26.3, "alnum_prop": 0.4296577946768061, "repo_name": "dominique-mueller/automatic-release", "id": "dc3eeafb65871b4966e8b9cf8c96a279b32d85c3", "size": "1841", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "test/data/prepared-commits.ts", "mode": "33188", "license": "mit", "language": [ { "name": "Handlebars", "bytes": "1962" }, { "name": "JavaScript", "bytes": "218" }, { "name": "TypeScript", "bytes": "67598" } ], "symlink_target": "" }
package radial

import (
	"math"
)

// DegToRad converts an angle measured in degrees to the equivalent
// angle in radians.
func DegToRad(deg float64) (rad float64) {
	rad = deg * math.Pi / 180
	return
}

// RadToDeg converts an angle measured in radians to the equivalent
// angle in degrees.
func RadToDeg(rad float64) (deg float64) {
	deg = rad * 180 / math.Pi
	return
}
{ "content_hash": "36d442a8731bbae651dcc7bd6c38e30a", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 40, "avg_line_length": 17.133333333333333, "alnum_prop": 0.7042801556420234, "repo_name": "Masterminds/convert", "id": "f7660be6284ee5584ba55e3de47b2f459c8e89a7", "size": "322", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "radial/radial.go", "mode": "33188", "license": "mit", "language": [ { "name": "Go", "bytes": "4710" } ], "symlink_target": "" }
Monster Battle
==========================

## Install

### Prerequisites

- Node.js - Download and Install Node.js with [NVM](https://github.com/creationix/nvm) (Node Version Manager) - Simple bash script to manage multiple active node.js versions.

```
brew install mongodb
npm install -g grunt
```

**NOTE:** After installing Node.js and once the MongoDB server is running, it's time to run your server.

```
$ git clone git@github.com:zhangchiqing/MonBattle.git
$ cd MonBattle
$ npm install
$ mongodb
$ grunt
```

Then visit [http://localhost:3001/](http://localhost:3001/)

### Directory structure

```
-app/
  |__config/
  |__controllers/
  |__helper
  |__models/
  |__mailer/
  |__views/
  |__routes
-public/
  |__css (all files will generate from Grunt)
  |__js
  |__less
  |__fonts
  |__img
  favicon.ico
-Gruntfile.coffee
```
{ "content_hash": "421b21646defab6fde11dfd36de0e142", "timestamp": "", "source": "github", "line_count": 48, "max_line_length": 174, "avg_line_length": 17.625, "alnum_prop": 0.6442080378250591, "repo_name": "zhangchiqing/MonBattle", "id": "be72059130e8040f0f4ad12b9fa8dca158d9ea79", "size": "846", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "341637" }, { "name": "CoffeeScript", "bytes": "4446" }, { "name": "JavaScript", "bytes": "664488" } ], "symlink_target": "" }
categories: - tumblr - archive - quote date: 2008-01-14T18:34:13Z title: Quote 20080114 url: /2008/01/14/quote-20080114/ --- <blockquote><p>The truth is that most artists are mediocre. Most art is ephemeral. The good eye, the true patron, recognises and supports the best. For my money, the best artist alive is Cy Twombly</p><footer><cite><a href="http://arts.guardian.co.uk/art/visualart/story/0,,2236945,00.html?gusrc=rss&feed=40">Jonathan Jones on how Rome woke up to modern art</a></cite></footer></blockquote>
{ "content_hash": "667b2d3cd5a481f09d7d711ca7a7c183", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 390, "avg_line_length": 51.7, "alnum_prop": 0.746615087040619, "repo_name": "stephenscott/sscott2", "id": "4a5befbb2e2d1acb6ad61117792ce74bfe1ccdc5", "size": "521", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "site/content/archive/2008-01-14-quote-20080114.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "123" }, { "name": "JavaScript", "bytes": "3187" } ], "symlink_target": "" }
import os
import shutil
import urllib.request

import podcastparser

# ToDo
# - compartementalise more, seperate podcast parsing from file generation
# - collect all podcasts in one variable and derive pls and m3u file


def get_episodes(episodes):
    """Reduce parsed feed episodes to a list of {'title', 'url'} dicts.

    Only the first enclosure of each episode is used.
    """
    content = []
    for episode in episodes:
        content.append({
            'title': episode['title'],
            'url': episode['enclosures'][0]['url'],
        })
    return content


def pls_generator(name, content):
    """Write a PLS playlist for *content* and return the written filename.

    *name* is the playlist base name (without extension); *content* is a
    list of {'title', 'url'} dicts as produced by get_episodes().
    """
    nr_entries = len(content)
    version = 2
    filename = name + '.pls'

    pls_lines = []
    # PLS header.
    pls_lines.append('[playlist] \n')

    # PLS entries are 1-indexed; Length=-1 means "unknown duration".
    for nr, entry in enumerate(content, start=1):
        pls_lines.append('\n')
        pls_lines.append('File{:d}={:s}\n'.format(nr, entry['url']))
        pls_lines.append('Title{:d}={:s}\n'.format(nr, entry['title']))
        pls_lines.append('Length{:d}=-1\n'.format(nr))

    # PLS footer.
    pls_lines.append('\n')
    pls_lines.append('NumberOfEntries={:d}\n'.format(nr_entries))
    pls_lines.append('Version={:d}'.format(version))

    with open(filename, 'w', encoding='utf-8') as f:
        f.writelines(pls_lines)
    return filename


def make_playlist(feedurl, get_newest=False):
    """Fetch a podcast feed and write a PLS playlist for its episodes.

    Returns the playlist filename, or (filename, newest_episode) when
    *get_newest* is true. The newest episode is assumed to be the first
    entry of the feed.
    """
    # Fetch and parse the podcast feed (at most 10 episodes).
    with urllib.request.urlopen(feedurl) as stream:
        podcast = podcastparser.parse(feedurl, stream, max_episodes=10)

    # Derive a filesystem-friendly playlist name from the feed title.
    name = 'Podcast_' + podcast['title'].replace(' ', '_')

    # Extract episodes and write the playlist.
    content = get_episodes(podcast['episodes'])
    filename = pls_generator(name, content)

    if get_newest:
        return filename, content[0]
    return filename


def main():
    """Read feed URLs from podcasts.txt, write one PLS per feed into the
    MPD music folder and an m3u of the newest episodes into the MPD
    playlist folder.
    """
    pls_path = '/var/lib/mpd/music/WEBRADIO'
    m3u_path = '/var/lib/mpd/playlists'
    # pls_path = 'WEBRADIO'
    # m3u_path = 'playlists'

    with open('podcasts.txt', 'r') as f:
        feeds = f.readlines()

    new_podcasts = []
    for feedurl in feeds:
        feedurl = feedurl.strip()
        if not feedurl:
            continue
        try:
            pls_name, newest = make_playlist(feedurl, get_newest=True)
            new_podcasts.append(newest['url'] + '\n')
            # Move the pls file to the destination folder.
            shutil.move(pls_name, os.path.join(pls_path, pls_name))
        except Exception as exc:
            # Best-effort: a broken feed must not abort the whole run,
            # but the failure should at least be visible.
            print('skipping feed {!r}: {}'.format(feedurl, exc))

    m3u_name = 'new_podcasts.m3u'
    with open(m3u_name, 'w', encoding='utf-8') as f:
        f.writelines(new_podcasts)
    shutil.move(m3u_name, os.path.join(m3u_path, m3u_name))


if __name__ == '__main__':
    main()
{ "content_hash": "79eaeaeb3cc359c5649e28dbb428e7f4", "timestamp": "", "source": "github", "line_count": 114, "max_line_length": 84, "avg_line_length": 25.114035087719298, "alnum_prop": 0.5633950401676563, "repo_name": "tcvj/pod2pls", "id": "f3e8ae53b94020a354e8118cbdc5d3628e856266", "size": "2863", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pod2pls.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "2863" } ], "symlink_target": "" }
/**
 * TestNG tests covering creation of address-book groups, fed by XML and
 * JSON data providers.
 *
 * Created by artemr on 11/25/2016.
 */
package by.stqa.pft.addressbook.tests;

import by.stqa.pft.addressbook.model.GroupData;
import by.stqa.pft.addressbook.model.Groups;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.thoughtworks.xstream.XStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;

import java.io.*;
import java.security.acl.Group;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;

public class GroupCreationTests extends TestBase {

  /** Navigates to the group page before every test method. */
  @BeforeMethod
  public void ensurePreconditions() {
    app.goTo().groupPage();
  }

  /**
   * Data provider that reads valid group fixtures from
   * src/test/resources/groups.xml via XStream.
   */
  @DataProvider
  public Iterator<Object[]> validGroupsFromXml() throws IOException {
    try(BufferedReader reader = new BufferedReader(new FileReader(new File("src/test/resources/groups.xml")))) {
      // Concatenate the whole file into a single string for XStream.
      String xml = "";
      String line = reader.readLine();
      while (line != null) {
        xml += line;
        line = reader.readLine();
      }
      XStream xstream = new XStream();
      xstream.processAnnotations(GroupData.class);
      List<GroupData> groups = (List<GroupData>) xstream.fromXML(xml);
      // TestNG expects each data set wrapped in an Object[].
      return groups.stream().map((g) -> new Object[]{g}).collect(Collectors.toList()).iterator();
    }
  }

  /**
   * Data provider that reads valid group fixtures from
   * src/test/resources/groups.json via Gson.
   */
  @DataProvider
  public Iterator<Object[]> validGroupsFromJson() throws IOException {
    try(BufferedReader reader = new BufferedReader(new FileReader(new File("src/test/resources/groups.json")))) {
      // Concatenate the whole file into a single string for Gson.
      String json = "";
      String line = reader.readLine();
      while (line != null) {
        json += line;
        line = reader.readLine();
      }
      Gson gson = new Gson();
      // TypeToken preserves the generic List<GroupData> type at runtime.
      List<GroupData> groups = gson.fromJson(json, new TypeToken<List<GroupData>>() {
      }.getType());
      return groups.stream().map((g) -> new Object[]{g}).collect(Collectors.toList()).iterator();
    }
  }

  /**
   * Creating a valid group must grow the group list by exactly one, and
   * the resulting set must equal the previous set plus the new group.
   */
  @Test(dataProvider = "validGroupsFromJson")
  public void testGroupCreation(GroupData group) {
    Groups before = app.db().groups();
    app.group().create(group);
    assertThat(app.group().count(), equalTo(before.size() + 1));
    Groups after = app.db().groups();
    // The new group's id is the maximum id after creation.
    group.withId(after.stream().mapToInt((g) -> g.getId()).max().getAsInt());
    Groups expected = before.withAdded(group);
    assertThat(after, equalTo(expected));
  }

  /**
   * A group name containing an invalid character (single quote) must be
   * rejected, leaving the group set unchanged.
   */
  @Test
  public void testBadGroupCreation() {
    Groups before = app.db().groups();
    GroupData group = new GroupData().withName("MyGroup'");
    app.group().create(group);
    assertThat(app.group().count(), equalTo(before.size()));
    Groups after = app.db().groups();
    assertThat(after, equalTo(before));
  }
}
{ "content_hash": "fc857ca5e5a77d586f692d5fa986d936", "timestamp": "", "source": "github", "line_count": 90, "max_line_length": 113, "avg_line_length": 32.17777777777778, "alnum_prop": 0.6875, "repo_name": "artemrudenko/java_training16", "id": "2a7b6c5738a89a164cfca4f6248bf42ae8e8ab68", "size": "2896", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "addressbook-web-tests/src/test/java/by/stqa/pft/addressbook/tests/GroupCreationTests.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Gherkin", "bytes": "212" }, { "name": "Java", "bytes": "154879" }, { "name": "PHP", "bytes": "419" }, { "name": "RobotFramework", "bytes": "472" } ], "symlink_target": "" }
package android.support.v4.app; import android.app.PendingIntent; import android.content.Context; import android.content.Intent; import android.os.Bundle; // Referenced classes of package android.support.v4.app: // TaskStackBuilder static class implements { public PendingIntent getPendingIntent(Context context, Intent aintent[], int i, int j, Bundle bundle) { Intent intent = new Intent(aintent[-1 + aintent.length]); intent.addFlags(0x10000000); return PendingIntent.getActivity(context, i, intent, j); } () { } }
{ "content_hash": "98029aa9f85774adb841e27be3b0f33e", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 105, "avg_line_length": 23.48, "alnum_prop": 0.6882453151618398, "repo_name": "Half-Shot/FarnApp", "id": "8f9ad28dcef5836c3ed76d7ebd09cb2a7728f165", "size": "762", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/android/support/v4/app/TaskStackBuilder$TaskStackBuilderImplBase.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "1571825" } ], "symlink_target": "" }
using System;
using System.Collections.Generic;
using System.Configuration;
using System.Data;
using System.Linq;
using System.Windows;

namespace Teammanager.View
{
    /// <summary>
    /// Interaction logic for "App.xaml". The WPF application entry point;
    /// all behavior is defined in the associated XAML markup.
    /// </summary>
    public partial class App : Application
    {
    }
}
{ "content_hash": "8b94888ebb7d28c1b588edea86d34fe6", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 42, "avg_line_length": 18.875, "alnum_prop": 0.6920529801324503, "repo_name": "LennyLeonard/RW-Ligamodus", "id": "b78b7720e36a7657ff4736e875d089a4d61b744e", "size": "305", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "RW-Ligamodus/Teammanager.View/App.xaml.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C#", "bytes": "105024" } ], "symlink_target": "" }
package com.cundong.fragmenttabhost;

import android.os.Bundle;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentTabHost;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;

/**
 * Main activity: hosts three fragments in a FragmentTabHost, each tab
 * with a custom icon/label view.
 */
public class MainActivity extends FragmentActivity {

	private FragmentTabHost mTabHost;

	// Fragment classes backing each tab, in tab order.
	private Class<?> mFragments[] = { Fragment1.class, Fragment2.class,
			Fragment3.class };

	// Drawable resource for each tab's icon, in tab order.
	private int mTabDrawables[] = { R.drawable.tabspec_chats,
			R.drawable.tabspec_contacts, R.drawable.tabspec_me };

	// Label text for each tab, in tab order.
	private String mTabTitles[] = { "Chats", "Contacts", "Me" };

	@Override
	protected void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		setContentView(R.layout.activity_main);

		mTabHost = (FragmentTabHost) findViewById(android.R.id.tabhost);
		mTabHost.setup(this, getSupportFragmentManager(), R.id.realtabcontent);
		// Remove the default divider between tabs.
		mTabHost.getTabWidget().setDividerDrawable(null);

		// Register one tab per fragment; the tab tag is the index as a string.
		for (int i = 0; i < mFragments.length; i++) {
			Bundle b = new Bundle();
			b.putString("key", "Simple" + i);
			mTabHost.addTab(mTabHost.newTabSpec(String.valueOf(i)).setIndicator(getView(i)), mFragments[i], b);
		}

		// Start on the middle ("Contacts") tab.
		mTabHost.setCurrentTab(1);
	}

	/**
	 * Inflates the custom tab indicator view (icon + label) for tab
	 * index {@code i}.
	 */
	private View getView(int i) {
		View view = View.inflate(this, R.layout.tabspec_item, null);
		ImageView imageView = (ImageView) view.findViewById(R.id.image);
		TextView textView = (TextView) view.findViewById(R.id.text);
		imageView.setImageResource(mTabDrawables[i]);
		textView.setText(mTabTitles[i]);
		return view;
	}

	@Override
	public boolean onCreateOptionsMenu(Menu menu) {
		// Inflate the menu; this adds items to the action bar if it is present.
		getMenuInflater().inflate(R.menu.main, menu);
		return true;
	}

	@Override
	public boolean onOptionsItemSelected(MenuItem item) {
		// Handle action bar item clicks here. The action bar will
		// automatically handle clicks on the Home/Up button, so long
		// as you specify a parent activity in AndroidManifest.xml.
		int id = item.getItemId();
		if (id == R.id.action_settings) {
			return true;
		}
		return super.onOptionsItemSelected(item);
	}
}
{ "content_hash": "2d73eafb8c2cbfe41b19194e6de1f3a5", "timestamp": "", "source": "github", "line_count": 74, "max_line_length": 112, "avg_line_length": 30.216216216216218, "alnum_prop": 0.7401610017889088, "repo_name": "cundong/FragmentTabHostDemo", "id": "d9db9cb7a488435eb0389ffabd566aed0c54d0ed", "size": "2288", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/com/cundong/fragmenttabhost/MainActivity.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "3797" } ], "symlink_target": "" }
\documentclass{beamer} \usepackage{color,amsmath} \usepackage{subfigure} \usepackage{booktabs} \usepackage{framed} \usepackage{comment} \def\vf{\vfill} %%%%%%%%%%%%%%%%%%%%%%%%%% \title[]{Ethics} \author[]{Matthew J. Salganik\\Department of Sociology\\Princeton University} \date[]{Summer Institute in Computational Social Science\\June 19, 2017 \vfill \begin{flushright} \vspace{0.6in} \includegraphics[width=0.1\textwidth]{figures/cc-by.png} \end{flushright} } \begin{document} %%%%%%%%%%%%%%%%%%%%%%%%%% \frame{\titlepage} %%%%%%%%%%%%%%%%%%%%%%%%%% \begin{frame} \begin{center} \LARGE{Why care about ethics?} \end{center} \pause \begin{itemize} \item fear-based reasons \pause \item hope-based reasons \pause \item we have no choice \end{itemize} \end{frame} %%%%%%%%%%%%%%%%%%%%%%%%%% \begin{frame} In the past, what we \textbf{could} do has been the limitation, increasingly what we \textbf{should} do will be the limitation.\\ Research ethics will become increasingly central; it will become harder and harder to avoid.\\ \end{frame} %%%%%%%%%%%%%%%%%%%%%%%%%% \begin{frame} I want you to be able to: \begin{itemize} \item design ethically thoughtful research \item explain your decisions to others \end{itemize} \end{frame} %%%%%%%%%%%%%%%%%%%%%%%%%% \begin{frame} A note on lectures and readings:\\ For today, I'm going to repeat some of what is in my book, but going forward, I will assume that you've read the chapter. \end{frame} %%%%%%%%%%%%%%%%%%%%%%%%%% \begin{frame} Three approaches: \begin{itemize} \item Rules-based approach \pause \item Ad hoc approach \pause \item Principles-based approach \end{itemize} \end{frame} %%%%%%%%%%%%%%%%%%%%%%%%%% \begin{frame} Examples: \begin{itemize} \item Emotional contagion \pause \item Tastes, Ties, and Time \pause \item Encore \pause \item Think-pair-share other examples? \end{itemize} \end{frame} %%%%%%%%%%%%%%%%%%%%%%%%%%% \begin{frame} What's the problem? 
\begin{itemize} \item increasing power \pause \item inconsistent and overlapping rules, norms, and expectations \end{itemize} \end{frame} %%%%%%%%%%%%%%%%%%%%%%%%%%% \begin{frame} \begin{center} \includegraphics[width=0.9\textwidth]{figures/ethics_schematic_simple.png} \end{center} \end{frame} %%%%%%%%%%%%%%%%%%%%%%%%%%% \begin{frame} \begin{itemize} \item Respect for persons \end{itemize} \end{frame} %%%%%%%%%%%%%%%%%%%%%%%%%% \begin{frame} Respect for persons:\\ Participants decide not you \end{frame} %%%%%%%%%%%%%%%%%%%%%%%%%% \begin{frame} \begin{itemize} \item Respect for persons \item Beneficence \end{itemize} \end{frame} %%%%%%%%%%%%%%%%%%%%%%%%%% \begin{frame} Beneficence:\\ Minimize risk, maximize benefits, then decide \end{frame} %%%%%%%%%%%%%%%%%%%%%%%%%% \begin{frame} \begin{itemize} \item Respect for persons \item Beneficence \item Justice \end{itemize} \end{frame} %%%%%%%%%%%%%%%%%%%%%%%%%% \begin{frame} Justice:\\ distribution of burdens and benefits of research \pause \begin{itemize} \item poorly education and disenfranchised citizens \item prisoners \item institutionalized and mentally disabled children \item old and debilitated hospital patients \end{itemize} \pause Also includes access to benefits of research \end{frame} %%%%%%%%%%%%%%%%%%%%%%%%%% \begin{frame} \begin{itemize} \item Respect for persons \item Beneficence \item Justice \item Respect for Law and Public Interest \end{itemize} \end{frame} %%%%%%%%%%%%%%%%%%%%%%%%% \begin{frame} Respect for Law and Public Interest:\\ \begin{itemize} \item compliance \item transparency-based accountability \end{itemize} \end{frame} %%%%%%%%%%%%%%%%%%%%%%%%% \begin{frame} Terms-of-service agreements \end{frame} %%%%%%%%%%%%%%%%%%%%%%%%% \begin{frame} \begin{center} \includegraphics[width=0.9\textwidth]{figures/soeller_mapwatch_2016_title.png} \end{center} \vf \url{http://dx.doi.org/10.1145/2872427.2883016} \end{frame} %%%%%%%%%%%%%%%%%%%%%%%%% \begin{frame} Abstract:\\ ``Maps have long played a crucial 
role in enabling people to conceptualize and navigate the world around them. However, maps also encode the world-views of their creators. Disputed international borders are one example of this: governments may mandate that cartographers produce maps that conform to their view of a territorial dispute. Today, online maps maintained by private corporations have become the norm. However, these new maps are still subject to old debates. Companies like Google and Bing resolve these disputes by localizing their maps to meet government requirements and user preferences, i.e., users in different locations are shown maps with different international boundaries. We argue that this non-transparent personalization of maps may exacerbate nationalistic disputes by promoting divergent views of geopolitical realities.'' \end{frame} %%%%%%%%%%%%%%%%%%%%%%% \begin{frame} Abstract, part 2:\\ ``To address this problem, we present MapWatch, our system for detecting and cataloging personalization of international borders in online maps. Our system continuously crawls all map tiles from Google and Bing maps, and leverages crowdworkers to identify border personalization. 
In this paper, we present the architecture of MapWatch, and analyze the instances of border personalization on Google and Bing, including one border change that MapWatch identified live, as Google was rolling out the update.''
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%

\begin{frame}
\begin{center}
\includegraphics[width=\textwidth]{figures/soeller_mapwatch_2016_fig5.png}
\end{center}
\vf
\url{http://dx.doi.org/10.1145/2872427.2883016}
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%

\begin{frame}
\begin{center}
\includegraphics[width=\textwidth]{figures/soeller_mapwatch_2016_ethics.png}
\end{center}
\vf
\url{http://dx.doi.org/10.1145/2872427.2883016}
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%

\begin{frame}
Researchers (with the support of the ACLU) have filed a case challenging the CFAA, Sandvig v.\ Lynch:\\
\tiny{\textcolor{blue}{\url{https://www.aclu.org/cases/sandvig-v-lynch-challenge-cfaa-prohibition-uncovering-racial-discrimination-online}}}
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%

\begin{frame}
\begin{itemize}
\item Respect for persons
\item Beneficence
\item Justice
\item Respect for Law and Public Interest
\end{itemize}
\vf
How do you balance these four principles?
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%

\begin{frame}
\begin{itemize}
\item Consequentialism
\item Deontology
\end{itemize}
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%

\begin{frame}
{Quick question}

In arguing against the Emotional Contagion experiment, Kleinsman and Buckley (2015) wrote:
\begin{quote}
``Even if it is true that the risks for the Facebook experiment were low and even if, in hindsight, the results are judged to be useful, there is an important principle at stake here that must be upheld. In the same way that stealing is stealing no matter what amounts are involved, so we all have a right not to be experimented on without our knowledge and consent, whatever the nature of the research.''
\end{quote}
This argument is rooted in which ethical framework?
\begin{enumerate} \item Consequentialism \item Deontology \end{enumerate} \end{frame} %%%%%%%%%%%%%%%%%%%%%%%%% \begin{frame} \begin{center} \includegraphics[width=0.9\textwidth]{figures/ethics_schematic_simple.png} \end{center} \end{frame} %%%%%%%%%%%%%%%%%%%%%%%%% \begin{frame} Applying these ideas can be tricky, and there are 4 areas of particular difficulty \end{frame} %%%%%%%%%%%%%%%%%%%%%%%%%%% \end{document}
{ "content_hash": "a01ad6fe7f600608938e26fc1c3d8aeb", "timestamp": "", "source": "github", "line_count": 298, "max_line_length": 849, "avg_line_length": 25.177852348993287, "alnum_prop": 0.6914567506330801, "repo_name": "compsocialscience/summer-institute", "id": "91d4bf9e9b8352d1344c99086c0390d7ba1176ee", "size": "7503", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "2017/materials/day1-intro-ethics/03-ethics.tex", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "1788" }, { "name": "HTML", "bytes": "726694112" }, { "name": "JavaScript", "bytes": "1674327" }, { "name": "Jupyter Notebook", "bytes": "644511" }, { "name": "Python", "bytes": "4992" }, { "name": "R", "bytes": "53862" }, { "name": "Rich Text Format", "bytes": "25085" }, { "name": "Ruby", "bytes": "1670" }, { "name": "SCSS", "bytes": "2867" }, { "name": "TeX", "bytes": "3849734" } ], "symlink_target": "" }
package com.alorma.github.ui.actions;

import rx.Subscriber;

/**
 * Base class for one-shot, self-subscribing actions. An action optionally
 * carries an {@link ActionCallback} that subclasses may notify; the
 * {@link Subscriber} hooks are no-ops by default so subclasses only
 * override what they need.
 */
public abstract class Action<T> extends Subscriber<T> {

  private ActionCallback<T> callback;

  /** Performs the action. Returns {@code this} so calls can be chained. */
  public abstract Action<T> execute();

  /** Attaches a callback; fluent, returns {@code this}. */
  public Action<T> setCallback(ActionCallback<T> callback) {
    this.callback = callback;
    return this;
  }

  /** Returns the attached callback, or {@code null} if none was set. */
  public ActionCallback<T> getCallback() {
    return callback;
  }

  @Override
  public void onCompleted() {
    // Intentionally empty; subclasses may override.
  }

  @Override
  public void onError(Throwable e) {
    // Intentionally empty; subclasses may override.
  }

  @Override
  public void onNext(T t) {
    // Intentionally empty; subclasses may override.
  }
}
{ "content_hash": "a3f4cc4e32a369413014043c46f86a56", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 60, "avg_line_length": 16.03030303030303, "alnum_prop": 0.6880907372400756, "repo_name": "gitskarios/Gitskarios", "id": "f93d94d1f482727a52377fcc7516d7888ce8aa3d", "size": "529", "binary": false, "copies": "2", "ref": "refs/heads/develop", "path": "app/src/main/java/com/alorma/github/ui/actions/Action.java", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "503" }, { "name": "HTML", "bytes": "1319" }, { "name": "Java", "bytes": "1722020" }, { "name": "Shell", "bytes": "1451" } ], "symlink_target": "" }
package org.apereo.cas.services; import org.apereo.cas.config.CasCoreNotificationsConfiguration; import org.apereo.cas.config.CasCoreServicesConfiguration; import org.apereo.cas.config.CasCoreUtilConfiguration; import org.apereo.cas.configuration.CasConfigurationProperties; import org.apereo.cas.notifications.CommunicationsManager; import org.apereo.cas.notifications.sms.SmsSender; import org.apereo.cas.sms.MockSmsSender; import org.apereo.cas.support.events.service.CasRegisteredServiceExpiredEvent; import org.apereo.cas.support.events.service.CasRegisteredServicesRefreshEvent; import org.apereo.cas.util.junit.EnabledIfPortOpen; import lombok.val; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.function.Executable; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.mail.MailSenderAutoConfiguration; import org.springframework.boot.autoconfigure.mail.MailSenderValidatorAutoConfiguration; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.cloud.autoconfigure.RefreshAutoConfiguration; import org.springframework.cloud.context.environment.EnvironmentChangeEvent; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Lazy; import java.util.Set; import static org.junit.jupiter.api.Assertions.*; /** * This is {@link RegisteredServicesEventListenerTests}. 
* * @author Misagh Moayyed * @since 6.1.0 */ @SpringBootTest(classes = { RefreshAutoConfiguration.class, RegisteredServicesEventListenerTests.RegisteredServicesEventListenerTestConfiguration.class, CasCoreServicesConfiguration.class, CasCoreNotificationsConfiguration.class, CasCoreUtilConfiguration.class, MailSenderAutoConfiguration.class, MailSenderValidatorAutoConfiguration.class }, properties = { "spring.mail.host=localhost", "spring.mail.port=25000", "cas.service-registry.sms.text=Service %s has expired in CAS service registry", "cas.service-registry.sms.from=3477563421", "cas.service-registry.mail.from=admin@example.org", "cas.service-registry.mail.subject=Sample Subject", "cas.service-registry.mail.text=Service %s has expired in CAS service registry" }) @Tag("Mail") @EnabledIfPortOpen(port = 25000) @EnableConfigurationProperties(CasConfigurationProperties.class) public class RegisteredServicesEventListenerTests { @Autowired @Qualifier("servicesManager") private ServicesManager servicesManager; @Autowired @Qualifier("communicationsManager") private CommunicationsManager communicationsManager; @Autowired private CasConfigurationProperties casProperties; @Test public void verifyServiceExpirationEventNoContact() { val registeredService = RegisteredServiceTestUtils.getRegisteredService(); assertDoesNotThrow(new Executable() { @Override public void execute() throws Throwable { val listener = new RegisteredServicesEventListener(servicesManager, casProperties, communicationsManager); val event = new CasRegisteredServiceExpiredEvent(this, registeredService, false); listener.handleRegisteredServiceExpiredEvent(event); } }); } @Test public void verifyServiceExpirationEventWithContact() { val registeredService = RegisteredServiceTestUtils.getRegisteredService(); val contact = new DefaultRegisteredServiceContact(); contact.setName("Test"); contact.setEmail("casuser@example.org"); contact.setPhone("13477465421"); registeredService.getContacts().add(contact); val listener = 
new RegisteredServicesEventListener(this.servicesManager, casProperties, communicationsManager); val event = new CasRegisteredServiceExpiredEvent(this, registeredService, false); assertDoesNotThrow(new Executable() { @Override public void execute() throws Throwable { listener.handleRegisteredServiceExpiredEvent(event); } }); } @Test public void verifyServiceExpirationWithRemovalEvent() { val registeredService = RegisteredServiceTestUtils.getRegisteredService(); val contact = new DefaultRegisteredServiceContact(); contact.setName("Test"); contact.setEmail("casuser@example.org"); contact.setPhone("13477465421"); registeredService.getContacts().add(contact); val listener = new RegisteredServicesEventListener(this.servicesManager, casProperties, communicationsManager); listener.handleRefreshEvent(new CasRegisteredServicesRefreshEvent(this)); listener.handleEnvironmentChangeEvent(new EnvironmentChangeEvent(Set.of())); val event = new CasRegisteredServiceExpiredEvent(this, registeredService, true); listener.handleRegisteredServiceExpiredEvent(event); } @TestConfiguration("RegisteredServicesEventListenerTestConfiguration") @Lazy(false) public static class RegisteredServicesEventListenerTestConfiguration { @ConditionalOnMissingBean(name = "smsSender") @Bean public SmsSender smsSender() { return new MockSmsSender(); } } }
{ "content_hash": "528c0d6cb6c380d074accca179c7544f", "timestamp": "", "source": "github", "line_count": 129, "max_line_length": 122, "avg_line_length": 43.48062015503876, "alnum_prop": 0.7692993403458727, "repo_name": "pdrados/cas", "id": "394cd987002c547ca9e9a4c8616b91950625201e", "size": "5609", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "core/cas-server-core-services/src/test/java/org/apereo/cas/services/RegisteredServicesEventListenerTests.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "13992" }, { "name": "Dockerfile", "bytes": "75" }, { "name": "Groovy", "bytes": "31399" }, { "name": "HTML", "bytes": "195237" }, { "name": "Java", "bytes": "12509257" }, { "name": "JavaScript", "bytes": "85879" }, { "name": "Python", "bytes": "26699" }, { "name": "Ruby", "bytes": "1323" }, { "name": "Shell", "bytes": "177491" } ], "symlink_target": "" }
package com.example.coolweather1.util;

/**
 * Callback interface for asynchronous HTTP requests.
 */
public interface HttpCallbackListener {

	/** Invoked when the request completes; {@code response} is the raw body. */
	void onFinish(String response);

	/** Invoked when the request fails with the given exception. */
	void onError(Exception e);
}
{ "content_hash": "4a691843ca34fcc9f4c6b97c2bdec756", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 39, "avg_line_length": 16.22222222222222, "alnum_prop": 0.7876712328767124, "repo_name": "xzzc/CoolWeather", "id": "299b64f5e282a3250b525c7bb185a9eec94c422b", "size": "146", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/com/example/coolweather1/util/HttpCallbackListener.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "27703" } ], "symlink_target": "" }
CommandTickets
==============

CommandTickets, also known as CTickets.
{ "content_hash": "28e60cfdac0ce725db9f9ee236ff461e", "timestamp": "", "source": "github", "line_count": 4, "max_line_length": 26, "avg_line_length": 14.5, "alnum_prop": 0.6551724137931034, "repo_name": "WaltsuHosting/CommandTickets", "id": "b3af36772705c8eb859254fc5aeeabc313367e2a", "size": "58", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
<resources>
    <!-- Application name shown in the launcher. -->
    <string name="app_name">SimpleTodo</string>
    <!-- Label for the settings action in the options menu. -->
    <string name="action_settings">Settings</string>
    <!-- Title of the item-editing activity. -->
    <string name="title_activity_edit_item">EditItemActivity</string>
</resources>
{ "content_hash": "4929bd17b2e30d9154f6e4a2640be2ac", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 69, "avg_line_length": 39.2, "alnum_prop": 0.7244897959183674, "repo_name": "koulmomo/SimpleTodo", "id": "00d1d3997739879f1ae0667ae8ba0e37b6bf1161", "size": "196", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/res/values/strings.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "7217" } ], "symlink_target": "" }
package at.ufo.app.domain.entities; import android.os.Parcel; import android.os.Parcelable; /** * Created by Marius-Constantin on 1/15/2016. */ public class Artist implements Parcelable { private int artistId; private String name; private String category; private String categoryColor; private String country; private String countryCode; private String picture; private String promoVideo; public int getArtistId() { return artistId; } public void setArtistId(int artistId) { this.artistId = artistId; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getCategory() { return category; } public void setCategory(String category) { this.category = category; } public String getCountryCode() { return countryCode; } public void setCountryCode(String countryCode) { this.countryCode = countryCode; } public String getCountry() { return country; } public void setCountry(String country) { this.country = country; } public String getPicture() { return picture; } public void setPicture(String picture) { this.picture = picture; } public String getPromoVideo() { return promoVideo; } public void setPromoVideo(String promoVideo) { this.promoVideo = promoVideo; } public String getCategoryColor() { return categoryColor; } public void setCategoryColor(String categoryColor) { this.categoryColor = categoryColor; } @Override public int describeContents() { return 0; } @Override public void writeToParcel(Parcel dest, int flags) { dest.writeInt(artistId); dest.writeString(name); dest.writeString(category); dest.writeString(categoryColor); dest.writeString(country); dest.writeString(countryCode); dest.writeString(picture); dest.writeString(promoVideo); } public static final Parcelable.Creator<Artist> CREATOR = new Parcelable.Creator<Artist>() { @Override public Artist createFromParcel(Parcel source) { return new Artist(source); } @Override public Artist[] newArray(int size) { return new Artist[size]; } }; private Artist(Parcel in) { 
artistId = in.readInt(); name = in.readString(); category = in.readString(); categoryColor = in.readString(); country = in.readString(); countryCode = in.readString(); picture = in.readString(); promoVideo = in.readString(); } public Artist() { } }
{ "content_hash": "c90c26d0a0ce614ea5a0180ff42f5a5c", "timestamp": "", "source": "github", "line_count": 126, "max_line_length": 95, "avg_line_length": 22.333333333333332, "alnum_prop": 0.6158493248045487, "repo_name": "Xpitfire/ufo", "id": "b3ec160332d1a47df1dc9dececc7650de9faffc5", "size": "2814", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "UFO.App/app/src/main/java/at/ufo/app/domain/entities/Artist.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ASP", "bytes": "1052254" }, { "name": "C#", "bytes": "726573" }, { "name": "CSS", "bytes": "4452" }, { "name": "HTML", "bytes": "33904" }, { "name": "Java", "bytes": "296747" }, { "name": "JavaScript", "bytes": "986" }, { "name": "PLpgSQL", "bytes": "9088" } ], "symlink_target": "" }
package org.eclipse.draw2d.text; import java.util.List; /** * The layout manager for {@link InlineFlow} figures. * * <P> * WARNING: This class is not intended to be subclassed by clients. * * @author hudsonr * @since 2.1 */ @SuppressWarnings({"rawtypes", "unchecked"}) public class InlineFlowLayout extends FlowContainerLayout { /** * Creates a new InlineFlowLayout with the given FlowFigure. * * @param flow * The FlowFigure */ public InlineFlowLayout(FlowFigure flow) { super(flow); } /** * Adds the given box as a line below the current line. * * @param box * the box to add */ @Override public void addLine(CompositeBox box) { endLine(); getContext().addLine(box); } /** * @see FlowContainerLayout#createNewLine() */ @Override protected void createNewLine() { currentLine = new NestedLine((InlineFlow) getFlowFigure()); setupLine(currentLine); } /** * @see FlowContext#endLine() */ @Override public void endLine() { flush(); getContext().endLine(); } /** * @see FlowContainerLayout#flush() */ @Override protected void flush() { if (currentLine != null && currentLine.isOccupied()) { // We want to preserve the state when a linebox is being added boolean sameLine = getContext().getContinueOnSameLine(); getContext().addToCurrentLine(currentLine); ((InlineFlow) getFlowFigure()).getFragments().add(currentLine); currentLine = null; getContext().setContinueOnSameLine(sameLine); } } /** * InlineFlowLayout gets this information from its context. 
* * @see FlowContext#getContinueOnSameLine() */ @Override public boolean getContinueOnSameLine() { return getContext().getContinueOnSameLine(); } /** * @see FlowContext#getWidthLookahead(FlowFigure, int[]) */ @Override public void getWidthLookahead(FlowFigure child, int result[]) { List children = getFlowFigure().getChildren(); int index = -1; if (child != null) index = children.indexOf(child); for (int i = index + 1; i < children.size(); i++) if (((FlowFigure) children.get(i)) .addLeadingWordRequirements(result)) return; getContext().getWidthLookahead(getFlowFigure(), result); } /** * @see FlowContainerLayout#isCurrentLineOccupied() */ @Override public boolean isCurrentLineOccupied() { return (currentLine != null && !currentLine.getFragments().isEmpty()) || getContext().isCurrentLineOccupied(); } /** * Clears out all fragments prior to the call to layoutChildren(). */ @Override public void preLayout() { ((InlineFlow) getFlowFigure()).getFragments().clear(); } /** * InlineFlow passes this information to its context. * * @see FlowContext#setContinueOnSameLine(boolean) */ @Override public void setContinueOnSameLine(boolean value) { getContext().setContinueOnSameLine(value); } /** * Initializes the given LineBox. Called by createNewLine(). * * @param line * The LineBox to initialize. */ protected void setupLine(LineBox line) { line.setX(0); line.setRecommendedWidth(getContext().getRemainingLineWidth()); } }
{ "content_hash": "23b408073d11070c9caf620766bc700e", "timestamp": "", "source": "github", "line_count": 139, "max_line_length": 77, "avg_line_length": 25.841726618705035, "alnum_prop": 0.592706013363029, "repo_name": "archimatetool/archi", "id": "5aa5f1b49996e528b0692e7b875056361540aa43", "size": "4132", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "org.eclipse.draw2d/src/org/eclipse/draw2d/text/InlineFlowLayout.java", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "112334" }, { "name": "HTML", "bytes": "625101" }, { "name": "Java", "bytes": "11305214" }, { "name": "JavaScript", "bytes": "222743" } ], "symlink_target": "" }
from setuptools import setup setup( name='pyaltmetric', version='0.2.0', packages=['pyaltmetric'], description='Python Altmetric API v1 wrapper', long_description=open('README.rst').read(), author='Will Earp', author_email='will.earp@icloud.com', url='https://github.com/wearp/pyaltmetric', install_requires=['requests'], license="BSD", zip_safe=True, keywords='Altmetric altmetric altmetrics api wrapper', test_suite='tests', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules' ] )
{ "content_hash": "6cbdb566b5ba523e67f3d34123ff419d", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 70, "avg_line_length": 32.76923076923077, "alnum_prop": 0.6373239436619719, "repo_name": "wearp/pyAltmetric", "id": "9da5cf2d1e9b6582099897e6a138d32d189976fb", "size": "898", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "setup.py", "mode": "33261", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "8980" } ], "symlink_target": "" }
product: material-ui title: React Accordion component components: Accordion, AccordionActions, AccordionDetails, AccordionSummary githubLabel: 'component: accordion' materialDesign: https://m1.material.io/components/expansion-panels.html waiAria: https://www.w3.org/WAI/ARIA/apg/patterns/accordion/ --- # Accordion <p class="description">The accordion component allows the user to show and hide sections of related content on a page.</p> An accordion is a lightweight container that may either be used standalone, or be connected to a larger surface, such as a card. {{"component": "modules/components/ComponentLinkHeader.js"}} :::info **Note:** Accordions are no longer documented in the [Material Design guidelines](https://m2.material.io/), but MUI will continue to support them. It was formerly known as the "expansion panel". ::: ## Basic accordion {{"demo": "BasicAccordion.js", "bg": true}} ## Controlled accordion Extend the default behavior to create an accordion with the `Accordion` component. {{"demo": "ControlledAccordions.js", "bg": true}} ## Customization Here is an example of customizing the component. You can learn more about this in the [overrides documentation page](/material-ui/customization/how-to-customize/). {{"demo": "CustomizedAccordions.js"}} ## Performance The content of Accordions is mounted by default even if the accordion is not expanded. This default behavior has server-side rendering and SEO in mind. If you render expensive component trees inside your accordion details or simply render many accordions it might be a good idea to change this default behavior by enabling the `unmountOnExit` in `TransitionProps`: ```jsx <Accordion TransitionProps={{ unmountOnExit: true }} /> ``` As with any performance optimization this is not a silver bullet. Be sure to identify bottlenecks first and then try out these optimization strategies. 
## Accessibility (WAI-ARIA: https://www.w3.org/WAI/ARIA/apg/patterns/accordion/) For optimal accessibility we recommend setting `id` and `aria-controls` on the `AccordionSummary`. The `Accordion` will derive the necessary `aria-labelledby` and `id` for the content region of the accordion.
{ "content_hash": "19d02f22437fd058083330d25832150c", "timestamp": "", "source": "github", "line_count": 60, "max_line_length": 184, "avg_line_length": 36.38333333333333, "alnum_prop": 0.7750801649106733, "repo_name": "mui-org/material-ui", "id": "f1e97ed84b63ec782531239e1081d67acb73d706", "size": "2187", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "docs/data/material/components/accordion/accordion.md", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "2126" }, { "name": "JavaScript", "bytes": "4120512" }, { "name": "TypeScript", "bytes": "3263233" } ], "symlink_target": "" }
{% extends "_base.html" %} {% import "bootstrap/wtf.html" as wtf %} {% block content %} <div class=" body-content"> <div class="row"> <h1>Estimate CBOMs</h1> </div> </div> <form class="form" role="form" method="post" action="/estimate/" enctype="multipart/form-data"> {{ form.csrf_token }} {{ form.hidden_tag() }} {{ wtf.form_errors(form, hiddens="only") }} <div class="col-lg-4 col-sm-4"> {{ wtf.form_field(form.file) }} <button class="btn btn-success" type="submit">Submit</button> </div> </form> {% endblock %}
{ "content_hash": "95931ceb0736a5bb520b336adbd43dde", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 97, "avg_line_length": 20.571428571428573, "alnum_prop": 0.5746527777777778, "repo_name": "a-leut/costed_bill_of_materials", "id": "3b2ef8f978f86edc0921368c6cb7e5c309c1a0ff", "size": "576", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "project/templates/cbom/estimate.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "56" }, { "name": "HTML", "bytes": "11206" }, { "name": "JavaScript", "bytes": "20" }, { "name": "Mako", "bytes": "412" }, { "name": "Python", "bytes": "30580" } ], "symlink_target": "" }
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml"> <head> <title>OR-Tools</title> <meta http-equiv="Content-Type" content="text/html;"/> <meta charset="utf-8"/> <!--<link rel='stylesheet' type='text/css' href="https://fonts.googleapis.com/css?family=Ubuntu:400,700,400italic"/>--> <script type="text/javascript" src="jquery.js"></script> <script type="text/javascript" src="dynsections.js"></script> <link href="doxygen.css" rel="stylesheet" type="text/css" /> <link href="styleSheet.tmp.css" rel="stylesheet" type="text/css"/> </head> <body> <div id="banner-container"> <div id="banner"> <span id="sfml">Google OR-Tools 9.4</span> </div> </div> <div id="content" style="width: 100%; overflow: hidden;"> <div style="margin-left: 15px; margin-top: 5px; float: left; color: #145A32;"> <h2>C++ Reference</h2> <ul> <li><a href="../cpp_algorithms/annotated.html">Algorithms</a></li> <li><a href="../cpp_sat/annotated.html">CP-SAT</a></li> <li><a href="../cpp_graph/annotated.html">Graph</a></li> <li><a href="../cpp_routing/annotated.html">Routing</a></li> <li><a href="../cpp_linear/annotated.html">Linear solver</a></li> </ul> </div> <div id="content"> <div align="center"> <h1 style="color: #145A32;">C++ Reference: CP-SAT</h1> </div> <!-- Generated by Doxygen 1.9.4 --> <script type="text/javascript"> /* @license magnet:?xt=urn:btih:d3d9a9a6595521f9666a5e94cc830dab83b65699&amp;dn=expat.txt MIT */ var searchBox = new SearchBox("searchBox", "search",'Search','.html'); /* @license-end */ </script> <script type="text/javascript" src="menudata.js"></script> <script type="text/javascript" src="menu.js"></script> <script type="text/javascript"> /* @license magnet:?xt=urn:btih:d3d9a9a6595521f9666a5e94cc830dab83b65699&amp;dn=expat.txt MIT */ $(function() { initMenu('',true,false,'search.php','Search'); $(document).ready(function() { init_search(); }); }); /* @license-end */ 
</script> <div id="main-nav"></div> </div><!-- top --> <div id="side-nav" class="ui-resizable side-nav-resizable"> <div id="nav-tree"> <div id="nav-tree-contents"> <div id="nav-sync" class="sync"></div> </div> </div> <div id="splitbar" style="-moz-user-select:none;" class="ui-resizable-handle"> </div> </div> <script type="text/javascript"> /* @license magnet:?xt=urn:btih:d3d9a9a6595521f9666a5e94cc830dab83b65699&amp;dn=expat.txt MIT */ $(document).ready(function(){initNavTree('classoperations__research_1_1sat_1_1_automaton_constraint.html',''); initResizable(); }); /* @license-end */ </script> <div id="doc-content"> <!-- window showing the filter options --> <div id="MSearchSelectWindow" onmouseover="return searchBox.OnSearchSelectShow()" onmouseout="return searchBox.OnSearchSelectHide()" onkeydown="return searchBox.OnSearchSelectKey(event)"> </div> <!-- iframe showing the search results (closed by default) --> <div id="MSearchResultsWindow"> <iframe src="javascript:void(0)" frameborder="0" name="MSearchResults" id="MSearchResults"> </iframe> </div> <div class="header"> <div class="summary"> <a href="#pub-methods">Public Member Functions</a> &#124; <a href="#pro-attribs">Protected Attributes</a> &#124; <a href="classoperations__research_1_1sat_1_1_automaton_constraint-members.html">List of all members</a> </div> <div class="headertitle"><div class="title">AutomatonConstraint</div></div> </div><!--header--> <div class="contents"> <a name="details" id="details"></a><h2 class="groupheader">Detailed Description</h2> <div class="textblock"><p >Specialized automaton constraint. </p> <p >This constraint allows adding transitions to the automaton constraint incrementally. 
</p> <p class="definition">Definition at line <a class="el" href="cp__model_8h_source.html#l00662">662</a> of file <a class="el" href="cp__model_8h_source.html">cp_model.h</a>.</p> </div><table class="memberdecls"> <tr class="heading"><td colspan="2"><h2 class="groupheader"><a id="pub-methods" name="pub-methods"></a> Public Member Functions</h2></td></tr> <tr class="memitem:ab9fc8ad6fb12dda4642bef1753985784"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classoperations__research_1_1sat_1_1_automaton_constraint.html#ab9fc8ad6fb12dda4642bef1753985784">AddTransition</a> (int tail, int head, int64_t transition_label)</td></tr> <tr class="memdesc:ab9fc8ad6fb12dda4642bef1753985784"><td class="mdescLeft">&#160;</td><td class="mdescRight">Adds a transitions to the automaton. <a href="classoperations__research_1_1sat_1_1_automaton_constraint.html#ab9fc8ad6fb12dda4642bef1753985784">More...</a><br /></td></tr> <tr class="separator:ab9fc8ad6fb12dda4642bef1753985784"><td class="memSeparator" colspan="2">&#160;</td></tr> <tr class="memitem:a0ef1ea52810f5cb078f58799520b833c"><td class="memItemLeft" align="right" valign="top"><a class="el" href="classoperations__research_1_1sat_1_1_constraint.html">Constraint</a>&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classoperations__research_1_1sat_1_1_constraint.html#a0ef1ea52810f5cb078f58799520b833c">OnlyEnforceIf</a> (absl::Span&lt; const <a class="el" href="classoperations__research_1_1sat_1_1_bool_var.html">BoolVar</a> &gt; literals)</td></tr> <tr class="memdesc:a0ef1ea52810f5cb078f58799520b833c"><td class="mdescLeft">&#160;</td><td class="mdescRight">The constraint will be enforced iff all literals listed here are true. 
<a href="classoperations__research_1_1sat_1_1_constraint.html#a0ef1ea52810f5cb078f58799520b833c">More...</a><br /></td></tr> <tr class="separator:a0ef1ea52810f5cb078f58799520b833c"><td class="memSeparator" colspan="2">&#160;</td></tr> <tr class="memitem:ad2f0eb6bad7bac457d265320faeeb310"><td class="memItemLeft" align="right" valign="top"><a class="el" href="classoperations__research_1_1sat_1_1_constraint.html">Constraint</a>&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classoperations__research_1_1sat_1_1_constraint.html#ad2f0eb6bad7bac457d265320faeeb310">OnlyEnforceIf</a> (<a class="el" href="classoperations__research_1_1sat_1_1_bool_var.html">BoolVar</a> literal)</td></tr> <tr class="memdesc:ad2f0eb6bad7bac457d265320faeeb310"><td class="mdescLeft">&#160;</td><td class="mdescRight">See <a class="el" href="classoperations__research_1_1sat_1_1_constraint.html#a0ef1ea52810f5cb078f58799520b833c" title="The constraint will be enforced iff all literals listed here are true.">OnlyEnforceIf(absl::Span&lt;const BoolVar&gt; literals)</a>. <a href="classoperations__research_1_1sat_1_1_constraint.html#ad2f0eb6bad7bac457d265320faeeb310">More...</a><br /></td></tr> <tr class="separator:ad2f0eb6bad7bac457d265320faeeb310"><td class="memSeparator" colspan="2">&#160;</td></tr> <tr class="memitem:a18bb41d87c6d089385c392476adb6465"><td class="memItemLeft" align="right" valign="top"><a class="el" href="classoperations__research_1_1sat_1_1_constraint.html">Constraint</a>&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classoperations__research_1_1sat_1_1_constraint.html#a18bb41d87c6d089385c392476adb6465">WithName</a> (const std::string &amp;name)</td></tr> <tr class="memdesc:a18bb41d87c6d089385c392476adb6465"><td class="mdescLeft">&#160;</td><td class="mdescRight">Sets the name of the constraint. 
<a href="classoperations__research_1_1sat_1_1_constraint.html#a18bb41d87c6d089385c392476adb6465">More...</a><br /></td></tr> <tr class="separator:a18bb41d87c6d089385c392476adb6465"><td class="memSeparator" colspan="2">&#160;</td></tr> <tr class="memitem:a8546382b04c2126bd39cc17d72d0b5a2"><td class="memItemLeft" align="right" valign="top">const std::string &amp;&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classoperations__research_1_1sat_1_1_constraint.html#a8546382b04c2126bd39cc17d72d0b5a2">Name</a> () const</td></tr> <tr class="memdesc:a8546382b04c2126bd39cc17d72d0b5a2"><td class="mdescLeft">&#160;</td><td class="mdescRight">Returns the name of the constraint (or the empty string if not set). <a href="classoperations__research_1_1sat_1_1_constraint.html#a8546382b04c2126bd39cc17d72d0b5a2">More...</a><br /></td></tr> <tr class="separator:a8546382b04c2126bd39cc17d72d0b5a2"><td class="memSeparator" colspan="2">&#160;</td></tr> <tr class="memitem:afb2777b64e9107c27955860b33d03303"><td class="memItemLeft" align="right" valign="top">const ConstraintProto &amp;&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classoperations__research_1_1sat_1_1_constraint.html#afb2777b64e9107c27955860b33d03303">Proto</a> () const</td></tr> <tr class="memdesc:afb2777b64e9107c27955860b33d03303"><td class="mdescLeft">&#160;</td><td class="mdescRight">Returns the underlying protobuf object (useful for testing). 
<a href="classoperations__research_1_1sat_1_1_constraint.html#afb2777b64e9107c27955860b33d03303">More...</a><br /></td></tr> <tr class="separator:afb2777b64e9107c27955860b33d03303"><td class="memSeparator" colspan="2">&#160;</td></tr> <tr class="memitem:a5e82f974e671d3d579d12101717b810c"><td class="memItemLeft" align="right" valign="top">ConstraintProto *&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classoperations__research_1_1sat_1_1_constraint.html#a5e82f974e671d3d579d12101717b810c">MutableProto</a> () const</td></tr> <tr class="memdesc:a5e82f974e671d3d579d12101717b810c"><td class="mdescLeft">&#160;</td><td class="mdescRight">Returns the mutable underlying protobuf object (useful for model edition). <a href="classoperations__research_1_1sat_1_1_constraint.html#a5e82f974e671d3d579d12101717b810c">More...</a><br /></td></tr> <tr class="separator:a5e82f974e671d3d579d12101717b810c"><td class="memSeparator" colspan="2">&#160;</td></tr> </table><table class="memberdecls"> <tr class="heading"><td colspan="2"><h2 class="groupheader"><a id="pro-attribs" name="pro-attribs"></a> Protected Attributes</h2></td></tr> <tr class="memitem:a9ee30a925ae7127a28ae72965c8654d8"><td class="memItemLeft" align="right" valign="top">ConstraintProto *&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classoperations__research_1_1sat_1_1_constraint.html#a9ee30a925ae7127a28ae72965c8654d8">proto_</a> = nullptr</td></tr> <tr class="separator:a9ee30a925ae7127a28ae72965c8654d8"><td class="memSeparator" colspan="2">&#160;</td></tr> </table> <h2 class="groupheader">Member Function Documentation</h2> <a id="ab9fc8ad6fb12dda4642bef1753985784" name="ab9fc8ad6fb12dda4642bef1753985784"></a> <h2 class="memtitle"><span class="permalink"><a href="#ab9fc8ad6fb12dda4642bef1753985784">&#9670;&nbsp;</a></span>AddTransition()</h2> <div class="memitem"> <div class="memproto"> <table class="memname"> <tr> <td class="memname">void AddTransition </td> <td>(</td> <td 
class="paramtype">int&#160;</td> <td class="paramname"><em>tail</em>, </td> </tr> <tr> <td class="paramkey"></td> <td></td> <td class="paramtype">int&#160;</td> <td class="paramname"><em>head</em>, </td> </tr> <tr> <td class="paramkey"></td> <td></td> <td class="paramtype">int64_t&#160;</td> <td class="paramname"><em>transition_label</em>&#160;</td> </tr> <tr> <td></td> <td>)</td> <td></td><td></td> </tr> </table> </div><div class="memdoc"> <p>Adds a transitions to the automaton. </p> </div> </div> <a id="a5e82f974e671d3d579d12101717b810c" name="a5e82f974e671d3d579d12101717b810c"></a> <h2 class="memtitle"><span class="permalink"><a href="#a5e82f974e671d3d579d12101717b810c">&#9670;&nbsp;</a></span>MutableProto()</h2> <div class="memitem"> <div class="memproto"> <table class="mlabels"> <tr> <td class="mlabels-left"> <table class="memname"> <tr> <td class="memname">ConstraintProto * MutableProto </td> <td>(</td> <td class="paramname"></td><td>)</td> <td> const</td> </tr> </table> </td> <td class="mlabels-right"> <span class="mlabels"><span class="mlabel">inline</span><span class="mlabel">inherited</span></span> </td> </tr> </table> </div><div class="memdoc"> <p>Returns the mutable underlying protobuf object (useful for model edition). 
</p> <p class="definition">Definition at line <a class="el" href="cp__model_8h_source.html#l00551">551</a> of file <a class="el" href="cp__model_8h_source.html">cp_model.h</a>.</p> </div> </div> <a id="a8546382b04c2126bd39cc17d72d0b5a2" name="a8546382b04c2126bd39cc17d72d0b5a2"></a> <h2 class="memtitle"><span class="permalink"><a href="#a8546382b04c2126bd39cc17d72d0b5a2">&#9670;&nbsp;</a></span>Name()</h2> <div class="memitem"> <div class="memproto"> <table class="mlabels"> <tr> <td class="mlabels-left"> <table class="memname"> <tr> <td class="memname">const std::string &amp; Name </td> <td>(</td> <td class="paramname"></td><td>)</td> <td> const</td> </tr> </table> </td> <td class="mlabels-right"> <span class="mlabels"><span class="mlabel">inherited</span></span> </td> </tr> </table> </div><div class="memdoc"> <p>Returns the name of the constraint (or the empty string if not set). </p> </div> </div> <a id="a0ef1ea52810f5cb078f58799520b833c" name="a0ef1ea52810f5cb078f58799520b833c"></a> <h2 class="memtitle"><span class="permalink"><a href="#a0ef1ea52810f5cb078f58799520b833c">&#9670;&nbsp;</a></span>OnlyEnforceIf() <span class="overload">[1/2]</span></h2> <div class="memitem"> <div class="memproto"> <table class="mlabels"> <tr> <td class="mlabels-left"> <table class="memname"> <tr> <td class="memname"><a class="el" href="classoperations__research_1_1sat_1_1_constraint.html">Constraint</a> OnlyEnforceIf </td> <td>(</td> <td class="paramtype">absl::Span&lt; const <a class="el" href="classoperations__research_1_1sat_1_1_bool_var.html">BoolVar</a> &gt;&#160;</td> <td class="paramname"><em>literals</em></td><td>)</td> <td></td> </tr> </table> </td> <td class="mlabels-right"> <span class="mlabels"><span class="mlabel">inherited</span></span> </td> </tr> </table> </div><div class="memdoc"> <p>The constraint will be enforced iff all literals listed here are true. </p> <p >If this is empty, then the constraint will always be enforced. 
An enforced constraint must be satisfied, and an un-enforced one will simply be ignored.</p> <p >This is also called half-reification. To have an equivalence between a literal and a constraint (full reification), one must add both a constraint (controlled by a literal l) and its negation (controlled by the negation of l).</p> <p >[Important] currently, only a few constraints support enforcement:</p><ul> <li>bool_or, bool_and, linear: fully supported.</li> <li>interval: only support a single enforcement literal.</li> <li>other: no support (but can be added on a per-demand basis). </li> </ul> </div> </div> <a id="ad2f0eb6bad7bac457d265320faeeb310" name="ad2f0eb6bad7bac457d265320faeeb310"></a> <h2 class="memtitle"><span class="permalink"><a href="#ad2f0eb6bad7bac457d265320faeeb310">&#9670;&nbsp;</a></span>OnlyEnforceIf() <span class="overload">[2/2]</span></h2> <div class="memitem"> <div class="memproto"> <table class="mlabels"> <tr> <td class="mlabels-left"> <table class="memname"> <tr> <td class="memname"><a class="el" href="classoperations__research_1_1sat_1_1_constraint.html">Constraint</a> OnlyEnforceIf </td> <td>(</td> <td class="paramtype"><a class="el" href="classoperations__research_1_1sat_1_1_bool_var.html">BoolVar</a>&#160;</td> <td class="paramname"><em>literal</em></td><td>)</td> <td></td> </tr> </table> </td> <td class="mlabels-right"> <span class="mlabels"><span class="mlabel">inherited</span></span> </td> </tr> </table> </div><div class="memdoc"> <p>See <a class="el" href="classoperations__research_1_1sat_1_1_constraint.html#a0ef1ea52810f5cb078f58799520b833c" title="The constraint will be enforced iff all literals listed here are true.">OnlyEnforceIf(absl::Span&lt;const BoolVar&gt; literals)</a>. 
</p> </div> </div> <a id="afb2777b64e9107c27955860b33d03303" name="afb2777b64e9107c27955860b33d03303"></a> <h2 class="memtitle"><span class="permalink"><a href="#afb2777b64e9107c27955860b33d03303">&#9670;&nbsp;</a></span>Proto()</h2> <div class="memitem"> <div class="memproto"> <table class="mlabels"> <tr> <td class="mlabels-left"> <table class="memname"> <tr> <td class="memname">const ConstraintProto &amp; Proto </td> <td>(</td> <td class="paramname"></td><td>)</td> <td> const</td> </tr> </table> </td> <td class="mlabels-right"> <span class="mlabels"><span class="mlabel">inline</span><span class="mlabel">inherited</span></span> </td> </tr> </table> </div><div class="memdoc"> <p>Returns the underlying protobuf object (useful for testing). </p> <p class="definition">Definition at line <a class="el" href="cp__model_8h_source.html#l00548">548</a> of file <a class="el" href="cp__model_8h_source.html">cp_model.h</a>.</p> </div> </div> <a id="a18bb41d87c6d089385c392476adb6465" name="a18bb41d87c6d089385c392476adb6465"></a> <h2 class="memtitle"><span class="permalink"><a href="#a18bb41d87c6d089385c392476adb6465">&#9670;&nbsp;</a></span>WithName()</h2> <div class="memitem"> <div class="memproto"> <table class="mlabels"> <tr> <td class="mlabels-left"> <table class="memname"> <tr> <td class="memname"><a class="el" href="classoperations__research_1_1sat_1_1_constraint.html">Constraint</a> WithName </td> <td>(</td> <td class="paramtype">const std::string &amp;&#160;</td> <td class="paramname"><em>name</em></td><td>)</td> <td></td> </tr> </table> </td> <td class="mlabels-right"> <span class="mlabels"><span class="mlabel">inherited</span></span> </td> </tr> </table> </div><div class="memdoc"> <p>Sets the name of the constraint. 
</p> </div> </div> <h2 class="groupheader">Member Data Documentation</h2> <a id="a9ee30a925ae7127a28ae72965c8654d8" name="a9ee30a925ae7127a28ae72965c8654d8"></a> <h2 class="memtitle"><span class="permalink"><a href="#a9ee30a925ae7127a28ae72965c8654d8">&#9670;&nbsp;</a></span>proto_</h2> <div class="memitem"> <div class="memproto"> <table class="mlabels"> <tr> <td class="mlabels-left"> <table class="memname"> <tr> <td class="memname">ConstraintProto* proto_ = nullptr</td> </tr> </table> </td> <td class="mlabels-right"> <span class="mlabels"><span class="mlabel">protected</span><span class="mlabel">inherited</span></span> </td> </tr> </table> </div><div class="memdoc"> <p class="definition">Definition at line <a class="el" href="cp__model_8h_source.html#l00558">558</a> of file <a class="el" href="cp__model_8h_source.html">cp_model.h</a>.</p> </div> </div> <hr/>The documentation for this class was generated from the following file:<ul> <li><a class="el" href="cp__model_8h_source.html">cp_model.h</a></li> </ul> </div><!-- contents --> </div><!-- doc-content --> </div> </div> <div id="footer-container"> <div id="footer"> </div> </div> </body> </html>
{ "content_hash": "507b77d002c77d177d8c1978a9e874d5", "timestamp": "", "source": "github", "line_count": 375, "max_line_length": 501, "avg_line_length": 52.936, "alnum_prop": 0.673568082212483, "repo_name": "or-tools/docs", "id": "052306ae674b19a0c93f30418ccbd06ec5ca40d6", "size": "19851", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "docs/cpp_sat/classoperations__research_1_1sat_1_1_automaton_constraint.html", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN"> <html> <head> <meta http-equiv="content-type" content="text/html; charset=utf-8"/> <title></title> <meta name="generator" content="LibreOffice 6.1.3.2 (Linux)"/> <meta name="author" content="Mariano Forti"/> <meta name="created" content="2018-11-22T13:46:00.662157305"/> <meta name="changedby" content="Mariano Forti"/> <meta name="changed" content="2018-11-22T22:41:26.802537762"/> </head> <body lang="en-US" link="#000080" vlink="#800000" dir="ltr"><p align="center" style="margin-top: 0.21cm; margin-bottom: 0.21cm; font-style: normal; line-height: 150%"> <p align="center" style="text-indent: 0cm; margin-top: 0.4cm; margin-bottom: 0.4cm; border: none; padding: 0.50cm; line-height: 150%; background: #800080; page-break-after: avoid"> <font size="6" style="font-size: 28pt"><b>Mariano Daniel Forti</b></font></p> <h1 class="western">Introduction</h1> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> I am currently a researcher at Comisión Nacional de Energía Atómica, Argentina. My activities are carried on at División Aleaciones Especiales (Special Alloys Division) where I started working as a specialist in DFT calculations in 2017. My research is about mechanical stability of interfacial systems, mainly in the Iron/Magnetite interface and more recently the Zr / ZrO2 system as well. 
I also participate actively in other activities carried on in the group, where we study point defects in ZrO2.</p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> Since my graduation as an Engineer I also work as a teaching assistant in an advanced course about the Finite Element Method for the Materials Engineering program at Instituto Sabato.</p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> In September 2017 I completed the requirements to obtain the Doctorado en Ciencia y Tecnología (Doctorate in Science and Technology, Ph.D. ) at Universidad de San Martín. My degree studies are in Materials Engineering. Since the Final Work to get The Engineering degree I have been working in DFT calculations based on VASP, applied to surface and interfacial systems.</p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> After obtaining my PhD in 2017, I started collaborating with the Special Alloy Foundry in Comisión de Energía Atómica in the preparations for the qualifications of the fabrication processes of alloys used in security components in Nuclear Power Plants. I also Assist the Foundry Staff in scientific basis for process optimization, quality assurance and technical decision taking. My responsibilities at DAE also include the maintenance of two private (though rather small) Linux computing clusters owned by DAE. Hence, during my professional career, I have acquired a wide experience in Linux System Administration for deskop and High Performance Computing Clusters. My programming skills include a variety of languages including Bash, Fortran, Python, php and javascript, and I also have wide experience in Matlab and Octave environments. I make a daily use of other tools as vim, OriginLab and gnuplot. In particular, I am an user and enthusiast of open source tools as the KDE desktop and the LibreOffice suite. 
</p> <h1 class="western">Profesional Experience</h1> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> February 2017 - Current. Comisión Nacional de Energía Atómica. Gerencia Materiales. División Aleaciones Especiales. Advisor: Dr. Paula Alonso (pralonso@cnea.gov.ar). Position: Researcher. Density Functional Theory calculations in interfacial systems: Fe / Fe3O4, Zr/ZrO2. Point Defects in ZrO2. Linux System Administration in Desktop and Cluster Computers for High Performance Calculations. Daily use of Bash, Fortran and Python. Scientific support for Special Alloy Foundry including quality assurance.</p> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> August 2010 – Current. Teaching assistant at Instituto Sabato (UNSaM-CNEA), Ayudante de 1ra. “Computer Simulation of Processes and Materials”. Professor: Ruben Weht (ruweht@cnea.gov.ar). In this course students make their own implementation of several numerical methods for solving differential equations including Finite Differences and Finite Elements.</p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> August 2010 – January 2017. Comisión Nacional de Energía Atómica. Gerencia Materiales. División Aleaciones Especiales. Full Time Scolarship, “ Ab-Initio studies about adhesion in iron/magnetite interfaces”. Advisor: Dr. Paula Alonso (pralonso@cnea.gov.ar). Research and development activities. Linux System Administration in Desktop and Cluster Computers for High Performance Calculations. Daily use of Bash, Fortran and Python. </p> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> August 2006- August 2010. Instituto de Tecnología Jorge Sabato (ITJS, UNSAM-CNEA). Materials Engineering. Full Time Scolarship. </p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> January 2010 – July 2010. 
Final Work to get the Materials Engineering Degree. Department of Chemical Engineering, Texas A&amp;M University, USA. “Ab-Initio Studies about Carburization of Fe3Al Based Alloys”. Advisor: Dr. Perla Balbuena (balbuena@tamu.edu) . </p> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> February 2008 . Internship at Gerencia Física, CNEA. “Synthesis and Mechanical Properties of Manganite / Polymer composites”. Reference: Griselda Polla (grispoll@cnea.gov.ar)</p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> February 2007 . Internship at Condensed Matter Group, CNEA. “Eléctric and Magnetic Properties of Manganites/Polymer Composites”. Reference: Joaquín Sacanell (sacanell@cnea.gov.ar). </p> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> February 2017 – February 2020. PICT-2015-2267, “In the search for a new alloy for fuel elements of the CAREM-25 Power Plant” Comisión Nacional de Energía Atómica, Agencia Nacional de Promoción Científica y Tecnológica, Ministerio de Ciencia, Técnica e innovación Productiva.</p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> January 2017 – December 2019. Project 80020160500046SM, “In the search for a new alloy for fuel elements of the CAREM-25 Power Plant”. Instituto Sabato, Universidad de San Martin, Comisión Nacional de Energía Atómica, Ministerio de Ciencia, Técnica e Innovación Productiva.</p> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> August 2011- August 2015. PICT-2011-1861, “Power Nuclear Reactor’s Materials integrity. Atomistic / Continuum models applied to inter-diffusion in disperse fuels and fracture in the oxide scales in steel pipes”. Ministerio de Ciencia, Técnica e Innovación Productiva. </p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> January 2011 – December 2012. 
Project UNSAM C063, “Pasivating oxide scale over iron”. Proyecto Universidad Nacional de San Martin. </p> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> January 2011 – December 2011. “Constitutional Defects and Aluminum Migration Energy in UAl4”. Fundación Balseiro and Comisión Nacional de Energía Atómica. </p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> August 2010 – December 2015. “Prespective studies, research and development of technologies for nuclear power plants of the fourth generation” Comision Nacinoal de Energía Atómica.</p> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> January 2010 - December 2012 . “Computational Methods applied to the study of physical and chemical properties of fuel elements in research reactors, and modelization of defect cluster migrations in technological materials” CONICET.</p> <h1 class="western">Participation in research projects</h1> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> February 2017 – February 2020.PICT-2015-2267, “In the search for a new alloy for fuel elements of the CAREM-25 Power Plant” Comisión Nacional de Energía Atómica, Agencia Nacional de Promoción Científica y Tecnológica, Ministerio de Ciencia, Técnica e innovación Productiva.</p> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> January 2017 – December 2019.Project 80020160500046SM, “In the search for anewalloy for fuelelementsoftheCAREM-25PowerPlant”.InstitutoSabato, Universidad de San Martin, Comisión Nacional de Energía Atómica, Ministerio de Ciencia, Técnica e Innovación Productiva.</p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> August 2011- August 2015.PICT-2011-1861, “Power Nuclear Reactor’s Materials integrity. 
Atomistic / Continuum models applied to inter-diffusion in disperse fuels and fracture in the oxide scales in steel pipes”. Ministerio de Ciencia, Técnica e Innovación Productiva. </p> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> January 2011 – December 2012.Project UNSAM C063, “Pasivating oxide scale over iron”. Proyecto Universidad Nacional de San Martin. </p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> January 2011 – December 2011. “Constitutional Defects and Aluminum Migration Energy in UAl4”. Fundación Balseiro andComisión Nacional de Energía Atómica. </p> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> August 2010 – December 2015. “Prespective studies, research and development of technologies for nuclear power plants of the fourth generation” Comision Nacinoal de Energía Atómica.</p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> January 2010 - December 2012. “Computational Methods applied to the study of physical and chemical properties of fuel elements in research reactors, and modelization of defect cluster migrations in technologiacl materials” CONICET.</p> <h1 class="western">Publications</h1> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> “Shear Behavior of Fe/Fe3O 4 interfaces”. Revista Materia V23-N2 (2018). Mariano Forti, Paula Alonso, Pablo Gargano, Gerardo Rubiolo. </p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> “Properties of hexagonal Zr and tetragonal ZrO2 low index surfaces from DFT calculations”. Revista Materia V23-N2 (2018). Paula Alonso, Pablo Gargano, Laura Kniznik, Gerardo Rubiolo. 
</p> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> “Concentration of constitutional and thermal defects in UAl4 ” Journal of Nuclear Materials 478 (2016) 74-82. Pablo Gargano, Laura Kniznik, Paula Alonso, Mariano Forti, Gerardo Rubiolo.</p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> “A DFT study of atomic structure and adhesion at the Fe(BCC)/Fe3 O4 interfaces”. Surface Science 647 (2016) 55–65. Mariano Forti, Paula Alonso, Pablo Gargano,Perla Balbuena, Gerardo Rubiolo.</p> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> “Charge difference calculation in Fe/Fe3O4 interfaces from DFT results”. Procedia Materials Science 8 (2015) pp 1066 – 1072. Diego Tozini, Mariano Forti, Paula Alonso, Pablo Gargano, Gerardo Rubiolo.</p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> “Adhesion Energy of the Fe(BCC)/Magnetite Interface within the DFT approach”. Procedia Materials Science 9 (2015) pp 612 – 618. Diego Tozini, Mariano Forti, Pablo Gargano, Paula Alonso, Gerardo Rubiolo. </p> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> “Ab-initio studies on carburization of Fe 3 Al based alloys”. Procedia Materials Science 1 ( 2012 ) 191 – 198. Mariano Forti, Perla Balbuena, Paula Alonso.</p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> “First principles study of U-Al system ground state”. Procedia Materials Science 1 ( 2012 ) 514 –519. Laura Kniznik, Paula R. Alonso, Pablo H. Gargano, Mariano D. Forti, Gerardo H. Rubiolo.</p> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> “Transition metals monoxides. An LDA+U study”. Procedia Materials Science 1 ( 2012 ) 230 – 234. Mariano Forti, Paula R. Alonso, Pablo H. Gargano, Gerardo H. 
Rubiolo.</p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> “Electric and magnetic properties of PMMA/manganite composites”. Physica B 404 (2009) 2760– 2762. Artale, C., Fermepin, S., Latino, M., Quintero, M., Granja, L., Sacanell, J., Polla G., Levy P. </p> <h1 class="western">Participation in Congress</h1> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> October 2018. 5th Nuclear Materials Conference, Elsevier – IAEA, 14-18 October 2018, Seattle, USA. Poster Presentations: 1) “First-principles thermodynamic study of point-defect structure and electrical conductivity in tetragonal non-stoichiometric zirconia including lattice vibrations”, Gargano, Kniznik, Alonso, Forti, Rubuiolo. 2) “DFT Study of the Early Stages of Oxidation of the Zr(1010) Surface”, F. Soto, M. Forti, P. Alonso, P. Gargano, L. Kniznik, G. Rubiolo, P. Balbuena. 3) “A DFT study of the resistance to traction and shear loads of the Fe(BCC) / Fe3O4 interface”, M.Forti, P. Alonso, P. Gargano, L. Kniznik, G. Rubiolo. https://www.elsevier.com/events/conferences/the-nuclear-materials-conference</p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> November 2016. 16º SAM-CONAMET. “Shear stress in Fe/Fe 3 O4 interfaces”. Sociedad Argentina de Materiales. <a class="western" href="http://sam-conamet2016.congresos.unc.edu.ar/">http://sam-conamet2016.congresos.unc.edu.ar/</a></p> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> October 2014. 14th SAM-CONAMET. “Charge difference calculation in Fe/Fe3O4 interfaces from DFT results”. Sociedad Argentina de Materiales. http://www.unl.edu.ar/materiales2014/</p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> March 2014. Workshop en Procesamiento Físico Químico Avanzado. Workshop, 10 – 15 March 2014, Universidad Nacional de Santander, Piedecuesta, Colombia. 
Invited Speaker. i) “Vasp Workshop”, b) “Mechanical Properties from DFT Calculations”. http://www.sc3.uis.edu.co/pfqa-procesos-fisico-quimicos-avanzados/</p> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> August 2013. 13th SAM-CONAMET. “DFT Approximation to the Adhesion Energy of the Fe(BCC)/Magnetite Interface”. Sociedad Argentina de Materiales, Puerto Iguazú, Misiones, Argentina.</p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> November 2012. XXXIX Reunión Anual de la Asociación Argentina de Tecnología Nuclear.</p> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> “Atomistic Model of the Adhesion Problem in Magnetite /iron system”. Asociación Argentina de Tecnología Nuclear. Buenos Aires, Argentina.</p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> October 2012. 4º Meeting of young researchers in Cience And Technology. “Spin-Orbit Coupling effect on bandstructure of Iron monoxide in GGA+U approximation”. Sociedad Argentina de Materiales, Mar del Plata, Buenos Aires, Argentina. </p> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> October 2012. 12th CONAMET/SAM “DFT study on adhesion energy in Fe/Fe3O3 ubterface”. Universidad Técnica Federico Santa María, CONAMET-SAM, Valparaíso, Chile. May 2012. 12th Anual Meeting of the Nuclear Fuel Division. “Predicting toughness in Fe/Magnetite interfaces based in First Principle calculations”. Centro Atómico Constituyentes, Comisión Nacional de Energía Atómica. Buenos Aires, Argentina. </p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> January 2012. Pan American Advanced Institute, Computational Materials Science for Energy Generation and Conversion. Santiago De Chile, Chile. 
(Workshop, http://www.cnf.cornell.edu/cnf_pasi2012.html) </p> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> October 2011. SAM / CONAMET 2011, Rosario, Argentina. “Ab Initio Studies on carburization in Fe3Al based alloys” ( http://www.ifir-conicet.gov.ar/SAM-ONAMET2011/documentos/topico6/215-161-1-SP.pdf). </p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> December 2008. At The Frontiers of Condensed Matter IV. &quot;Electric and Magnetic properties of PMMA/Manganite composites&quot;. </p> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> September 2008 . Asociación de Física Argentina. &quot;Electric and Magnetic properties of PMMA/Manganite composites&quot;. </p> <h1 class="western">Advisories</h1> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> February 2014. Co-Advisor of intern. Diego Tozini, Instituto Sabato. “Automated edition of VASP outputs to calculate interface interactions”. diegojosetozini@hotmail.com.</p> <h1 class="western">Education</h1> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> September 2017. Doctor in Materials Science and Technology (PhD). Instituto de Tecnología Prof. Jorge Sabato, Universidad Nacional de General San Martín. Advisor: Gerardo Rubiolo (rubiolo@cnea.gov.ar). </p> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> PhD Thesis title: Pasivating film in steel pipes used in nuclear power plants steam generators. </p> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> Abstract: The main objective of this work was to calculate the fracture toughness of the α-Fe /(magnetite)Fe3O4 interface using Density Functional Theory (DFT). 
Traction and shear <span lang="en-US">mode strains are introduced to the system to separate the parts, and total energy calculations are used to investigate the behavior of the interface and the influence of atomic arrangement. </span> </p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> August 2010. Materlials Engineer. Instituto de Tecnología Prof. Jorge Sabato, Universidad Nacional de General San Martín. </p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> Engineering dissertation: “Ab-Initio Studies about Carburization of Fe3Al Based Alloys”. Advisors: Perla Balbuena (balbuena@tamu.edu), Paula Alonso (pralonso@cnea.gov.ar)</p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> Abstract. Fe-Al based alloys exhibit excellent properties but suffer metal dusting in carburizing atmospheres. Surface composition can be a determinant factor in the solution of this problem. We calculate in this work the C adsorption energies in the L21 Fe2AlX (X=Ti,V,Nb) structures and we study the influence of surface covering. Our results show the beneficial effect of Ti, suggesting that there could exist an activation energy to promote the incorporation of C in the subsurface layers when the surface is covered enough.</p> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> Languages: oral and written English, Native Spanish. </p> <h1 class="western">Other Academic Activities.</h1> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> August 2012- August 2015 . Member of the Counseling Board as a representative of former students of Materials Engineering, Instituto Sabato, UNSAM-CNEA.</p> <h1 class="western">Awards</h1> <p style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%; background: #dddddd"> Special Mention to Young Researchers in Materials Cience and Technology. 
Sociedad Argentina <span lang="en-US">de Materiales, SAM-CONAMET 2011.</span></p> <p class="western" style="margin-top: 0.1cm; margin-bottom: 0.1cm; line-height: 150%"> Special Mention as a Finalist for Best Doctoral Thesis Awards, Universidad de San Martín, Octubre 2018.</p> </body> </html>
{ "content_hash": "66025d7ae93868795598d8535ee8cce1", "timestamp": "", "source": "github", "line_count": 351, "max_line_length": 180, "avg_line_length": 59.92877492877493, "alnum_prop": 0.7644877584977419, "repo_name": "mdforti/mdforti.github.io", "id": "7744231be7a2e55d5a0064376ab09f3ff966d63b", "size": "21347", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "CurriculumVitae/CVEnglish_NEW2018.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "2052" }, { "name": "HTML", "bytes": "226111" } ], "symlink_target": "" }
package org.hx.rainbow.server.oc.manage.service; import java.util.Date; import org.hx.rainbow.common.context.RainbowContext; import org.hx.rainbow.common.util.ObjectId; import org.hx.rainbow.common.core.service.BaseService; import org.hx.rainbow.common.web.session.RainbowSession; import org.springframework.context.annotation.Lazy; import org.springframework.stereotype.Service; @Lazy @Service public class DatahostService extends BaseService { private static final String NAMESPACE = "OCDATAHOST"; private static final String QUERY_COMBOX = "queryCombox"; public RainbowContext query(RainbowContext context) { super.query(context, NAMESPACE); return context; } public RainbowContext queryByPage(RainbowContext context) { super.queryByPage(context, NAMESPACE); return context; } public RainbowContext queryCombox(RainbowContext context) { super.query(context, NAMESPACE,QUERY_COMBOX); return context; } public RainbowContext insert(RainbowContext context) { context.addAttr("guid", new ObjectId().toString()); context.addAttr("createTime", new Date()); context.addAttr("createUser", RainbowSession.getUserName()); super.insert(context, NAMESPACE); context.getAttr().clear(); return context; } public RainbowContext update(RainbowContext context) { super.update(context, NAMESPACE); context.getAttr().clear(); return context; } public RainbowContext delete(RainbowContext context) { super.delete(context, NAMESPACE); context.getAttr().clear(); return context; } }
{ "content_hash": "cb99e77da81e6bafd92cc34fd1c8826e", "timestamp": "", "source": "github", "line_count": 53, "max_line_length": 62, "avg_line_length": 29.79245283018868, "alnum_prop": 0.7485750474984167, "repo_name": "youngor/openclouddb", "id": "9e091a78ec20381ce4de4e64d16b3bdeeaeb9778", "size": "2701", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "MyCat-web/rainbow-server/src/main/java/org/hx/rainbow/server/oc/manage/service/DatahostService.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "668381" }, { "name": "Java", "bytes": "11214966" }, { "name": "JavaScript", "bytes": "660124" }, { "name": "Shell", "bytes": "34282" } ], "symlink_target": "" }
<!DOCTYPE html> <html> <head> <meta charset="utf-8" /> <meta http-equiv="X-UA-Compatible" content="IE=edge" /> <title>Hi</title> <meta name="description" content="" /> <meta name="HandheldFriendly" content="True" /> <meta name="viewport" content="width=device-width, initial-scale=1.0" /> <link rel="shortcut icon" href="//mrcouthy.github.io/themes/Casper/favicon.ico"> <link rel="stylesheet" type="text/css" href="//mrcouthy.github.io/themes/Casper/assets/css/screen.css?v=1.0.0" /> <link rel="stylesheet" type="text/css" href="//fonts.googleapis.com/css?family=Merriweather:300,700,700italic,300italic|Open+Sans:700,400" /> <link rel="canonical" href="https://mrcouthy.github.io" /> <meta name="generator" content="Ghost ?" /> <link rel="alternate" type="application/rss+xml" title="Hi" href="https://mrcouthy.github.io/rss" /> <link rel="stylesheet" href="//cdnjs.cloudflare.com/ajax/libs/highlight.js/8.4/styles/default.min.css"> </head> <body class="home-template"> <header class="main-header no-cover"> <nav class="main-nav overlay clearfix"> <!-- <a class="subscribe-button icon-feed" href="https://mrcouthy.github.io/rss/">Subscribe</a> --> </nav> <div class="vertical"> <div class="main-header-content inner"> <h1 class="page-title">Hi</h1> <h2 class="page-description"></h2> </div> </div> <a class="scroll-down icon-arrow-left" href="#content" data-offset="-45"><span class="hidden">Scroll Down</span></a> </header> <main id="content" class="content" role="main"> <div class="extra-pagination inner"> <nav class="pagination" role="navigation"> <span class="page-number">Page 1 of 1</span> </nav> </div> <article class="post tag-net tag-c"> <header class="post-header"> <h2 class="post-title"><a href="https://mrcouthy.github.io/2016/04/19/Code-Contracts.html">Code Contracts</a></h2> </header> <section class="post-excerpt"> <p>Daily:18 April 2016 Codecontracts Search : code contracts Install vs plugin Preconditions : Consditions required on method entry PostConditions : Conditions 
required on method ex <a class="read-more" href="https://mrcouthy.github.io/2016/04/19/Code-Contracts.html">&raquo;</a></p> </section> <footer class="post-meta"> on <a href="https://mrcouthy.github.io/tag/net">.net</a>, <a href="https://mrcouthy.github.io/tag/c"> c#</a> <time class="post-date" datetime="2016-04-19">19 April 2016</time> </footer> </article> <article class="post tag-devops"> <header class="post-header"> <h2 class="post-title"><a href="https://mrcouthy.github.io/2016/04/06/Setting-up-the-Sonar-Qube.html">Setting up the SonarQube</a></h2> </header> <section class="post-excerpt"> <p>Install as windows service Configure database Start the web service Browse in localhost:9000 <a class="read-more" href="https://mrcouthy.github.io/2016/04/06/Setting-up-the-Sonar-Qube.html">&raquo;</a></p> </section> <footer class="post-meta"> on <a href="https://mrcouthy.github.io/tag/devops">devops</a> <time class="post-date" datetime="2016-04-06">06 April 2016</time> </footer> </article> <article class="post tag-hubpress"> <header class="post-header"> <h2 class="post-title"><a href="https://mrcouthy.github.io/2016/04/05/Setting-Title.html">Setting Title</a></h2> </header> <section class="post-excerpt"> <p>I&#8217;ve got a hubpress site in github use handlebar templates Gost cms software <a class="read-more" href="https://mrcouthy.github.io/2016/04/05/Setting-Title.html">&raquo;</a></p> </section> <footer class="post-meta"> on <a href="https://mrcouthy.github.io/tag/hubpress">hubpress</a> <time class="post-date" datetime="2016-04-05">05 April 2016</time> </footer> </article> <nav class="pagination" role="navigation"> <span class="page-number">Page 1 of 1</span> </nav> </main> <footer class="site-footer clearfix"> <section class="copyright"><a href="https://mrcouthy.github.io">Hi</a> &copy; 2016</section> <section class="poweredby">Proudly published with <a href="http://hubpress.io">HubPress</a></section> </footer> <script 
src="//cdnjs.cloudflare.com/ajax/libs/jquery/2.1.3/jquery.min.js?v="></script> <script src="//cdnjs.cloudflare.com/ajax/libs/moment.js/2.9.0/moment-with-locales.min.js?v="></script> <script src="//cdnjs.cloudflare.com/ajax/libs/highlight.js/8.4/highlight.min.js?v="></script> <script type="text/javascript"> jQuery( document ).ready(function() { // change date with ago jQuery('ago.ago').each(function(){ var element = jQuery(this).parent(); element.html( moment(element.text()).fromNow()); }); }); hljs.initHighlightingOnLoad(); </script> <script type="text/javascript" src="//mrcouthy.github.io/themes/Casper/assets/js/jquery.fitvids.js?v=1.0.0"></script> <script type="text/javascript" src="//mrcouthy.github.io/themes/Casper/assets/js/index.js?v=1.0.0"></script> </body> </html>
{ "content_hash": "8e40edd0cd99d92cd16516a4833581ac", "timestamp": "", "source": "github", "line_count": 120, "max_line_length": 298, "avg_line_length": 45.483333333333334, "alnum_prop": 0.6201905459875412, "repo_name": "mrcouthy/mrcouthy.github.io", "id": "2ceeb9478a8d237aefda48dc808e8a3ca77a0dc6", "size": "5458", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "index.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "420033" }, { "name": "CoffeeScript", "bytes": "6630" }, { "name": "HTML", "bytes": "144544" }, { "name": "JavaScript", "bytes": "23969" }, { "name": "Ruby", "bytes": "806" }, { "name": "Shell", "bytes": "2265" } ], "symlink_target": "" }
/* Erbele - Based on Fraise 3.7.3 based on Smultron by Peter Borg Current Maintainer (since 2016): Andreas Bentele: abentele.github@icloud.com (https://github.com/abentele/Erbele) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ #import "NSToolbarItem+Erbele.h" #import "FRACommandsController.h" #import "FRADocumentsListCell.h" #import "FRAApplicationDelegate.h" #import "FRABasicPerformer.h" #import "FRADragAndDropController.h" #import "FRAToolsMenuController.h" #import "FRAInterfacePerformer.h" #import "FRAProjectsController.h" #import "FRAVariousPerformer.h" #import "FRAOpenSavePerformer.h" #import "FRATextView.h" @implementation FRACommandsController static id sharedInstance = nil; @synthesize commandsTextView, commandsWindow, commandCollectionsArrayController, commandCollectionsTableView, commandsTableView, commandsArrayController; + (FRACommandsController *)sharedInstance { if (sharedInstance == nil) { sharedInstance = [[self alloc] init]; } return sharedInstance; } - (id)init { if (sharedInstance == nil) { sharedInstance = [super init]; temporaryFilesArray = [[NSMutableArray alloc] init]; } return sharedInstance; } - (void)openCommandsWindow { if (commandsWindow == nil) { [[NSBundle mainBundle] loadNibNamed:@"FRACommands" owner:self topLevelObjects:nil]; [commandCollectionsTableView setDataSource:[FRADragAndDropController sharedInstance]]; [commandsTableView setDataSource:[FRADragAndDropController sharedInstance]]; [commandCollectionsTableView registerForDraggedTypes:@[NSFilenamesPboardType, 
@"FRAMovedCommandType"]]; [commandCollectionsTableView setDraggingSourceOperationMask:(NSDragOperationCopy) forLocal:NO]; [commandsTableView registerForDraggedTypes:@[NSStringPboardType]]; [commandsTableView setDraggingSourceOperationMask:(NSDragOperationCopy) forLocal:NO]; NSSortDescriptor *sortDescriptor = [[NSSortDescriptor alloc] initWithKey:@"name" ascending:YES]; [commandCollectionsArrayController setSortDescriptors:@[sortDescriptor]]; [commandsArrayController setSortDescriptors:@[sortDescriptor]]; FRADocumentsListCell *cell = [[FRADocumentsListCell alloc] init]; [cell setWraps:NO]; [cell setLineBreakMode:NSLineBreakByTruncatingMiddle]; [[commandCollectionsTableView tableColumnWithIdentifier:@"collection"] setDataCell:cell]; NSToolbar *toolbar = [[NSToolbar alloc] initWithIdentifier:@"CommandsToolbarIdentifier"]; [toolbar setShowsBaselineSeparator:YES]; [toolbar setAllowsUserCustomization:YES]; [toolbar setAutosavesConfiguration:YES]; [toolbar setDisplayMode:NSToolbarDisplayModeDefault]; [toolbar setSizeMode:NSToolbarSizeModeSmall]; [toolbar setDelegate:self]; [commandsWindow setToolbar:toolbar]; } [commandsWindow makeKeyAndOrderFront:self]; [[FRAToolsMenuController sharedInstance] buildRunCommandMenu]; } - (IBAction)newCollectionAction:(id)sender { [commandCollectionsArrayController commitEditing]; [commandsArrayController commitEditing]; id collection = [FRABasic createNewObjectForEntity:@"CommandCollection"]; [FRAManagedObjectContext processPendingChanges]; [commandCollectionsArrayController setSelectedObjects:@[collection]]; [commandsWindow makeFirstResponder:commandCollectionsTableView]; [commandCollectionsTableView editColumn:0 row:[commandCollectionsTableView selectedRow] withEvent:nil select:NO]; } - (IBAction)newCommandAction:(id)sender { id collection; NSArray *commandCollections = [FRABasic fetchAll:@"CommandCollectionSortKeyName"]; if ([commandCollections count] == 0) { collection = [FRABasic createNewObjectForEntity:@"CommandCollection"]; 
[collection setValue:COLLECTION_STRING forKey:@"name"]; } [commandsArrayController commitEditing]; [commandCollectionsArrayController commitEditing]; [self performInsertNewCommand]; [commandsWindow makeFirstResponder:commandsTableView]; [commandsTableView editColumn:0 row:[commandsTableView selectedRow] withEvent:nil select:NO]; } - (id)performInsertNewCommand { id collection; NSArray *commandCollections = [FRABasic fetchAll:@"CommandCollectionSortKeyName"]; if ([commandCollections count] == 0) { collection = [FRABasic createNewObjectForEntity:@"CommandCollection"]; [collection setValue:COLLECTION_STRING forKey:@"name"]; } else { if (commandsWindow != nil && [[commandCollectionsArrayController selectedObjects] count] != 0) { collection = [commandCollectionsArrayController selectedObjects][0]; } else { // If no collection is selected choose the last one in the array collection = [commandCollections lastObject]; } } id item = [FRABasic createNewObjectForEntity:@"Command"]; [[collection mutableSetValueForKey:@"commands"] addObject:item]; [FRAManagedObjectContext processPendingChanges]; [commandsArrayController setSelectedObjects:@[item]]; return item; } - (void)performDeleteCollection { id collection = [commandCollectionsArrayController selectedObjects][0]; [FRAManagedObjectContext deleteObject:collection]; [[FRAToolsMenuController sharedInstance] buildRunCommandMenu]; } - (void)importCommands { [self openCommandsWindow]; NSOpenPanel *openPanel = [NSOpenPanel openPanel]; [openPanel setResolvesAliases:YES]; [openPanel setDirectoryURL: [NSURL fileURLWithPath: [FRAInterface whichDirectoryForOpen]]]; [openPanel setAllowedFileTypes: @[@"erbeleCommands"]]; [openPanel beginSheetModalForWindow: self.commandsWindow completionHandler: (^(NSInteger returnCode) { if (returnCode == NSModalResponseOK) { [self performCommandsImportWithPath: [[openPanel URL] path]]; } [self.commandsWindow makeKeyAndOrderFront:nil]; })]; } - (void)performCommandsImportWithPath:(NSString *)path { 
NSData *data = [NSData dataWithContentsOfFile:path]; NSArray *commands = (NSArray *)[NSKeyedUnarchiver unarchiveObjectWithData:data]; if ([commands count] == 0) { return; } id collection = [FRABasic createNewObjectForEntity:@"CommandCollection"]; [collection setValue:[commands[0] valueForKey:@"collectionName"] forKey:@"name"]; id item; for (item in commands) { id command = [FRABasic createNewObjectForEntity:@"Command"]; [command setValue:[item valueForKey:@"name"] forKey:@"name"]; [command setValue:[item valueForKey:@"text"] forKey:@"text"]; [command setValue:[item valueForKey:@"collectionName"] forKey:@"collectionName"]; [command setValue:[item valueForKey:@"shortcutDisplayString"] forKey:@"shortcutDisplayString"]; [command setValue:[item valueForKey:@"shortcutMenuItemKeyString"] forKey:@"shortcutMenuItemKeyString"]; [command setValue:[item valueForKey:@"shortcutModifier"] forKey:@"shortcutModifier"]; [command setValue:[item valueForKey:@"sortOrder"] forKey:@"sortOrder"]; if ([item valueForKey:@"inline"] != nil) { [command setValue:[item valueForKey:@"inline"] forKey:@"inline"]; } if ([item valueForKey:@"interpreter"] != nil) { [command setValue:[item valueForKey:@"interpreter"] forKey:@"interpreter"]; } [[collection mutableSetValueForKey:@"commands"] addObject:command]; } [FRAManagedObjectContext processPendingChanges]; [commandCollectionsArrayController setSelectedObjects:@[collection]]; } - (void)exportCommands { NSSavePanel *savePanel = [NSSavePanel savePanel]; [savePanel setAllowedFileTypes: @[@"erbeleCommands"]]; [savePanel setDirectoryURL: [NSURL fileURLWithPath: [FRAInterface whichDirectoryForSave]]]; [savePanel setNameFieldStringValue: [[commandCollectionsArrayController selectedObjects][0] valueForKey:@"name"]]; [savePanel beginSheetModalForWindow: self.commandsWindow completionHandler: (^(NSInteger returnCode) { if (returnCode == NSModalResponseOK) { id collection = [self.commandCollectionsArrayController selectedObjects][0]; NSMutableArray *exportArray 
= [NSMutableArray array]; NSEnumerator *enumerator = [[collection mutableSetValueForKey:@"commands"] objectEnumerator]; for (NSDictionary *item in enumerator) { NSMutableDictionary *command = [[NSMutableDictionary alloc] init]; command[@"name"] = [item valueForKey:@"name"]; command[@"text"] = [item valueForKey:@"text"]; command[@"collectionName"] = [collection valueForKey:@"name"]; command[@"shortcutDisplayString"] = [item valueForKey:@"shortcutDisplayString"]; command[@"shortcutMenuItemKeyString"] = [item valueForKey:@"shortcutMenuItemKeyString"]; command[@"shortcutModifier"] = [item valueForKey:@"shortcutModifier"]; command[@"sortOrder"] = [item valueForKey:@"sortOrder"]; command[@"version"] = @3; command[@"inline"] = [item valueForKey:@"inline"]; command[@"interpreter"] = [item valueForKey:@"interpreter"]; [exportArray addObject: command]; } NSData *data = [NSKeyedArchiver archivedDataWithRootObject:exportArray]; [FRAOpenSave performDataSaveWith: data path: [[savePanel URL] path]]; } [self.commandsWindow makeKeyAndOrderFront:nil]; })]; } - (void)windowWillClose:(NSNotification *)aNotification { [commandCollectionsArrayController commitEditing]; [commandsArrayController commitEditing]; } - (NSManagedObjectContext *)managedObjectContext { return FRAManagedObjectContext; } - (IBAction)runAction:(id)sender { [self runCommand:[commandsArrayController selectedObjects][0]]; } - (IBAction)insertPathAction:(id)sender { id document = FRACurrentDocument; if (document == nil || [document valueForKey:@"path"] == nil) { NSBeep(); return; } [commandsTextView insertText:[document valueForKey:@"path"] replacementRange:[commandsTextView selectedRange]]; } - (IBAction)insertDirectoryAction:(id)sender { id document = FRACurrentDocument; if (document == nil || [document valueForKey:@"path"] == nil) { NSBeep(); return; } [commandsTextView insertText:[[document valueForKey:@"path"] stringByDeletingLastPathComponent] replacementRange:[commandsTextView selectedRange]]; } - (NSString 
*)commandToRunFromString:(NSString *)string { NSMutableString *returnString = [NSMutableString stringWithString:string]; id document = FRACurrentDocument; if (document == nil || [[document valueForKey:@"isNewDocument"] boolValue] == YES || [document valueForKey:@"path"] == nil) { [returnString replaceOccurrencesOfString:@"%%p" withString:@"" options:NSLiteralSearch range:NSMakeRange(0, [returnString length])]; [returnString replaceOccurrencesOfString:@"%%d" withString:@"" options:NSLiteralSearch range:NSMakeRange(0, [returnString length])]; } else { NSString *path = [NSString stringWithFormat:@"\"%@\"", [document valueForKey:@"path"]]; // If there's a space in the path NSString *directory; if ([[FRADefaults valueForKey:@"PutQuotesAroundDirectory"] boolValue] == YES) { directory = [NSString stringWithFormat:@"\"%@\"", [[document valueForKey:@"path"] stringByDeletingLastPathComponent]]; } else { directory = [NSString stringWithFormat:@"%@", [[document valueForKey:@"path"] stringByDeletingLastPathComponent]]; } [returnString replaceOccurrencesOfString:@"%%p" withString:path options:NSLiteralSearch range:NSMakeRange(0, [returnString length])]; [returnString replaceOccurrencesOfString:@"%%d" withString:directory options:NSLiteralSearch range:NSMakeRange(0, [returnString length])]; } if ([FRACurrentTextView selectedRange].length > 0) { [returnString replaceOccurrencesOfString:@"%%s" withString:[FRACurrentText substringWithRange:[FRACurrentTextView selectedRange]] options:NSLiteralSearch range:NSMakeRange(0, [returnString length])]; } [returnString replaceOccurrencesOfString:@" ~" withString:[NSString stringWithFormat:@" %@", NSHomeDirectory()] options:NSLiteralSearch range:NSMakeRange(0, [returnString length])]; return returnString; } - (void)runCommand:(id)command { [commandCollectionsArrayController commitEditing]; [commandsArrayController commitEditing]; isCommandRunning = YES; if ([command valueForKey:@"inline"] != nil && [[command valueForKey:@"inline"] boolValue] 
== YES) { currentCommandShouldBeInsertedInline = YES; } else { currentCommandShouldBeInsertedInline = NO; } NSString *commandString = [command valueForKey:@"text"]; if (commandString == nil || [commandString length] < 1) { NSBeep(); return; } if ([commandString length] > 2 && [commandString rangeOfString:@"#!" options:NSLiteralSearch range:NSMakeRange(0, 2)].location != NSNotFound) { // The command starts with a shebang so run it specially NSString *selectionStringPath; NSMutableString *commandToWrite = [NSMutableString stringWithString:commandString]; if ([FRACurrentTextView selectedRange].length > 0 && [commandString rangeOfString:@"%%s"].location != NSNotFound) { selectionStringPath = [FRABasic genererateTemporaryPath]; NSString *selectionString = [FRACurrentText substringWithRange:[FRACurrentTextView selectedRange]]; [selectionString writeToFile:selectionStringPath atomically:YES encoding:NSUTF8StringEncoding error:nil]; [temporaryFilesArray addObject:selectionStringPath]; [commandToWrite replaceOccurrencesOfString:@"%%s" withString:selectionStringPath options:NSLiteralSearch range:NSMakeRange(0, [commandToWrite length])]; } id document = FRACurrentDocument; NSString *path = [NSString stringWithFormat:@"\"%@\"", [document valueForKey:@"path"]]; // If there's a space in the path NSString *directory = [NSString stringWithFormat:@"\"%@\"", [[document valueForKey:@"path"] stringByDeletingLastPathComponent]]; [commandToWrite replaceOccurrencesOfString:@"%%p" withString:path options:NSLiteralSearch range:NSMakeRange(0, [commandToWrite length])]; [commandToWrite replaceOccurrencesOfString:@"%%d" withString:directory options:NSLiteralSearch range:NSMakeRange(0, [commandToWrite length])]; NSString *commandPath = [FRABasic genererateTemporaryPath]; [commandToWrite writeToFile:commandPath atomically:YES encoding:NSUTF8StringEncoding error:nil]; [temporaryFilesArray addObject:commandPath]; if ([command valueForKey:@"interpreter"] != nil && ![[command 
valueForKey:@"interpreter"] isEqualToString:@""]) { [FRAVarious performCommandAsynchronously:[NSString stringWithFormat:@"%@ %@", [command valueForKey:@"interpreter"], commandPath]]; } else { [FRAVarious performCommandAsynchronously:[NSString stringWithFormat:@"%@ %@", [FRADefaults valueForKey:@"RunText"], commandPath]]; } if (checkIfTemporaryFilesCanBeDeletedTimer != nil) { [checkIfTemporaryFilesCanBeDeletedTimer invalidate]; } checkIfTemporaryFilesCanBeDeletedTimer = [NSTimer scheduledTimerWithTimeInterval:5 target:self selector:@selector(checkIfTemporaryFilesCanBeDeleted) userInfo:nil repeats:YES]; } else { [FRAVarious performCommandAsynchronously:[self commandToRunFromString:commandString]]; } } - (BOOL)currentCommandShouldBeInsertedInline { return currentCommandShouldBeInsertedInline; } - (void)setCommandRunning:(BOOL)flag { isCommandRunning = flag; } - (void)checkIfTemporaryFilesCanBeDeleted { if (isCommandRunning == YES) { return; } if (checkIfTemporaryFilesCanBeDeletedTimer != nil) { [checkIfTemporaryFilesCanBeDeletedTimer invalidate]; checkIfTemporaryFilesCanBeDeletedTimer = nil; } [self clearAnyTemporaryFiles]; } - (void)clearAnyTemporaryFiles { NSArray *enumeratorArray = [NSArray arrayWithArray:temporaryFilesArray]; id item; NSFileManager *fileManager = [NSFileManager defaultManager]; for (item in enumeratorArray) { if ([fileManager fileExistsAtPath:item]) { [fileManager removeItemAtPath:item error:nil]; } [temporaryFilesArray removeObject:item]; } } - (void)tableView:(NSTableView *)aTableView willDisplayCell:(id)aCell forTableColumn:(NSTableColumn *)aTableColumn row:(NSInteger)rowIndex { if ([[FRADefaults valueForKey:@"SizeOfDocumentsListTextPopUp"] integerValue] == 0) { [aCell setFont:[NSFont systemFontOfSize:11.0]]; } else { [aCell setFont:[NSFont systemFontOfSize:13.0]]; } } - (NSArray *)toolbarAllowedItemIdentifiers:(NSToolbar *)toolbar { return @[@"NewCommandCollectionToolbarItem", @"NewCommandToolbarItem", @"FilterCommandsToolbarItem", 
@"RunCommandToolbarItem", NSToolbarFlexibleSpaceItemIdentifier]; } - (NSArray *)toolbarDefaultItemIdentifiers:(NSToolbar *)toolbar { return @[@"NewCommandCollectionToolbarItem", NSToolbarFlexibleSpaceItemIdentifier, @"RunCommandToolbarItem", NSToolbarFlexibleSpaceItemIdentifier, @"FilterCommandsToolbarItem", @"NewCommandToolbarItem"]; } - (NSToolbarItem *)toolbar:(NSToolbar *)toolbar itemForItemIdentifier:(NSString *)itemIdentifier willBeInsertedIntoToolbar:(BOOL)willBeInserted { if ([itemIdentifier isEqualToString:@"NewCommandCollectionToolbarItem"]) { NSImage *newCommandCollectionImage = [[NSImage alloc] initWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"FRANewCollectionIcon" ofType:@"pdf" inDirectory:@"Toolbar Icons"]]; [[newCommandCollectionImage representations][0] setAlpha:YES]; return [NSToolbarItem createToolbarItemWithIdentifier:itemIdentifier name:NEW_COLLECTION_STRING image:newCommandCollectionImage action:@selector(newCollectionAction:) tag:0 target:self]; } else if ([itemIdentifier isEqualToString:@"NewCommandToolbarItem"]) { NSImage *newCommandImage = [[NSImage alloc] initWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"FRANewIcon" ofType:@"pdf" inDirectory:@"Toolbar Icons"]]; [[newCommandImage representations][0] setAlpha:YES]; return [NSToolbarItem createToolbarItemWithIdentifier:itemIdentifier name:NSLocalizedStringFromTable(@"New Command", @"Localizable3", @"New Command") image:newCommandImage action:@selector(newCommandAction:) tag:0 target:self]; } else if ([itemIdentifier isEqualToString:@"RunCommandToolbarItem"]) { NSImage *runCommandImage = [[NSImage alloc] initWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"FRARunIcon" ofType:@"pdf" inDirectory:@"Toolbar Icons"]]; [[runCommandImage representations][0] setAlpha:YES]; return [NSToolbarItem createToolbarItemWithIdentifier:itemIdentifier name:NSLocalizedStringFromTable(@"Run", @"Localizable3", @"Run") image:runCommandImage action:@selector(runAction:) tag:0 
target:self]; } else if ([itemIdentifier isEqualToString:@"FilterCommandsToolbarItem"]) { return [NSToolbarItem createSeachFieldToolbarItemWithIdentifier:itemIdentifier name:FILTER_STRING view:commandsFilterView]; } return nil; } @end
{ "content_hash": "7dc451f1b7fd4ccade22b863a348a04a", "timestamp": "", "source": "github", "line_count": 495, "max_line_length": 306, "avg_line_length": 42.04242424242424, "alnum_prop": 0.6991014367401854, "repo_name": "abentele/Erbele", "id": "60b3c77e9ee2dac35d4916f79a0986b2808c4d55", "size": "20811", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Classes/FRACommandsController.m", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "289494" }, { "name": "C++", "bytes": "63570" }, { "name": "Objective-C", "bytes": "861890" }, { "name": "Perl", "bytes": "101486" }, { "name": "Rich Text Format", "bytes": "6878" }, { "name": "Roff", "bytes": "1407" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <!-- https://github.com/wildfly/quickstart/blob/master/helloworld-rs/pom.xml --> <groupId>jee6-jaxwsrs-mongodb-template</groupId> <artifactId>jee6-jaxwsrs-mongodb-template</artifactId> <packaging>war</packaging> <version>1.0</version> <name>jee6-jaxwsrs-mongodb-template</name> <properties> <!-- Explicitly declaring the source encoding eliminates the following message: --> <!-- [WARNING] Using platform encoding (UTF-8 actually) to copy filtered resources, i.e. build is platform dependent! --> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <!-- JBoss dependency versions --> <version.wildfly.maven.plugin>1.0.1.Final</version.wildfly.maven.plugin> <version.jboss.spec.javaee.7.0>1.0.0.Final</version.jboss.spec.javaee.7.0> <!-- other plugin versions --> <version.compiler.plugin>3.1</version.compiler.plugin> <version.war.plugin>2.3</version.war.plugin> <!-- maven-compiler-plugin --> <maven.compiler.target>1.7</maven.compiler.target> <maven.compiler.source>1.7</maven.compiler.source> <!-- logging --> <org.slf4j-version>1.6.6</org.slf4j-version> <!-- commons --> <commons.io-version>2.4</commons.io-version> <commons.codec-version>1.9</commons.codec-version> <commons.lang-version>2.6</commons.lang-version> <!-- pdfbox --> <org.apache.pdfboxj-version>1.8.6</org.apache.pdfboxj-version> <!-- itext --> <com.itextpdf-version>5.0.6</com.itextpdf-version> <net.sf.jasperreports-version>5.6.0</net.sf.jasperreports-version> </properties> <dependencyManagement> <dependencies> <!-- Define the version of JBoss' Java EE 7 APIs we want to import. 
Any dependencies from org.jboss.spec will have their version defined by this BOM --> <!-- JBoss distributes a complete set of Java EE 7 APIs including a Bill of Materials (BOM). A BOM specifies the versions of a "stack" (or a collection) of artifacts. We use this here so that we always get the correct versions of artifacts. Here we use the jboss-javaee-7.0 stack (you can read this as the JBoss stack of the Java EE 7 APIs). You can actually use this stack with any version of WildFly that implements Java EE 7, not just WildFly 8! --> <dependency> <groupId>org.jboss.spec</groupId> <artifactId>jboss-javaee-7.0</artifactId> <version>${version.jboss.spec.javaee.7.0}</version> <type>pom</type> <scope>import</scope> </dependency> </dependencies> </dependencyManagement> <dependencies> <!-- Import the CDI API, we use provided scope as the API is included in JBoss WildFly --> <dependency> <groupId>javax.enterprise</groupId> <artifactId>cdi-api</artifactId> <scope>provided</scope> </dependency> <!-- Import the Common Annotations API (JSR-250), we use provided scope as the API is included in JBoss WildFly --> <dependency> <groupId>org.jboss.spec.javax.annotation</groupId> <artifactId>jboss-annotations-api_1.2_spec</artifactId> <scope>provided</scope> </dependency> <!-- Import the JSON API to build JSON Objects --> <dependency> <groupId>org.jboss.spec.javax.json</groupId> <artifactId>jboss-json-api_1.0_spec</artifactId> <scope>provided</scope> </dependency> <!-- Import the JAX-RS API, we use provided scope as the API is included in JBoss WildFly --> <dependency> <groupId>org.jboss.resteasy</groupId> <artifactId>jaxrs-api</artifactId> <scope>provided</scope> </dependency> <dependency> <groupId>javax</groupId> <artifactId>javaee-api</artifactId> <version>7.0</version> <scope>provided</scope> </dependency> <!-- bean validation --> <!-- https://repository.jboss.org/nexus/index.html#nexus-search;quick~mysql --> <!-- <dependency> <groupId>mysql</groupId> 
<artifactId>mysql-connector-java</artifactId> <version>5.1.30</version> </dependency> --> <!-- http://www.mongodb.org/ --> <dependency> <groupId>org.mongodb</groupId> <artifactId>mongo-java-driver</artifactId> <version>2.12.2</version> </dependency> <!-- <dependency> <groupId>org.eclipse.persistence</groupId> <artifactId>org.eclipse.persistence.nosql</artifactId> <version>2.5.2-M1</version> </dependency> --> <!-- Hibernate OGM dependency - not complete enough - queries - too many deps --> <!-- dependency> <groupId>org.hibernate.ogm</groupId> <artifactId>hibernate-ogm-mongodb</artifactId> <version>4.1.0.Beta4</version> </dependency --> <!-- https://sites.google.com/site/gson/gson-user-guide --> <dependency> <groupId>com.google.code.gson</groupId> <artifactId>gson</artifactId> <version>2.2.4</version> </dependency> <!-- commons --> <dependency> <groupId>commons-lang</groupId> <artifactId>commons-lang</artifactId> <version>${commons.lang-version}</version> </dependency> <dependency> <groupId>commons-io</groupId> <artifactId>commons-io</artifactId> <version>${commons.io-version}</version> </dependency> <dependency> <groupId>commons-codec</groupId> <artifactId>commons-codec</artifactId> <version>${commons.codec-version}</version> </dependency> <!-- Logging --> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-api</artifactId> <version>${org.slf4j-version}</version> </dependency> <dependency> <groupId>org.slf4j</groupId> <artifactId>jcl-over-slf4j</artifactId> <version>${org.slf4j-version}</version> <scope>runtime</scope> </dependency> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-log4j12</artifactId> <version>${org.slf4j-version}</version> <scope>runtime</scope> </dependency> <!-- xml --> <dependency> <groupId>org.jdom</groupId> <artifactId>jdom2</artifactId> <version>2.0.5</version> </dependency> <!-- pdf --> <dependency> <groupId>org.apache.pdfbox</groupId> <artifactId>pdfbox</artifactId> <version>${org.apache.pdfboxj-version}</version> 
</dependency> <dependency> <groupId>com.itextpdf</groupId> <artifactId>itextpdf</artifactId> <version>${com.itextpdf-version}</version> </dependency> <dependency> <groupId>com.lowagie</groupId> <artifactId>itext</artifactId> <version>4.2.1</version> </dependency> <!-- Needed for variable documents --> <dependency> <groupId>net.sf.jasperreports</groupId> <artifactId>jasperreports</artifactId> <version>${net.sf.jasperreports-version}</version> <exclusions> <exclusion> <groupId>org.olap4j</groupId> <artifactId>olap4j</artifactId> </exclusion> </exclusions> </dependency> <!-- test --> <dependency> <groupId>org.testng</groupId> <artifactId>testng</artifactId> <version>6.8.8</version> <scope>test</scope> </dependency> </dependencies> <build> <!-- Set the name of the war, used as the context root when the app is deployed --> <finalName>${project.artifactId}</finalName> <plugins> <plugin> <artifactId>maven-war-plugin</artifactId> <version>${version.war.plugin}</version> <configuration> <!-- Java EE 7 doesn't require web.xml, Maven needs to catch up! --> <failOnMissingWebXml>false</failOnMissingWebXml> </configuration> </plugin> <!-- WildFly plugin to deploy war --> <plugin> <groupId>org.wildfly.plugins</groupId> <artifactId>wildfly-maven-plugin</artifactId> <version>${version.wildfly.maven.plugin}</version> </plugin> <!-- Compiler plugin enforces Java 1.6 compatibility and activates annotation processors --> <plugin> <artifactId>maven-compiler-plugin</artifactId> <version>${version.compiler.plugin}</version> <configuration> <source>${maven.compiler.source}</source> <target>${maven.compiler.target}</target> </configuration> </plugin> </plugins> </build> <profiles> <profile> <!-- When built in OpenShift the 'openshift' profile will be used when invoking mvn. --> <!-- Use this profile for any OpenShift specific customization your app will need. --> <!-- By default that is to put the resulting archive into the 'deployments' folder. 
--> <!-- http://maven.apache.org/guides/mini/guide-building-for-different-environments.html --> <id>openshift</id> <build> <finalName>smefunctions</finalName> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-war-plugin</artifactId> <version>${version.war.plugin}</version> <configuration> <failOnMissingWebXml>false</failOnMissingWebXml> <outputDirectory>deployments</outputDirectory> <warName>ROOT</warName> </configuration> </plugin> </plugins> </build> </profile> </profiles> </project>
{ "content_hash": "2276b11b9ca023dc56141644ee3d62cc", "timestamp": "", "source": "github", "line_count": 283, "max_line_length": 118, "avg_line_length": 32.89399293286219, "alnum_prop": 0.6625845955526909, "repo_name": "matthogan/jee6-jaxwsrs-mongodb-template", "id": "62a0f25248dd98df7071ed4d54dcf60681be830f", "size": "9309", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pom.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "69513" } ], "symlink_target": "" }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> <title>itree: Not compatible 👼</title> <link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" /> <link href="../../../../../bootstrap.min.css" rel="stylesheet"> <link href="../../../../../bootstrap-custom.css" rel="stylesheet"> <link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet"> <script src="../../../../../moment.min.js"></script> <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries --> <!-- WARNING: Respond.js doesn't work if you view the page via file:// --> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script> <script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script> <![endif]--> </head> <body> <div class="container"> <div class="navbar navbar-default" role="navigation"> <div class="container-fluid"> <div class="navbar-header"> <a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a> </div> <div id="navbar" class="collapse navbar-collapse"> <ul class="nav navbar-nav"> <li><a href="../..">clean / released</a></li> <li class="active"><a href="">8.5.1 / itree - 3.2.0</a></li> </ul> </div> </div> </div> <div class="article"> <div class="row"> <div class="col-md-12"> <a href="../..">« Up</a> <h1> itree <small> 3.2.0 <span class="label label-info">Not compatible 👼</span> </small> </h1> <p>📅 <em><script>document.write(moment("2022-10-25 16:48:06 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-10-25 16:48:06 UTC)</em><p> <h2>Context</h2> <pre># Packages matching: installed # Name # Installed # Synopsis base-bigarray base base-num base Num library distributed with the OCaml compiler base-threads base base-unix base camlp5 7.14 Preprocessor-pretty-printer of OCaml conf-findutils 1 Virtual package relying on findutils conf-perl 2 
Virtual package relying on perl coq 8.5.1 Formal proof management system num 0 The Num library for arbitrary-precision integer and rational arithmetic ocaml 4.04.2 The OCaml compiler (virtual package) ocaml-base-compiler 4.04.2 Official 4.04.2 release ocaml-config 1 OCaml Switch Configuration # opam file: opam-version: &quot;2.0&quot; maintainer: &quot;Li-yao Xia &lt;lysxia@gmail.com&gt;&quot; synopsis: &quot;A Library for Representing Recursive and Impure Programs in Coq&quot; homepage: &quot;https://github.com/DeepSpec/InteractionTrees&quot; dev-repo: &quot;git+https://github.com/DeepSpec/InteractionTrees&quot; bug-reports: &quot;https://github.com/DeepSpec/InteractionTrees/issues&quot; license: &quot;MIT&quot; build: [ make &quot;-j%{jobs}%&quot; ] install: [ make &quot;install&quot; ] run-test: [ make &quot;-j%{jobs}%&quot; &quot;all&quot; ] depends: [ &quot;coq&quot; {&gt;= &quot;8.8&quot; &amp; &lt; &quot;8.14~&quot;} &quot;coq-ext-lib&quot; {&gt;= &quot;0.11.1&quot; &amp; &lt; &quot;0.12&quot;} &quot;coq-paco&quot; {&gt;= &quot;4.0.0&quot; &amp; &lt; &quot;4.2.0&quot;} &quot;ocamlbuild&quot; {with-test} ] authors: [ &quot;Li-yao Xia &lt;lysxia@gmail.com&gt;&quot; &quot;Yannick Zakowski &lt;zakowski@seas.upenn.edu&gt;&quot; &quot;Paul He &lt;paulhe@seas.upenn.edu&gt;&quot; &quot;Chung-Kil Hur &lt;gil.hur@gmail.com&gt;&quot; &quot;Gregory Malecha &lt;gmalecha@gmail.com&gt;&quot; &quot;Steve Zdancewic &lt;stevez@cis.upenn.edu&gt;&quot; &quot;Benjamin C. 
Pierce &lt;bcpierce@cis.upenn.edu&gt;&quot; ] tags: [ &quot;org:deepspec&quot; &quot;logpath: ITree&quot; &quot;date: 2020-07-21&quot; ] url { http: &quot;https://github.com/DeepSpec/InteractionTrees/archive/3.2.0.tar.gz&quot; checksum: &quot;sha512=7d29f559c7e836fc894eae3eb93441e3a1bae6af5299cd4f374015c4f4dd264d56bf9fdc432bf949331a86314f5149555e00ed70c9ab16f6db147ad5038fb3c3&quot; } </pre> <h2>Lint</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Dry install 🏜️</h2> <p>Dry install with the current Coq version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam install -y --show-action coq-itree.3.2.0 coq.8.5.1</code></dd> <dt>Return code</dt> <dd>5120</dd> <dt>Output</dt> <dd><pre>[NOTE] Package coq is already installed (current version is 8.5.1). The following dependencies couldn&#39;t be met: - coq-itree -&gt; coq &gt;= 8.8 -&gt; ocaml &gt;= 4.05.0 base of this switch (use `--unlock-base&#39; to force) Your request can&#39;t be satisfied: - No available version of coq satisfies the constraints No solution found, exiting </pre></dd> </dl> <p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-itree.3.2.0</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Install dependencies</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Install 🚀</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Installation size</h2> <p>No files were installed.</p> <h2>Uninstall 🧹</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Missing removes</dt> <dd> none </dd> 
<dt>Wrong removes</dt> <dd> none </dd> </dl> </div> </div> </div> <hr/> <div class="footer"> <p class="text-center"> Sources are on <a href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣 </p> </div> </div> <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script> <script src="../../../../../bootstrap.min.js"></script> </body> </html>
{ "content_hash": "5b274b39a789cc573287e08e9ba16df9", "timestamp": "", "source": "github", "line_count": 176, "max_line_length": 159, "avg_line_length": 41.73295454545455, "alnum_prop": 0.5484002722940776, "repo_name": "coq-bench/coq-bench.github.io", "id": "775a1d3ab5808e1ae8113d5053553bf1ca9fce25", "size": "7370", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.04.2-2.0.5/released/8.5.1/itree/3.2.0.html", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
title: any1 type: products image: /img/Screen Shot 2017-05-09 at 11.56.54 AM.png heading: y1 description: lksadjf lkasdjf lksajdf lksdaj flksadj flksa fdj main: heading: Foo Bar BAz description: |- ***This is i a thing***kjh hjk kj # Blah Blah ## Blah![undefined](undefined) ### Baah image1: alt: kkkk ---
{ "content_hash": "715248046125a4e197d13cd52fb45a10", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 61, "avg_line_length": 22.2, "alnum_prop": 0.6636636636636637, "repo_name": "pblack/kaldi-hugo-cms-template", "id": "fe377d9265cbcb0194816438a4d6cc2aebfb5b17", "size": "337", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "site/content/pages2/any1.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "94394" }, { "name": "HTML", "bytes": "18889" }, { "name": "JavaScript", "bytes": "10014" } ], "symlink_target": "" }
SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
{ "content_hash": "79aac98cf90fa5940601b879b16b04ff", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 39, "avg_line_length": 10.23076923076923, "alnum_prop": 0.6917293233082706, "repo_name": "mdoering/backbone", "id": "9e9b02b16ad2c0fd29dd0c293c904d155896b69a", "size": "184", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Fabales/Fabaceae/Cajanus/Cajanus cajanifolius/ Syn. Atylosia cajanifolia/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
const { _electron: electron } = require('playwright') const { test: it } = require('@playwright/test') const { describe } = it it.setTimeout(100000) const log = require('./common/log') const { expect } = require('chai') const delay = require('./common/wait') const nanoid = require('./common/uid') const appOptions = require('./common/app-options') const extendClient = require('./common/client-extend') describe('local file manager', function () { it('should open window and basic sftp works', async function () { const electronApp = await electron.launch(appOptions) const client = await electronApp.firstWindow() extendClient(client, electronApp) await delay(3500) // click sftp tab await client.click('.session-current .term-sftp-tabs .type-tab', 1) await delay(1500) // make a local folder let localFileListBefore = await client.elements('.session-current .file-list.local .sftp-item') localFileListBefore = await localFileListBefore.count() await client.rightClick('.session-current .file-list.local .real-file-item', 10, 10) await delay(3300) log('add folder') await client.click('.context-menu .anticon-folder-add') await delay(200) const fname = '00000test-electerm' + nanoid() await client.setValue('.session-current .sftp-item input', fname) await client.click('.session-current .sftp-title-wrap') await delay(2500) let localFileList = await client.elements('.session-current .file-list.local .sftp-item') localFileList = await localFileList.count() expect(localFileList).equal(localFileListBefore + 1) // enter folder await client.doubleClick('.session-current .file-list.local .sftp-item:not(.virtual-file-unit) .file-bg') await delay(5000) const pathCurrentLocal = await client.getValue('.session-current .sftp-local-section .sftp-title input') expect(pathCurrentLocal.includes(fname)).equal(true) let localFileList0 = await client.elements('.session-current .file-list.local .sftp-item') localFileList0 = await localFileList0.count() expect(localFileList0).equal(1) // new file await 
delay(200) await client.rightClick('.session-current .file-list.local .sftp-item', 10, 10) await delay(200) log('add file') await client.click('.context-menu .anticon-file-add') await delay(200) const fname00 = '00000test-electerm' + nanoid() await client.setValue('.session-current .sftp-item input', fname00) await client.doubleClick('.session-current .sftp-title-wrap') await delay(2500) let localFileList00 = await client.elements('.session-current .file-list.local .sftp-item') localFileList00 = await localFileList00.count() expect(localFileList00).equal(2) // select all and del Control await client.rightClick('.session-current .file-list.local .real-file-item', 10, 10) await delay(200) log('select all') await client.click('.context-menu .anticon-check-square') await delay(120) await client.keyboard.press('Delete') await delay(120) await client.keyboard.press('Enter') await delay(4000) let localFileList11 = await client.elements('.session-current .file-list.local .sftp-item') localFileList11 = await localFileList11.count() expect(localFileList11).equal(1) // goto parent await delay(20) log('goto parent') await client.click('.session-current .sftp-local-section .anticon-arrow-up') await delay(4000) let localFileList1 = await client.elements('.session-current .file-list.local .sftp-item') localFileList1 = await localFileList1.count() expect(localFileList1).equal(localFileList) // del folder log('del folder') await delay(100) await client.click('.session-current .file-list.local .real-file-item') await delay(200) await client.keyboard.press('Delete') await delay(260) await client.keyboard.press('Enter') await delay(7000) let localFileList2 = await client.elements('.session-current .file-list.local .sftp-item') localFileList2 = await localFileList2.count() expect(localFileList2).equal(localFileListBefore) await electronApp.close().catch(console.log) }) })
{ "content_hash": "30f509bc024d709d60f637794264c093", "timestamp": "", "source": "github", "line_count": 104, "max_line_length": 109, "avg_line_length": 40.49038461538461, "alnum_prop": 0.708857753502731, "repo_name": "electerm/electerm", "id": "ab0eb9984cc3ad5cc21a3f39146fbb4c02ff4499", "size": "4211", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/e2e/009.basic.file-manager.spec.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "728887" }, { "name": "Less", "bytes": "2039" }, { "name": "Pug", "bytes": "1594" }, { "name": "Shell", "bytes": "546" }, { "name": "Stylus", "bytes": "28458" } ], "symlink_target": "" }
describe("The level/geometry/Line module", function() { var test = require('../../setup'); var expect = test.require('chai').expect; var Line = test.require('level/geometry/Line'); var Vector = test.require('math/Vector'); var Entity = test.require('entity/Entity'); var ERROR_ALLOWED = 0.0001; function createCircleCollision(lineX1, lineY1, lineX2, lineY2, x1, y1, x2, y2, velX, velY, radius, bounce) { var line = new Line(lineX1, lineY1, lineX2, lineY2); var entity = new Entity({ x: x2, y: y2, radius: radius, bounce: bounce }); entity.prevPos.copy(x1, y1); entity.vel.copy(velX, velY); return line.checkForCollisionWithEntity(entity); } function createPointCollision(lineX1, lineY1, lineX2, lineY2, x1, y1, x2, y2, velX, velY, bounce) { var line = new Line(lineX1, lineY1, lineX2, lineY2); var entity = new Entity({ x: x2, y: y2, radius: 0, bounce: bounce }); entity.prevPos.copy(x1, y1); entity.vel.copy(velX, velY); return line.checkForCollisionWithEntity(entity); } describe("checkForCollisionWithEntity method", function() { describe("when the entity has a radius", function() { describe("returns the correct contact point", function() { it("with a vertical line and a horizontal-moving entity", function() { var collision = createCircleCollision(100,100,100,50, 25,75,125,75, 50,0, 10, 1.0); expect(collision).to.be.an('object'); expect(collision.contactPoint.x).to.be.within(90 - ERROR_ALLOWED, 90 + ERROR_ALLOWED); expect(collision.contactPoint.y).to.be.within(75 - ERROR_ALLOWED, 75 + ERROR_ALLOWED); }); it("with a horizontal line and a vertical-moving entity", function() { var collision = createCircleCollision(100,-25,50,-25, 60,0,60,-200, 0,-50, 20, 1.0); expect(collision).to.be.an('object'); expect(collision.contactPoint.x).to.be.within(60 - ERROR_ALLOWED, 60 + ERROR_ALLOWED); expect(collision.contactPoint.y).to.be.within(-5 - ERROR_ALLOWED, -5 + ERROR_ALLOWED); }); it("with an angled line and an angled-moving entity", function() { var collision = 
createCircleCollision(500,500,400,400, 350,550,550,350, 75,-75, 15, 1.0); expect(collision).to.be.an('object'); var x = 450 - 15 / Math.sqrt(2), y = 450 + 15 / Math.sqrt(2); expect(collision.contactPoint.x).to.be.within(x - ERROR_ALLOWED, x + ERROR_ALLOWED); expect(collision.contactPoint.y).to.be.within(y - ERROR_ALLOWED, y + ERROR_ALLOWED); }); }); describe("returns the correct distance traveled pre-collision", function() { it("with a vertical line and a horizontal-moving entity", function() { var collision = createCircleCollision(100,100,100,50, 25,75,125,75, 50,0, 10, 1.0); expect(collision).to.be.an('object'); expect(collision.distTraveled).to.be.within(75 - 10 - ERROR_ALLOWED, 75 - 10 + ERROR_ALLOWED); }); it("with a horizontal line and a vertical-moving entity", function() { var collision = createCircleCollision(100,-25,50,-25, 60,0,60,-200, 0,-50, 20, 1.0); expect(collision).to.be.an('object'); expect(collision.distTraveled).to.be.within(25 - 20 - ERROR_ALLOWED, 25 - 20 + ERROR_ALLOWED); }); it("with an angled line and an angled-moving entity", function() { var collision = createCircleCollision(500,500,400,400, 350,550,550,350, 75,-75, 15, 1.0); expect(collision).to.be.an('object'); var dx = (450 - 15 / Math.sqrt(2) - 350), dy = (450 + 15 / Math.sqrt(2) - 550); var dist = Math.sqrt(dx * dx + dy * dy); expect(collision.distTraveled).to.be.within(dist - ERROR_ALLOWED, dist + ERROR_ALLOWED); }); }); describe("returns the correct final point based on bounce", function() { describe("with a bounce of 0.0", function() { it("with a vertical line and a horizontal-moving entity", function() { var collision = createCircleCollision(100,100,100,50, 25,75,125,75, 50,0, 10, 0.0); expect(collision).to.be.an('object'); expect(collision.finalPoint.x).to.be.within(90 - ERROR_ALLOWED, 90 + ERROR_ALLOWED); expect(collision.finalPoint.y).to.be.within(75 - ERROR_ALLOWED, 75 + ERROR_ALLOWED); }); it("with a horizontal line and a vertical-moving entity", function() { var collision = 
createCircleCollision(100,-25,50,-25, 60,0,60,-200, 0,-50, 20, 0.0); expect(collision).to.be.an('object'); expect(collision.finalPoint.x).to.be.within(60 - ERROR_ALLOWED, 60 + ERROR_ALLOWED); expect(collision.finalPoint.y).to.be.within(-5 - ERROR_ALLOWED, -5 + ERROR_ALLOWED); }); it("with an angled line and an angled-moving entity", function() { var collision = createCircleCollision(500,500,400,400, 350,550,550,350, 75,-75, 15, 0.0); expect(collision).to.be.an('object'); var x = 450 - 15 / Math.sqrt(2), y = 450 + 15 / Math.sqrt(2); expect(collision.finalPoint.x).to.be.within(x - ERROR_ALLOWED, x + ERROR_ALLOWED); expect(collision.finalPoint.y).to.be.within(y - ERROR_ALLOWED, y + ERROR_ALLOWED); }); }); describe("with a bounce of 0.25", function() { it("with a vertical line and a horizontal-moving entity", function() { var collision = createCircleCollision(100,100,100,50, 25,75,125,75, 50,0, 10, 0.25); expect(collision).to.be.an('object'); expect(collision.finalPoint.x).to.be.within(81.25 - ERROR_ALLOWED, 81.25 + ERROR_ALLOWED); expect(collision.finalPoint.y).to.be.within(75 - ERROR_ALLOWED, 75 + ERROR_ALLOWED); }); it("with a horizontal line and a vertical-moving entity", function() { var collision = createCircleCollision(100,-25,50,-25, 60,0,60,-200, 0,-50, 20, 0.25); expect(collision).to.be.an('object'); expect(collision.finalPoint.x).to.be.within(60 - ERROR_ALLOWED, 60 + ERROR_ALLOWED); expect(collision.finalPoint.y).to.be.within(43.75 - ERROR_ALLOWED, 43.75 + ERROR_ALLOWED); }); }); describe("with a bounce of 1.0", function() { it("with a vertical line and a horizontal-moving entity", function() { var collision = createCircleCollision(100,100,100,50, 25,75,125,75, 50,0, 10, 1.0); expect(collision).to.be.an('object'); expect(collision.finalPoint.x).to.be.within(55 - ERROR_ALLOWED, 55 + ERROR_ALLOWED); expect(collision.finalPoint.y).to.be.within(75 - ERROR_ALLOWED, 75 + ERROR_ALLOWED); }); it("with a horizontal line and a vertical-moving entity", function() { var 
collision = createCircleCollision(100,-25,50,-25, 60,0,60,-200, 0,-50, 20, 1.0); expect(collision).to.be.an('object'); expect(collision.finalPoint.x).to.be.within(60 - ERROR_ALLOWED, 60 + ERROR_ALLOWED); expect(collision.finalPoint.y).to.be.within(190 - ERROR_ALLOWED, 190 + ERROR_ALLOWED); }); }); }); describe("returns the correct final velocity based on bounce", function() { describe("with a bounce of 0.0", function() { it("with a vertical line and a horizontal-moving entity", function() { var collision = createCircleCollision(100,100,100,50, 25,75,125,75, 50,0, 10, 0.0); expect(collision).to.be.an('object'); expect(collision.finalVel.x).to.be.within(0 - ERROR_ALLOWED, 0 + ERROR_ALLOWED); expect(collision.finalVel.y).to.be.within(0 - ERROR_ALLOWED, 0 + ERROR_ALLOWED); }); it("with a horizontal line and a vertical-moving entity", function() { var collision = createCircleCollision(100,-25,50,-25, 60,0,60,-200, 0,-50, 20, 0.0); expect(collision).to.be.an('object'); expect(collision.finalVel.x).to.be.within(0 - ERROR_ALLOWED, 0 + ERROR_ALLOWED); expect(collision.finalVel.y).to.be.within(0 - ERROR_ALLOWED, 0 + ERROR_ALLOWED); }); it("with an angled line and an angled-moving entity", function() { var collision = createCircleCollision(500,500,400,400, 350,550,550,350, 75,-75, 15, 0.0); expect(collision).to.be.an('object'); expect(collision.finalVel.x).to.be.within(0 - ERROR_ALLOWED, 0 + ERROR_ALLOWED); expect(collision.finalVel.y).to.be.within(0 - ERROR_ALLOWED, 0 + ERROR_ALLOWED); }); }); describe("with a bounce of 0.25", function() { it("with a vertical line and a horizontal-moving entity", function() { var collision = createCircleCollision(100,100,100,50, 25,75,125,75, 50,0, 10, 0.25); expect(collision).to.be.an('object'); expect(collision.finalVel.x).to.be.within(-12.5 - ERROR_ALLOWED, -12.5 + ERROR_ALLOWED); expect(collision.finalVel.y).to.be.within(0 - ERROR_ALLOWED, 0 + ERROR_ALLOWED); }); it("with a horizontal line and a vertical-moving entity", function() { var 
collision = createCircleCollision(100,-25,50,-25, 60,0,60,-200, 0,-50, 20, 0.25); expect(collision).to.be.an('object'); expect(collision.finalVel.x).to.be.within(0 - ERROR_ALLOWED, 0 + ERROR_ALLOWED); expect(collision.finalVel.y).to.be.within(12.5 - ERROR_ALLOWED, 12.5 + ERROR_ALLOWED); }); it("with an angled line and an angled-moving entity", function() { var collision = createCircleCollision(500,500,400,400, 350,550,550,350, 75,-75, 15, 0.25); expect(collision).to.be.an('object'); expect(collision.finalVel.x).to.be.within(-18.75 - ERROR_ALLOWED, -18.75 + ERROR_ALLOWED); expect(collision.finalVel.y).to.be.within(18.75 - ERROR_ALLOWED, 18.75 + ERROR_ALLOWED); }); }); describe("with a bounce of 1.0", function() { it("with a vertical line and a horizontal-moving entity", function() { var collision = createCircleCollision(100,100,100,50, 25,75,125,75, 50,0, 10, 1.0); expect(collision).to.be.an('object'); expect(collision.finalVel.x).to.be.within(-50 - ERROR_ALLOWED, -50 + ERROR_ALLOWED); expect(collision.finalVel.y).to.be.within(0 - ERROR_ALLOWED, 0 + ERROR_ALLOWED); }); it("with a horizontal line and a vertical-moving entity", function() { var collision = createCircleCollision(100,-25,50,-25, 60,0,60,-200, 0,-50, 20, 1.0); expect(collision).to.be.an('object'); expect(collision.finalVel.x).to.be.within(0 - ERROR_ALLOWED, 0 + ERROR_ALLOWED); expect(collision.finalVel.y).to.be.within(50 - ERROR_ALLOWED, 50 + ERROR_ALLOWED); }); it("with an angled line and an angled-moving entity", function() { var collision = createCircleCollision(500,500,400,400, 350,550,550,350, 75,-75, 15, 1.0); expect(collision).to.be.an('object'); expect(collision.finalVel.x).to.be.within(-75 - ERROR_ALLOWED, -75 + ERROR_ALLOWED); expect(collision.finalVel.y).to.be.within(75 - ERROR_ALLOWED, 75 + ERROR_ALLOWED); }); }); }); describe("returns false when the entity doesn't move through the line at all", function() { it("with a vertical line and a horizontal-moving entity", function() { var collision 
= createCircleCollision(100,100,100,50, 525,575,625,575, 50,0, 10, 1.0); expect(collision).to.equal(false); }); it("with a horizontal line and a vertical-moving entity", function() { var collision = createCircleCollision(100,-25,50,-25, 560,500,560,400, 0,-50, 20, 1.0); expect(collision).to.equal(false); }); it("with an angled line and an angled-moving entity", function() { var collision = createCircleCollision(500,500,400,400, 850,1050,1050,850, 75,-75, 15, 1.0); expect(collision).to.equal(false); }); }); describe("returns false when the entity moves through the \"back\" of the line", function() { it("with a vertical line and a horizontal-moving entity", function() { var collision = createCircleCollision(100,100,100,50, 125,75,25,75, -50,0, 10, 1.0); expect(collision).to.equal(false); }); it("with a horizontal line and a vertical-moving entity", function() { var collision = createCircleCollision(100,-25,50,-25, 60,-200,60,0, 0,50, 20, 1.0); expect(collision).to.equal(false); }); it("with an angled line and an angled-moving entity", function() { var collision = createCircleCollision(500,500,400,400, 550,350,350,550, -75,75, 15, 1.0); expect(collision).to.equal(false); }); }); }); describe("when the entity is a point", function() { describe("returns the correct contact point", function() { it("with a vertical line and a horizontal-moving point", function() { var collision = createPointCollision(100,100,100,50, 25,75,125,75, 50,0, 1.0); expect(collision).to.be.an('object'); expect(collision.contactPoint.x).to.be.within(100 - ERROR_ALLOWED, 100 + ERROR_ALLOWED); expect(collision.contactPoint.y).to.be.within(75 - ERROR_ALLOWED, 75 + ERROR_ALLOWED); }); it("with a horizontal line and a vertical-moving point", function() { var collision = createPointCollision(100,-25,50,-25, 60,0,60,-200, 0,-50, 1.0); expect(collision).to.be.an('object'); expect(collision.contactPoint.x).to.be.within(60 - ERROR_ALLOWED, 60 + ERROR_ALLOWED); 
expect(collision.contactPoint.y).to.be.within(-25 - ERROR_ALLOWED, -25 + ERROR_ALLOWED); }); it("with an angled line and an angled-moving point", function() { var collision = createPointCollision(500,500,400,400, 350,550,550,350, 75,-75, 1.0); expect(collision).to.be.an('object'); expect(collision.contactPoint.x).to.be.within(450 - ERROR_ALLOWED, 450 + ERROR_ALLOWED); expect(collision.contactPoint.y).to.be.within(450 - ERROR_ALLOWED, 450 + ERROR_ALLOWED); }); }); describe("returns the correct distance traveled pre-collision", function() { it("with a vertical line and a horizontal-moving point", function() { var collision = createPointCollision(100,100,100,50, 25,75,125,75, 50,0, 1.0); expect(collision).to.be.an('object'); expect(collision.distTraveled).to.be.within(75 - ERROR_ALLOWED, 75 + ERROR_ALLOWED); }); it("with a horizontal line and a vertical-moving point", function() { var collision = createPointCollision(100,-25,50,-25, 60,0,60,-200, 0,-50, 1.0); expect(collision).to.be.an('object'); expect(collision.distTraveled).to.be.within(25 - ERROR_ALLOWED, 25 + ERROR_ALLOWED); }); it("with an angled line and an angled-moving point", function() { var collision = createPointCollision(500,500,400,400, 350,550,550,350, 75,-75, 1.0); expect(collision).to.be.an('object'); var dx = (450 - 350), dy = (450 - 550); var dist = Math.sqrt(dx * dx + dy * dy); expect(collision.distTraveled).to.be.within(dist - ERROR_ALLOWED, dist + ERROR_ALLOWED); }); }); describe("returns the correct final point based on bounce", function() { describe("with a bounce of 0.0", function() { it("with a vertical line and a horizontal-moving point", function() { var collision = createPointCollision(100,100,100,50, 25,75,125,75, 50,0, 0.0); expect(collision).to.be.an('object'); expect(collision.finalPoint.x).to.be.within(100 - ERROR_ALLOWED, 100 + ERROR_ALLOWED); expect(collision.finalPoint.y).to.be.within(75 - ERROR_ALLOWED, 75 + ERROR_ALLOWED); }); it("with a horizontal line and a vertical-moving 
point", function() { var collision = createPointCollision(100,-25,50,-25, 60,0,60,-200, 0,-50, 0.0); expect(collision).to.be.an('object'); expect(collision.finalPoint.x).to.be.within(60 - ERROR_ALLOWED, 60 + ERROR_ALLOWED); expect(collision.finalPoint.y).to.be.within(-25 - ERROR_ALLOWED, -25 + ERROR_ALLOWED); }); it("with an angled line and an angled-moving point", function() { var collision = createPointCollision(500,500,400,400, 350,550,550,350, 75,-75, 0.0); expect(collision).to.be.an('object'); expect(collision.finalPoint.x).to.be.within(450 - ERROR_ALLOWED, 450 + ERROR_ALLOWED); expect(collision.finalPoint.y).to.be.within(450 - ERROR_ALLOWED, 450 + ERROR_ALLOWED); }); }); describe("with a bounce of 0.25", function() { it("with a vertical line and a horizontal-moving point", function() { var collision = createPointCollision(100,100,100,50, 25,75,125,75, 50,0, 0.25); expect(collision).to.be.an('object'); expect(collision.finalPoint.x).to.be.within(93.75 - ERROR_ALLOWED, 93.75 + ERROR_ALLOWED); expect(collision.finalPoint.y).to.be.within(75 - ERROR_ALLOWED, 75 + ERROR_ALLOWED); }); it("with a horizontal line and a vertical-moving point", function() { var collision = createPointCollision(100,-25,50,-25, 60,0,60,-200, 0,-50, 0.25); expect(collision).to.be.an('object'); expect(collision.finalPoint.x).to.be.within(60 - ERROR_ALLOWED, 60 + ERROR_ALLOWED); expect(collision.finalPoint.y).to.be.within(18.75 - ERROR_ALLOWED, 18.75 + ERROR_ALLOWED); }); }); describe("with a bounce of 1.0", function() { it("with a vertical line and a horizontal-moving point", function() { var collision = createPointCollision(100,100,100,50, 25,75,125,75, 50,0, 1.0); expect(collision).to.be.an('object'); expect(collision.finalPoint.x).to.be.within(75 - ERROR_ALLOWED, 75 + ERROR_ALLOWED); expect(collision.finalPoint.y).to.be.within(75 - ERROR_ALLOWED, 75 + ERROR_ALLOWED); }); it("with a horizontal line and a vertical-moving point", function() { var collision = 
createPointCollision(100,-25,50,-25, 60,0,60,-200, 0,-50, 1.0); expect(collision).to.be.an('object'); expect(collision.finalPoint.x).to.be.within(60 - ERROR_ALLOWED, 60 + ERROR_ALLOWED); expect(collision.finalPoint.y).to.be.within(150 - ERROR_ALLOWED, 150 + ERROR_ALLOWED); }); }); }); describe("returns the correct final velocity based on bounce", function() { describe("with a bounce of 0.0", function() { it("with a vertical line and a horizontal-moving point", function() { var collision = createPointCollision(100,100,100,50, 25,75,125,75, 50,0, 0.0); expect(collision).to.be.an('object'); expect(collision.finalVel.x).to.be.within(0 - ERROR_ALLOWED, 0 + ERROR_ALLOWED); expect(collision.finalVel.y).to.be.within(0 - ERROR_ALLOWED, 0 + ERROR_ALLOWED); }); it("with a horizontal line and a vertical-moving point", function() { var collision = createPointCollision(100,-25,50,-25, 60,0,60,-200, 0,-50, 0.0); expect(collision).to.be.an('object'); expect(collision.finalVel.x).to.be.within(0 - ERROR_ALLOWED, 0 + ERROR_ALLOWED); expect(collision.finalVel.y).to.be.within(0 - ERROR_ALLOWED, 0 + ERROR_ALLOWED); }); it("with an angled line and an angled-moving point", function() { var collision = createPointCollision(500,500,400,400, 350,550,550,350, 75,-75, 0.0); expect(collision).to.be.an('object'); expect(collision.finalVel.x).to.be.within(0 - ERROR_ALLOWED, 0 + ERROR_ALLOWED); expect(collision.finalVel.y).to.be.within(0 - ERROR_ALLOWED, 0 + ERROR_ALLOWED); }); }); describe("with a bounce of 0.25", function() { it("with a vertical line and a horizontal-moving point", function() { var collision = createPointCollision(100,100,100,50, 25,75,125,75, 50,0, 0.25); expect(collision).to.be.an('object'); expect(collision.finalVel.x).to.be.within(-12.5 - ERROR_ALLOWED, -12.5 + ERROR_ALLOWED); expect(collision.finalVel.y).to.be.within(0 - ERROR_ALLOWED, 0 + ERROR_ALLOWED); }); it("with a horizontal line and a vertical-moving point", function() { var collision = 
createPointCollision(100,-25,50,-25, 60,0,60,-200, 0,-50, 0.25); expect(collision).to.be.an('object'); expect(collision.finalVel.x).to.be.within(0 - ERROR_ALLOWED, 0 + ERROR_ALLOWED); expect(collision.finalVel.y).to.be.within(12.5 - ERROR_ALLOWED, 12.5 + ERROR_ALLOWED); }); it("with an angled line and an angled-moving point", function() { var collision = createPointCollision(500,500,400,400, 350,550,550,350, 75,-75, 0.25); expect(collision).to.be.an('object'); expect(collision.finalVel.x).to.be.within(-18.75 - ERROR_ALLOWED, -18.75 + ERROR_ALLOWED); expect(collision.finalVel.y).to.be.within(18.75 - ERROR_ALLOWED, 18.75 + ERROR_ALLOWED); }); }); describe("with a bounce of 1.0", function() { it("with a vertical line and a horizontal-moving point", function() { var collision = createPointCollision(100,100,100,50, 25,75,125,75, 50,0, 1.0); expect(collision).to.be.an('object'); expect(collision.finalVel.x).to.be.within(-50 - ERROR_ALLOWED, -50 + ERROR_ALLOWED); expect(collision.finalVel.y).to.be.within(0 - ERROR_ALLOWED, 0 + ERROR_ALLOWED); }); it("with a horizontal line and a vertical-moving point", function() { var collision = createPointCollision(100,-25,50,-25, 60,0,60,-200, 0,-50, 1.0); expect(collision).to.be.an('object'); expect(collision.finalVel.x).to.be.within(0 - ERROR_ALLOWED, 0 + ERROR_ALLOWED); expect(collision.finalVel.y).to.be.within(50 - ERROR_ALLOWED, 50 + ERROR_ALLOWED); }); it("with an angled line and an angled-moving point", function() { var collision = createPointCollision(500,500,400,400, 350,550,550,350, 75,-75, 1.0); expect(collision).to.be.an('object'); expect(collision.finalVel.x).to.be.within(-75 - ERROR_ALLOWED, -75 + ERROR_ALLOWED); expect(collision.finalVel.y).to.be.within(75 - ERROR_ALLOWED, 75 + ERROR_ALLOWED); }); }); }); describe("returns false when the point doesn't move through the line at all", function() { it("with a vertical line and a horizontal-moving point", function() { var collision = createPointCollision(100,100,100,50, 
525,575,625,575, 50,0, 1.0); expect(collision).to.equal(false); }); it("with a horizontal line and a vertical-moving point", function() { var collision = createPointCollision(100,-25,50,-25, 560,500,560,400, 0,-50, 1.0); expect(collision).to.equal(false); }); it("with an angled line and an angled-moving point", function() { var collision = createPointCollision(500,500,400,400, 850,1050,1050,850, 75,-75, 1.0); expect(collision).to.equal(false); }); }); describe("returns false when the point moves through the \"back\" of the line", function() { it("with a vertical line and a horizontal-moving point", function() { var collision = createPointCollision(100,100,100,50, 125,75,25,75, -50,0, 1.0); expect(collision).to.equal(false); }); it("with a horizontal line and a vertical-moving point", function() { var collision = createPointCollision(100,-25,50,-25, 60,-200,60,0, 0,50, 1.0); expect(collision).to.equal(false); }); it("with an angled line and an angled-moving point", function() { var collision = createPointCollision(500,500,400,400, 550,350,350,550, -75,75, 1.0); expect(collision).to.equal(false); }); }); }); }); });
{ "content_hash": "e4dfab28e222e353676c342369bfe130", "timestamp": "", "source": "github", "line_count": 392, "max_line_length": 109, "avg_line_length": 59.24744897959184, "alnum_prop": 0.6471044133476856, "repo_name": "bridgs/grapple-game", "id": "4b477bf3d1ae8ef1ac468a28cdb626e1ef808917", "size": "23225", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/level/geometry/Line.mspec.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "635" }, { "name": "HTML", "bytes": "408" }, { "name": "JavaScript", "bytes": "108180" }, { "name": "Makefile", "bytes": "375" } ], "symlink_target": "" }
package com.amazonaws.services.directory.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * Contains information about a Remote Authentication Dial In User Service (RADIUS) server. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ds-2015-04-16/RadiusSettings" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class RadiusSettings implements Serializable, Cloneable, StructuredPojo { /** * <p> * An array of strings that contains the IP addresses of the RADIUS server endpoints, or the IP addresses of your * RADIUS server load balancer. * </p> */ private com.amazonaws.internal.SdkInternalList<String> radiusServers; /** * <p> * The port that your RADIUS server is using for communications. Your on-premises network must allow inbound traffic * over this port from the AWS Directory Service servers. * </p> */ private Integer radiusPort; /** * <p> * The amount of time, in seconds, to wait for the RADIUS server to respond. * </p> */ private Integer radiusTimeout; /** * <p> * The maximum number of times that communication with the RADIUS server is attempted. * </p> */ private Integer radiusRetries; /** * <p> * Not currently used. * </p> */ private String sharedSecret; /** * <p> * The protocol specified for your RADIUS endpoints. * </p> */ private String authenticationProtocol; /** * <p> * Not currently used. * </p> */ private String displayLabel; /** * <p> * Not currently used. * </p> */ private Boolean useSameUsername; /** * <p> * An array of strings that contains the IP addresses of the RADIUS server endpoints, or the IP addresses of your * RADIUS server load balancer. * </p> * * @return An array of strings that contains the IP addresses of the RADIUS server endpoints, or the IP addresses of * your RADIUS server load balancer. 
*/ public java.util.List<String> getRadiusServers() { if (radiusServers == null) { radiusServers = new com.amazonaws.internal.SdkInternalList<String>(); } return radiusServers; } /** * <p> * An array of strings that contains the IP addresses of the RADIUS server endpoints, or the IP addresses of your * RADIUS server load balancer. * </p> * * @param radiusServers * An array of strings that contains the IP addresses of the RADIUS server endpoints, or the IP addresses of * your RADIUS server load balancer. */ public void setRadiusServers(java.util.Collection<String> radiusServers) { if (radiusServers == null) { this.radiusServers = null; return; } this.radiusServers = new com.amazonaws.internal.SdkInternalList<String>(radiusServers); } /** * <p> * An array of strings that contains the IP addresses of the RADIUS server endpoints, or the IP addresses of your * RADIUS server load balancer. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setRadiusServers(java.util.Collection)} or {@link #withRadiusServers(java.util.Collection)} if you want * to override the existing values. * </p> * * @param radiusServers * An array of strings that contains the IP addresses of the RADIUS server endpoints, or the IP addresses of * your RADIUS server load balancer. * @return Returns a reference to this object so that method calls can be chained together. */ public RadiusSettings withRadiusServers(String... radiusServers) { if (this.radiusServers == null) { setRadiusServers(new com.amazonaws.internal.SdkInternalList<String>(radiusServers.length)); } for (String ele : radiusServers) { this.radiusServers.add(ele); } return this; } /** * <p> * An array of strings that contains the IP addresses of the RADIUS server endpoints, or the IP addresses of your * RADIUS server load balancer. 
* </p> * * @param radiusServers * An array of strings that contains the IP addresses of the RADIUS server endpoints, or the IP addresses of * your RADIUS server load balancer. * @return Returns a reference to this object so that method calls can be chained together. */ public RadiusSettings withRadiusServers(java.util.Collection<String> radiusServers) { setRadiusServers(radiusServers); return this; } /** * <p> * The port that your RADIUS server is using for communications. Your on-premises network must allow inbound traffic * over this port from the AWS Directory Service servers. * </p> * * @param radiusPort * The port that your RADIUS server is using for communications. Your on-premises network must allow inbound * traffic over this port from the AWS Directory Service servers. */ public void setRadiusPort(Integer radiusPort) { this.radiusPort = radiusPort; } /** * <p> * The port that your RADIUS server is using for communications. Your on-premises network must allow inbound traffic * over this port from the AWS Directory Service servers. * </p> * * @return The port that your RADIUS server is using for communications. Your on-premises network must allow inbound * traffic over this port from the AWS Directory Service servers. */ public Integer getRadiusPort() { return this.radiusPort; } /** * <p> * The port that your RADIUS server is using for communications. Your on-premises network must allow inbound traffic * over this port from the AWS Directory Service servers. * </p> * * @param radiusPort * The port that your RADIUS server is using for communications. Your on-premises network must allow inbound * traffic over this port from the AWS Directory Service servers. * @return Returns a reference to this object so that method calls can be chained together. */ public RadiusSettings withRadiusPort(Integer radiusPort) { setRadiusPort(radiusPort); return this; } /** * <p> * The amount of time, in seconds, to wait for the RADIUS server to respond. 
* </p> * * @param radiusTimeout * The amount of time, in seconds, to wait for the RADIUS server to respond. */ public void setRadiusTimeout(Integer radiusTimeout) { this.radiusTimeout = radiusTimeout; } /** * <p> * The amount of time, in seconds, to wait for the RADIUS server to respond. * </p> * * @return The amount of time, in seconds, to wait for the RADIUS server to respond. */ public Integer getRadiusTimeout() { return this.radiusTimeout; } /** * <p> * The amount of time, in seconds, to wait for the RADIUS server to respond. * </p> * * @param radiusTimeout * The amount of time, in seconds, to wait for the RADIUS server to respond. * @return Returns a reference to this object so that method calls can be chained together. */ public RadiusSettings withRadiusTimeout(Integer radiusTimeout) { setRadiusTimeout(radiusTimeout); return this; } /** * <p> * The maximum number of times that communication with the RADIUS server is attempted. * </p> * * @param radiusRetries * The maximum number of times that communication with the RADIUS server is attempted. */ public void setRadiusRetries(Integer radiusRetries) { this.radiusRetries = radiusRetries; } /** * <p> * The maximum number of times that communication with the RADIUS server is attempted. * </p> * * @return The maximum number of times that communication with the RADIUS server is attempted. */ public Integer getRadiusRetries() { return this.radiusRetries; } /** * <p> * The maximum number of times that communication with the RADIUS server is attempted. * </p> * * @param radiusRetries * The maximum number of times that communication with the RADIUS server is attempted. * @return Returns a reference to this object so that method calls can be chained together. */ public RadiusSettings withRadiusRetries(Integer radiusRetries) { setRadiusRetries(radiusRetries); return this; } /** * <p> * Not currently used. * </p> * * @param sharedSecret * Not currently used. 
*/ public void setSharedSecret(String sharedSecret) { this.sharedSecret = sharedSecret; } /** * <p> * Not currently used. * </p> * * @return Not currently used. */ public String getSharedSecret() { return this.sharedSecret; } /** * <p> * Not currently used. * </p> * * @param sharedSecret * Not currently used. * @return Returns a reference to this object so that method calls can be chained together. */ public RadiusSettings withSharedSecret(String sharedSecret) { setSharedSecret(sharedSecret); return this; } /** * <p> * The protocol specified for your RADIUS endpoints. * </p> * * @param authenticationProtocol * The protocol specified for your RADIUS endpoints. * @see RadiusAuthenticationProtocol */ public void setAuthenticationProtocol(String authenticationProtocol) { this.authenticationProtocol = authenticationProtocol; } /** * <p> * The protocol specified for your RADIUS endpoints. * </p> * * @return The protocol specified for your RADIUS endpoints. * @see RadiusAuthenticationProtocol */ public String getAuthenticationProtocol() { return this.authenticationProtocol; } /** * <p> * The protocol specified for your RADIUS endpoints. * </p> * * @param authenticationProtocol * The protocol specified for your RADIUS endpoints. * @return Returns a reference to this object so that method calls can be chained together. * @see RadiusAuthenticationProtocol */ public RadiusSettings withAuthenticationProtocol(String authenticationProtocol) { setAuthenticationProtocol(authenticationProtocol); return this; } /** * <p> * The protocol specified for your RADIUS endpoints. * </p> * * @param authenticationProtocol * The protocol specified for your RADIUS endpoints. * @see RadiusAuthenticationProtocol */ public void setAuthenticationProtocol(RadiusAuthenticationProtocol authenticationProtocol) { this.authenticationProtocol = authenticationProtocol.toString(); } /** * <p> * The protocol specified for your RADIUS endpoints. 
* </p> * * @param authenticationProtocol * The protocol specified for your RADIUS endpoints. * @return Returns a reference to this object so that method calls can be chained together. * @see RadiusAuthenticationProtocol */ public RadiusSettings withAuthenticationProtocol(RadiusAuthenticationProtocol authenticationProtocol) { setAuthenticationProtocol(authenticationProtocol); return this; } /** * <p> * Not currently used. * </p> * * @param displayLabel * Not currently used. */ public void setDisplayLabel(String displayLabel) { this.displayLabel = displayLabel; } /** * <p> * Not currently used. * </p> * * @return Not currently used. */ public String getDisplayLabel() { return this.displayLabel; } /** * <p> * Not currently used. * </p> * * @param displayLabel * Not currently used. * @return Returns a reference to this object so that method calls can be chained together. */ public RadiusSettings withDisplayLabel(String displayLabel) { setDisplayLabel(displayLabel); return this; } /** * <p> * Not currently used. * </p> * * @param useSameUsername * Not currently used. */ public void setUseSameUsername(Boolean useSameUsername) { this.useSameUsername = useSameUsername; } /** * <p> * Not currently used. * </p> * * @return Not currently used. */ public Boolean getUseSameUsername() { return this.useSameUsername; } /** * <p> * Not currently used. * </p> * * @param useSameUsername * Not currently used. * @return Returns a reference to this object so that method calls can be chained together. */ public RadiusSettings withUseSameUsername(Boolean useSameUsername) { setUseSameUsername(useSameUsername); return this; } /** * <p> * Not currently used. * </p> * * @return Not currently used. */ public Boolean isUseSameUsername() { return this.useSameUsername; } /** * Returns a string representation of this object; useful for testing and debugging. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getRadiusServers() != null) sb.append("RadiusServers: ").append(getRadiusServers()).append(","); if (getRadiusPort() != null) sb.append("RadiusPort: ").append(getRadiusPort()).append(","); if (getRadiusTimeout() != null) sb.append("RadiusTimeout: ").append(getRadiusTimeout()).append(","); if (getRadiusRetries() != null) sb.append("RadiusRetries: ").append(getRadiusRetries()).append(","); if (getSharedSecret() != null) sb.append("SharedSecret: ").append(getSharedSecret()).append(","); if (getAuthenticationProtocol() != null) sb.append("AuthenticationProtocol: ").append(getAuthenticationProtocol()).append(","); if (getDisplayLabel() != null) sb.append("DisplayLabel: ").append(getDisplayLabel()).append(","); if (getUseSameUsername() != null) sb.append("UseSameUsername: ").append(getUseSameUsername()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof RadiusSettings == false) return false; RadiusSettings other = (RadiusSettings) obj; if (other.getRadiusServers() == null ^ this.getRadiusServers() == null) return false; if (other.getRadiusServers() != null && other.getRadiusServers().equals(this.getRadiusServers()) == false) return false; if (other.getRadiusPort() == null ^ this.getRadiusPort() == null) return false; if (other.getRadiusPort() != null && other.getRadiusPort().equals(this.getRadiusPort()) == false) return false; if (other.getRadiusTimeout() == null ^ this.getRadiusTimeout() == null) return false; if (other.getRadiusTimeout() != null && other.getRadiusTimeout().equals(this.getRadiusTimeout()) == false) return false; if (other.getRadiusRetries() == null ^ this.getRadiusRetries() == null) return false; if (other.getRadiusRetries() != null && other.getRadiusRetries().equals(this.getRadiusRetries()) == false) 
return false; if (other.getSharedSecret() == null ^ this.getSharedSecret() == null) return false; if (other.getSharedSecret() != null && other.getSharedSecret().equals(this.getSharedSecret()) == false) return false; if (other.getAuthenticationProtocol() == null ^ this.getAuthenticationProtocol() == null) return false; if (other.getAuthenticationProtocol() != null && other.getAuthenticationProtocol().equals(this.getAuthenticationProtocol()) == false) return false; if (other.getDisplayLabel() == null ^ this.getDisplayLabel() == null) return false; if (other.getDisplayLabel() != null && other.getDisplayLabel().equals(this.getDisplayLabel()) == false) return false; if (other.getUseSameUsername() == null ^ this.getUseSameUsername() == null) return false; if (other.getUseSameUsername() != null && other.getUseSameUsername().equals(this.getUseSameUsername()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getRadiusServers() == null) ? 0 : getRadiusServers().hashCode()); hashCode = prime * hashCode + ((getRadiusPort() == null) ? 0 : getRadiusPort().hashCode()); hashCode = prime * hashCode + ((getRadiusTimeout() == null) ? 0 : getRadiusTimeout().hashCode()); hashCode = prime * hashCode + ((getRadiusRetries() == null) ? 0 : getRadiusRetries().hashCode()); hashCode = prime * hashCode + ((getSharedSecret() == null) ? 0 : getSharedSecret().hashCode()); hashCode = prime * hashCode + ((getAuthenticationProtocol() == null) ? 0 : getAuthenticationProtocol().hashCode()); hashCode = prime * hashCode + ((getDisplayLabel() == null) ? 0 : getDisplayLabel().hashCode()); hashCode = prime * hashCode + ((getUseSameUsername() == null) ? 
0 : getUseSameUsername().hashCode()); return hashCode; } @Override public RadiusSettings clone() { try { return (RadiusSettings) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.directory.model.transform.RadiusSettingsMarshaller.getInstance().marshall(this, protocolMarshaller); } }
{ "content_hash": "6e0013d0bbcb2d75ace72445e42a3c72", "timestamp": "", "source": "github", "line_count": 589, "max_line_length": 141, "avg_line_length": 32.32937181663837, "alnum_prop": 0.62089066274551, "repo_name": "dagnir/aws-sdk-java", "id": "f2232d286b4edb9d5bc9fe38342345f992ec5138", "size": "19622", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "aws-java-sdk-directory/src/main/java/com/amazonaws/services/directory/model/RadiusSettings.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "FreeMarker", "bytes": "157317" }, { "name": "Gherkin", "bytes": "25556" }, { "name": "Java", "bytes": "165755153" }, { "name": "Scilab", "bytes": "3561" } ], "symlink_target": "" }
using AutoMapper; using Sol.DTO; using Sol.Entities; using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; namespace Sol.WebAPI.Mapping { public class AutomapperConfig { public static void ConfigureMapper(IMapperConfigurationExpression cfg) { cfg.CreateMap<Invoice, InvoiceDTO>().ReverseMap(); cfg.CreateMap<Zone, ZoneDTO>().ReverseMap(); cfg.CreateMap<Market, MarketDTO>().ReverseMap(); cfg.CreateMap<DeliveryPoint, DeliveryPointDTO>().ReverseMap(); } public static IMapper CreateMapper() { var config = new MapperConfiguration(ConfigureMapper); return config.CreateMapper(); } } }
{ "content_hash": "69ab509b8fe7041c3ecc4d3eb9493534", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 78, "avg_line_length": 28.185185185185187, "alnum_prop": 0.6557161629434954, "repo_name": "zenontrujillo/sol-server", "id": "e0a95121317237ff25fabef22561db5058a1b416", "size": "763", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "sol-server/src/Sol.WebAPI/Mapping/AutomapperConfig.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "38226" }, { "name": "HTML", "bytes": "13326" } ], "symlink_target": "" }
Clazz.declarePackage ("JM"); Clazz.load (["JM.PhosphorusMonomer"], "JM.NucleicMonomer", ["java.lang.Character", "JU.Lst", "$.P3", "$.Quat", "$.V3", "J.c.STR", "JM.NucleicPolymer", "J.shapebio.BioShape", "JV.JC"], function () { c$ = Clazz.decorateAsClass (function () { this.hasRnaO2Prime = false; this.baseCenter = null; this.bps = null; Clazz.instantialize (this, arguments); }, JM, "NucleicMonomer", JM.PhosphorusMonomer); Clazz.overrideConstructor (c$, function () { }); c$.validateAndAllocate = Clazz.defineMethod (c$, "validateAndAllocate", function (chain, group3, seqcode, firstAtomIndex, lastAtomIndex, specialAtomIndexes) { var offsets = JM.Monomer.scanForOffsets (firstAtomIndex, specialAtomIndexes, JM.NucleicMonomer.interestingNucleicAtomIDs); if (offsets == null) return null; if (!JM.Monomer.checkOptional (offsets, 19, firstAtomIndex, specialAtomIndexes[73])) return null; JM.Monomer.checkOptional (offsets, 20, firstAtomIndex, specialAtomIndexes[89]); JM.Monomer.checkOptional (offsets, 18, firstAtomIndex, specialAtomIndexes[90]); JM.Monomer.checkOptional (offsets, 23, firstAtomIndex, specialAtomIndexes[75]); JM.Monomer.checkOptional (offsets, 24, firstAtomIndex, specialAtomIndexes[77]); return ( new JM.NucleicMonomer ()).set4 (chain, group3, seqcode, firstAtomIndex, lastAtomIndex, offsets); }, "JM.Chain,~S,~N,~N,~N,~A"); Clazz.defineMethod (c$, "set4", function (chain, group3, seqcode, firstAtomIndex, lastAtomIndex, offsets) { this.set3 (chain, group3, seqcode, firstAtomIndex, lastAtomIndex, offsets); if (!JM.Monomer.have (offsets, 15)) { offsets[0] = offsets[19]; var offset = offsets[0] & 0xFF; if (offset != 255) this.leadAtomIndex = firstAtomIndex + offset; }this.hasRnaO2Prime = JM.Monomer.have (offsets, 2); this.$isPyrimidine = JM.Monomer.have (offsets, 8); this.$isPurine = JM.Monomer.have (offsets, 9) && JM.Monomer.have (offsets, 10) && JM.Monomer.have (offsets, 11); return this; }, "JM.Chain,~S,~N,~N,~N,~A"); Clazz.defineMethod (c$, "isNucleicMonomer", 
function () { return true; }); Clazz.overrideMethod (c$, "isDna", function () { return !this.hasRnaO2Prime; }); Clazz.overrideMethod (c$, "isRna", function () { return this.hasRnaO2Prime; }); Clazz.overrideMethod (c$, "isPurine", function () { return this.$isPurine; }); Clazz.overrideMethod (c$, "isPyrimidine", function () { return this.$isPyrimidine; }); Clazz.defineMethod (c$, "isGuanine", function () { return JM.Monomer.have (this.offsets, 17); }); Clazz.overrideMethod (c$, "getProteinStructureType", function () { return (this.hasRnaO2Prime ? J.c.STR.RNA : J.c.STR.DNA); }); Clazz.defineMethod (c$, "getC1P", function () { return this.getAtomFromOffsetIndex (25); }); Clazz.defineMethod (c$, "getC2", function () { return this.getAtomFromOffsetIndex (5); }); Clazz.defineMethod (c$, "getC4P", function () { return this.getAtomFromOffsetIndex (27); }); Clazz.defineMethod (c$, "getN1", function () { return this.getAtomFromOffsetIndex (4); }); Clazz.defineMethod (c$, "getN3", function () { return this.getAtomFromOffsetIndex (6); }); Clazz.defineMethod (c$, "getN2", function () { return this.getAtomFromOffsetIndex (17); }); Clazz.defineMethod (c$, "getN4", function () { return this.getAtomFromOffsetIndex (14); }); Clazz.defineMethod (c$, "getN6", function () { return this.getAtomFromOffsetIndex (16); }); Clazz.defineMethod (c$, "getO2", function () { return this.getAtomFromOffsetIndex (8); }); Clazz.defineMethod (c$, "getO4", function () { return this.getAtomFromOffsetIndex (12); }); Clazz.defineMethod (c$, "getO6", function () { return this.getAtomFromOffsetIndex (13); }); Clazz.overrideMethod (c$, "getTerminatorAtom", function () { return this.getAtomFromOffsetIndex (JM.Monomer.have (this.offsets, 20) ? 
20 : 21); }); Clazz.defineMethod (c$, "getBaseRing6Points", function (pts) { this.getPoints (JM.NucleicMonomer.ring6OffsetIndexes, pts); }, "~A"); Clazz.defineMethod (c$, "getPoints", function (a, pts) { for (var i = a.length; --i >= 0; ) pts[i] = this.getAtomFromOffsetIndex (a[i]); }, "~A,~A"); Clazz.defineMethod (c$, "maybeGetBaseRing5Points", function (pts) { if (this.$isPurine) this.getPoints (JM.NucleicMonomer.ring5OffsetIndexes, pts); return this.$isPurine; }, "~A"); Clazz.defineMethod (c$, "getRiboseRing5Points", function (pts) { this.getPoints (JM.NucleicMonomer.riboseOffsetIndexes, pts); }, "~A"); Clazz.overrideMethod (c$, "isConnectedAfter", function (possiblyPreviousMonomer) { if (possiblyPreviousMonomer == null) return true; var myPhosphorusAtom = this.getAtomFromOffsetIndex (15); if (myPhosphorusAtom == null) return false; return ((possiblyPreviousMonomer).getAtomFromOffsetIndex (21).isBonded (myPhosphorusAtom) || this.isCA2 (possiblyPreviousMonomer)); }, "JM.Monomer"); Clazz.overrideMethod (c$, "findNearestAtomIndex", function (x, y, closest, madBegin, madEnd) { var competitor = closest[0]; var lead = this.getLeadAtom (); var o5prime = this.getAtomFromOffsetIndex (19); var c3prime = this.getAtomFromOffsetIndex (22); var mar = (Clazz.doubleToInt (madBegin / 2)); if (mar < 1900) mar = 1900; var radius = Clazz.floatToInt (this.scaleToScreen (lead.sZ, mar)); if (radius < 4) radius = 4; if (this.isCursorOnTopOf (lead, x, y, radius, competitor) || this.isCursorOnTopOf (o5prime, x, y, radius, competitor) || this.isCursorOnTopOf (c3prime, x, y, radius, competitor)) closest[0] = lead; }, "~N,~N,~A,~N,~N"); Clazz.defineMethod (c$, "setModelClickability", function () { var atom; if (this.isAtomHidden (this.leadAtomIndex)) return; for (var i = 6; --i >= 0; ) { atom = this.getAtomFromOffsetIndex (JM.NucleicMonomer.ring6OffsetIndexes[i]); atom.setClickable (J.shapebio.BioShape.CARTOON_VISIBILITY_FLAG); } if (this.$isPurine) for (var i = 4; --i >= 1; ) { atom = 
this.getAtomFromOffsetIndex (JM.NucleicMonomer.ring5OffsetIndexes[i]); atom.setClickable (J.shapebio.BioShape.CARTOON_VISIBILITY_FLAG); } }); Clazz.defineMethod (c$, "getN0", function () { return (this.getAtomFromOffsetIndex (this.$isPurine ? 11 : 4)); }); Clazz.overrideMethod (c$, "getHelixData", function (tokType, qType, mStep) { return this.getHelixData2 (tokType, qType, mStep); }, "~N,~S,~N"); Clazz.overrideMethod (c$, "getQuaternionFrameCenter", function (qType) { switch (qType) { case 'x': case 'a': case 'b': case 'p': return this.getP (); case 'c': if (this.baseCenter == null) { var n = 0; this.baseCenter = new JU.P3 (); for (var i = 0; i < JM.NucleicMonomer.heavyAtomIndexes.length; i++) { var a = this.getAtomFromOffsetIndex (JM.NucleicMonomer.heavyAtomIndexes[i]); if (a == null) continue; this.baseCenter.add (a); n++; } this.baseCenter.scale (1 / n); }return this.baseCenter; case 'n': default: return this.getN0 (); } }, "~S"); Clazz.overrideMethod (c$, "getQuaternion", function (qType) { var ptA = null; var ptB = null; var ptNorP; var yBased = false; var reverseY = false; switch (qType) { case 'a': ptNorP = this.getP (); if (this.monomerIndex == 0 || ptNorP == null) return null; yBased = true; ptA = (this.bioPolymer.monomers[this.monomerIndex - 1]).getC4P (); ptB = this.getC4P (); break; case 'x': ptNorP = this.getP (); if (this.monomerIndex == this.bioPolymer.monomerCount - 1 || ptNorP == null) return null; ptA = (this.bioPolymer.monomers[this.monomerIndex + 1]).getP (); ptB = this.getC4P (); break; case 'b': return this.getQuaternionP (); case 'c': case 'n': ptNorP = this.getN0 (); if (ptNorP == null) return null; yBased = true; reverseY = true; ptA = this.getAtomFromOffsetIndex (5); ptB = this.getAtomFromOffsetIndex (25); break; case 'p': ptNorP = this.getP (); if (ptNorP == null) return null; var p1 = this.getAtomFromOffsetIndex (23); var p2 = this.getAtomFromOffsetIndex (24); var bonds = ptNorP.getBonds (); if (bonds == null) return null; var g = 
ptNorP.getGroup (); for (var i = 0; i < bonds.length; i++) { var atom = bonds[i].getOtherAtom (ptNorP); if (p1 != null && atom.i == p1.i) continue; if (p2 != null && atom.i == p2.i) continue; if (atom.getGroup () === g) ptB = atom; else ptA = atom; } break; case 'q': return null; default: ptNorP = this.getN0 (); if (ptNorP == null) return null; if (this.$isPurine) { ptA = this.getAtomFromOffsetIndex (5); ptB = this.getAtomFromOffsetIndex (9); } else { ptA = this.getAtomFromOffsetIndex (6); ptB = this.getAtomFromOffsetIndex (1); }break; } if (ptA == null || ptB == null) return null; var vA = JU.V3.newVsub (ptA, ptNorP); var vB = JU.V3.newVsub (ptB, ptNorP); if (reverseY) vB.scale (-1); return JU.Quat.getQuaternionFrameV (vA, vB, null, yBased); }, "~S"); Clazz.overrideMethod (c$, "isCrossLinked", function (g) { if (!(Clazz.instanceOf (g, JM.NucleicMonomer)) || this.$isPurine == g.isPurine ()) return false; var otherNucleotide = (this.$isPurine ? g : this); var myNucleotide = (this.$isPurine ? this : g); var myN1 = myNucleotide.getN1 (); var otherN3 = otherNucleotide.getN3 (); return (myN1.isBonded (otherN3)); }, "JM.Group"); Clazz.overrideMethod (c$, "getCrossLinkLead", function (vReturn) { var N = (this.$isPurine ? this.getN1 () : this.getN3 ()); var bonds = N.getBonds (); if (bonds == null) return false; var haveCrossLinks = false; for (var i = 0; i < bonds.length; i++) { if (bonds[i].isHydrogen ()) { var N2 = bonds[i].getOtherAtom (N); var g = N2.getGroup (); if (!(Clazz.instanceOf (g, JM.NucleicMonomer))) continue; var m = g; if ((this.$isPurine ? 
m.getN3 () : m.getN1 ()) === N2) { if (vReturn == null) return true; vReturn.addLast (Integer.$valueOf (m.leadAtomIndex)); haveCrossLinks = true; }}} return haveCrossLinks; }, "JU.Lst"); Clazz.defineMethod (c$, "getEdgePoints", function (pts) { pts[0] = this.getLeadAtom (); pts[1] = this.getC4P (); pts[2] = pts[5] = this.getC1P (); switch (this.getGroup1 ()) { case 'C': pts[3] = this.getO2 (); pts[4] = this.getN4 (); return true; case 'A': pts[3] = this.getC2 (); pts[4] = this.getN6 (); return true; case 'G': case 'I': pts[3] = this.getC2 (); pts[4] = this.getO6 (); return true; case 'T': case 'U': pts[3] = this.getO2 (); pts[4] = this.getO4 (); return true; default: return false; } }, "~A"); Clazz.defineMethod (c$, "addBasePair", function (bp) { if (this.bps == null) this.bps = new JU.Lst (); this.bps.addLast (bp); }, "JM.BasePair"); Clazz.defineMethod (c$, "setGroup1", function (g) { if (this.group1 == '\0') this.group1 = g; }, "~S"); Clazz.defineMethod (c$, "getBasePairs", function () { if (!(this.bioPolymer).isDssrSet) this.bioPolymer.model.ms.vwr.getAnnotationParser ().setAllDSSRParametersForModel (this.bioPolymer.model.ms.vwr, this.bioPolymer.model.modelIndex); return this.bps; }); Clazz.overrideMethod (c$, "getGroup1b", function () { var g3 = JV.JC.group3Names[this.groupID]; var g1 = (JM.NucleicPolymer.htGroup1 == null ? null : JM.NucleicPolymer.htGroup1.get (g3)); return (g1 == null ? 
Character.toLowerCase (g3.charAt (g3.length - 1)) : g1.charAt (0)); }); Clazz.defineStatics (c$, "C6", 1, "O2Pr", 2, "C5", 3, "N1", 4, "C2", 5, "N3", 6, "C4", 7, "O2", 8, "N7", 9, "C8", 10, "N9", 11, "O4", 12, "O6", 13, "N4", 14, "NP", 15, "N6", 16, "N2", 17, "H5T", 18, "O5P", 19, "H3T", 20, "O3P", 21, "C3P", 22, "O1P", 23, "O2P", 24, "C1P", 25, "C2P", 26, "C4P", 27, "O4P", 28, "C5P", 29, "interestingNucleicAtomIDs", [-14, 37, -80, 36, 32, 33, 34, 35, -39, -40, -41, -42, -48, -47, -43, -14, -45, -44, -73, -7, -89, 10, 9, -75, -77, -13, -12, -9, -79, -8], "ring6OffsetIndexes", [3, 1, 4, 5, 6, 7], "ring5OffsetIndexes", [3, 9, 10, 11, 7], "riboseOffsetIndexes", [25, 26, 22, 27, 28, 21, 29, 19, 0], "heavyAtomIndexes", [3, 1, 4, 5, 6, 7, 11, 10, 9, 16, 14, 8, 12, 17, 13]); });
{ "content_hash": "9c92ed30393c0d2838440dea7022e6e5", "timestamp": "", "source": "github", "line_count": 372, "max_line_length": 197, "avg_line_length": 32.00268817204301, "alnum_prop": 0.6598908021839563, "repo_name": "sillitoe/biojs-vis-pdbviewer", "id": "aba5bb2a437313a0354148aa3f6442cade838743", "size": "11905", "binary": false, "copies": "11", "ref": "refs/heads/master", "path": "external/jmol-14.2.4/jsmol/j2s/JM/NucleicMonomer.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "JavaScript", "bytes": "10964" } ], "symlink_target": "" }
class Recipes::Devise < Rails::AppBuilder def ask use_devise = answer(:devise) do Ask.confirm "Do you want to use Devise for authentication? (required for ActiveAdmin)" end if use_devise set(:authentication, use_devise) ask_for_devise_model end end def create add_devise if selected?(:authentication) end def install ask_for_devise_model add_devise end def installed? gem_exists?(/devise/) end private def ask_for_devise_model create_user_model = answer(:"devise-user-model") do Ask.confirm "Do you want to create a user model for Devise?" end set(:authentication_model, :user) if create_user_model end def add_devise gather_gem 'devise' gather_gem 'devise-i18n' after(:gem_install) do generate "devise:install" if auth_model = get(:authentication_model) generate "devise #{auth_model}" end gsub_file "config/initializers/devise.rb", /(\# config.secret_key.+)/i do |_match| "config.secret_key = ENV['DEVISE_SECRET_KEY']" end gsub_file "config/initializers/devise.rb", /(config.mailer_sender.+)/i do |_match| "config.mailer_sender = ENV['DEFAULT_EMAIL_ADDRESS']" end gsub_file "config/initializers/devise.rb", /(\# config.pepper.+)/i do |_match| "# config.pepper = 'onhcylrat7x8bjyr5o15sxaix3vbu0sl'" end append_to_file '.env.development', "DEVISE_SECRET_KEY=\n" add_readme_section :internal_dependencies, :devise end end end
{ "content_hash": "e5e4e1ecd0b2dde88a3087d862757065", "timestamp": "", "source": "github", "line_count": 63, "max_line_length": 92, "avg_line_length": 24.555555555555557, "alnum_prop": 0.6470588235294118, "repo_name": "platanus/potassium", "id": "7db52540c4081503a190368976acf3913010211f", "size": "1547", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/potassium/recipes/devise.rb", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "11445" }, { "name": "JavaScript", "bytes": "1476" }, { "name": "Procfile", "bytes": "63" }, { "name": "Ruby", "bytes": "163591" }, { "name": "Shell", "bytes": "1780" }, { "name": "TypeScript", "bytes": "2729" }, { "name": "Vue", "bytes": "623" } ], "symlink_target": "" }
using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using Android.App; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. [assembly: AssemblyTitle("GoogleAnalyticsConnector.Forms.Plugin.Android")] [assembly: AssemblyDescription("")] [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("GoogleAnalyticsConnector.Forms.Plugin.Android")] [assembly: AssemblyCopyright("Copyright © 2014")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] [assembly: ComVisible(false)] // Version information for an assembly consists of the following four values: // // Major Version // Minor Version // Build Number // Revision // // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' as shown below: // [assembly: AssemblyVersion("1.0.*")] [assembly: AssemblyVersion("1.0.0.0")] [assembly: AssemblyFileVersion("1.0.0.0")]
{ "content_hash": "ad610d424a7d1522dc8e55842eebfa0e", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 84, "avg_line_length": 37.56666666666667, "alnum_prop": 0.7515527950310559, "repo_name": "lhughey/Xamarin.Plugins-1", "id": "1c55b11930da920364dfbf6f7b071d4b1c5b914d", "size": "1130", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "GoogleAnalyticsConnector/GoogleAnalyticsConnector/FrazzApps.Xamarin.GoogleAnalyticsConnector.Android/Properties/AssemblyInfo.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "477863" }, { "name": "JavaScript", "bytes": "2246" }, { "name": "PowerShell", "bytes": "10689" } ], "symlink_target": "" }
/* * SystemSnapshotWebServer * Author: Andrzej Piotrowski * * http://SystemSnapshotWebServer.com/ * */ using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Reflection; using System.Security.Cryptography; using System.Windows.Forms; namespace SystemSnapshotWebServer { /// <summary> /// A class for loading Embedded Assembly /// </summary> public class EmbeddedAssembly { // Version 1.3 static Dictionary<string, Assembly> dic = null; /// <summary> /// Load Assembly, DLL from Embedded Resources into memory. /// </summary> /// <param name="embeddedResource">Embedded Resource string. Example: WindowsFormsApplication1.SomeTools.dll</param> /// <param name="fileName">File Name. Example: SomeTools.dll</param> public static void Load(string embeddedResource, string fileName) { if (dic == null) dic = new Dictionary<string, Assembly>(); byte[] ba = null; Assembly asm = null; Assembly curAsm = Assembly.GetExecutingAssembly(); using (Stream stm = curAsm.GetManifestResourceStream(embeddedResource)) { // Either the file is not existed or it is not mark as embedded resource if (stm == null) throw new Exception(embeddedResource + " is not found in Embedded Resources."); // Get byte[] from the file from embedded resource ba = new byte[(int)stm.Length]; stm.Read(ba, 0, (int)stm.Length); try { asm = Assembly.Load(ba); // Add the assembly/dll into dictionary dic.Add(asm.FullName, asm); return; } catch { // Purposely do nothing // Unmanaged dll or assembly cannot be loaded directly from byte[] // Let the process fall through for next part } } bool fileOk = false; string tempFile = ""; using (SHA1CryptoServiceProvider sha1 = new SHA1CryptoServiceProvider()) { // Get the hash value from embedded DLL/assembly string fileHash = BitConverter.ToString(sha1.ComputeHash(ba)).Replace("-", string.Empty); // Define the temporary storage location of the DLL/assembly tempFile = Path.GetTempPath() + fileName; // Determines whether the DLL/assembly is existed 
or not if (File.Exists(tempFile)) { // Get the hash value of the existed file byte[] bb = File.ReadAllBytes(tempFile); string fileHash2 = BitConverter.ToString(sha1.ComputeHash(bb)).Replace("-", string.Empty); // Compare the existed DLL/assembly with the Embedded DLL/assembly if (fileHash == fileHash2) { // Same file fileOk = true; } else { // Not same fileOk = false; } } else { // The DLL/assembly is not existed yet fileOk = false; } } // Create the file on disk if (!fileOk) { System.IO.File.WriteAllBytes(tempFile, ba); } // Load it into memory asm = Assembly.LoadFile(tempFile); // Add the loaded DLL/assembly into dictionary dic.Add(asm.FullName, asm); } /// <summary> /// Retrieve specific loaded DLL/assembly from memory /// </summary> /// <param name="assemblyFullName"></param> /// <returns></returns> public static Assembly Get(string assemblyFullName) { if (dic == null || dic.Count == 0) return null; if (dic.ContainsKey(assemblyFullName)) return dic[assemblyFullName]; return null; // Don't throw Exception if the dictionary does not contain the requested assembly. // This is because the event of AssemblyResolve will be raised for every // Embedded Resources (such as pictures) of the projects. // Those resources wil not be loaded by this class and will not exist in dictionary. } // This function is not called if the Assembly is already previously loaded into memory. // This function is not called if the Assembly is already in the same folder as the app. 
// public static Assembly OnResolveAssembly(object sender, ResolveEventArgs e) { var thisAssembly = Assembly.GetExecutingAssembly(); // Get the Name of the AssemblyFile var assemblyName = new AssemblyName(e.Name); var dllName = assemblyName.Name + ".dll"; // Load from Embedded Resources var resources = thisAssembly.GetManifestResourceNames().Where(s => s.EndsWith(dllName)); if (resources.Any()) { // 99% of cases will only have one matching item, but if you don't, // you will have to change the logic to handle those cases. var resourceName = resources.First(); using (var stream = thisAssembly.GetManifestResourceStream(resourceName)) { if (stream == null) return null; var block = new byte[stream.Length]; // Safely try to load the assembly. try { stream.Read(block, 0, block.Length); return Assembly.Load(block); } catch (IOException) { return null; } catch (BadImageFormatException) { return null; } } } // in the case the resource doesn't exist, return null. return null; } } }
{ "content_hash": "5226834ffd1d0f97bcb4e5b88042ab49", "timestamp": "", "source": "github", "line_count": 181, "max_line_length": 124, "avg_line_length": 36.0939226519337, "alnum_prop": 0.5107913669064749, "repo_name": "siranen/SystemSnapshotWebServer", "id": "ed3faac76b12614fbed9c8a0720bd438e7ba449d", "size": "6535", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "EmbeddedAssembly.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "237794" }, { "name": "HTML", "bytes": "35593" } ], "symlink_target": "" }
<?php /* |-------------------------------------------------------------------------- | Register The Laravel Class Loader |-------------------------------------------------------------------------- | | In addition to using Composer, you may use the Laravel class loader to | load your controllers and models. This is useful for keeping all of | your classes in the "global" namespace without Composer updating. | */ ClassLoader::addDirectories([ app_path().'/database/seeds', ]); /* |-------------------------------------------------------------------------- | Application Error Logger |-------------------------------------------------------------------------- | | Here we will configure the error logger setup for the application which | is built on top of the wonderful Monolog library. By default we will | build a basic log file setup which creates a single file for logs. | */ Log::useFiles(storage_path().'/logs/laravel.log'); /* |-------------------------------------------------------------------------- | Application Error Handler |-------------------------------------------------------------------------- | | Here you may handle any errors that occur in your application, including | logging them or displaying custom views for specific errors. You may | even register several error handlers to handle different types of | exceptions. If nothing is returned, the default error view is | shown, which includes a detailed stack trace during debug. | */ App::error(function(Exception $exception, $code) { Log::error($exception); }); /* |-------------------------------------------------------------------------- | Maintenance Mode Handler |-------------------------------------------------------------------------- | | The "down" Artisan command gives you the ability to put an application | into maintenance mode. Here, you will define what is displayed back | to the user if maintenace mode is in effect for this application. 
| */ App::down(function() { return Response::make('Be right back!', 503); }); /* * Other Required Files */ // Composers are located within our /src directory. require_once base_path().'/src/composers.php'; // Filters are located within our /src directory. require_once base_path().'/src/filters.php'; // Helpers are located within our /src directory. require_once base_path().'/src/helpers.php'; // Observers are located within our /src directory. require_once base_path().'/src/observers.php'; // Routes are located within our /src directory. require_once base_path().'/src/routes.php'; // Services are located within our /src directory. require_once base_path().'/src/services.php';
{ "content_hash": "2caf9de4f0add469742990d6f0252de6", "timestamp": "", "source": "github", "line_count": 83, "max_line_length": 77, "avg_line_length": 32.795180722891565, "alnum_prop": 0.550330639235856, "repo_name": "EdRands/laravel-seeder", "id": "73a54a34e483619f2df9678a586080b91eca48ea", "size": "2722", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/start/global.php", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "73" }, { "name": "CSS", "bytes": "30769" }, { "name": "HTML", "bytes": "2492" }, { "name": "JavaScript", "bytes": "118257" }, { "name": "PHP", "bytes": "194868" } ], "symlink_target": "" }
#include <babylon/materials/node/optimizers/node_material_optimizer.h> namespace BABYLON { void NodeMaterialOptimizer::optimize( const std::vector<NodeMaterialBlockPtr>& /*vertexOutputNodes*/, const std::vector<NodeMaterialBlockPtr>& /*fragmentOutputNodes*/) { // Do nothing by default } } // end of namespace BABYLON
{ "content_hash": "99d416bdbb3f8f3cdb7e899bd28ec515", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 70, "avg_line_length": 27.25, "alnum_prop": 0.7675840978593272, "repo_name": "samdauwe/BabylonCpp", "id": "b08f59308e803af7084bfdc17b8f2d9a33690c8a", "size": "327", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/BabylonCpp/src/materials/node/optimizers/node_material_optimizer.cpp", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "65272" }, { "name": "C++", "bytes": "14493785" }, { "name": "CMake", "bytes": "62515" }, { "name": "GLSL", "bytes": "367" }, { "name": "HLSL", "bytes": "821" }, { "name": "HTML", "bytes": "6706" }, { "name": "Objective-C", "bytes": "8381" }, { "name": "Objective-C++", "bytes": "1034" }, { "name": "Python", "bytes": "68588" } ], "symlink_target": "" }
package org.flywaydb.sample.webapp; import org.flywaydb.core.Flyway; /** * Environment of this application. */ public class Environment { /** * Checks whether we are currently running on AppEngine. * * @return {@code true} if we are, {@code false} if not. */ public static boolean runningOnGoogleAppEngine() { return System.getProperty("com.google.appengine.runtime.environment") != null; } /** * Creates a new Flyway instance. * * @return The fully configured Flyway instance. */ public static Flyway createFlyway() { Flyway flyway = new Flyway(); if (runningOnGoogleAppEngine()) { flyway.setDataSource("jdbc:google:rdbms://flyway-test-project:flywaycloudsql/flyway_cloudsql_db", null, null); } else { flyway.setDataSource("jdbc:h2:mem:flyway_db;DB_CLOSE_DELAY=-1", "sa", ""); } flyway.setLocations("db.migration", "db/more/migrations", "org.flywaydb.sample.migration", "org/flywaydb/sample/webapp/migration"); return flyway; } }
{ "content_hash": "6a422a1a27d8e697d7aa3870af4e71f6", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 122, "avg_line_length": 28.3, "alnum_prop": 0.6113074204946997, "repo_name": "IAops/flyway", "id": "4b6e0a141bcd370e338c726c93c6b7ada3801c3a", "size": "1736", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "flyway-sample-webapp/src/main/java/org/flywaydb/sample/webapp/Environment.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "6941" }, { "name": "HTML", "bytes": "3722" }, { "name": "Java", "bytes": "1612100" }, { "name": "PLSQL", "bytes": "73460" }, { "name": "PLpgSQL", "bytes": "25289" }, { "name": "SQLPL", "bytes": "16856" }, { "name": "Scala", "bytes": "27026" }, { "name": "Shell", "bytes": "1638" } ], "symlink_target": "" }
FROM nodesource/nsolid:carbon-latest RUN mkdir -p /usr/src/app WORKDIR /usr/src/app ADD package.json /usr/src/app/package.json RUN npm install --production ADD server.js /usr/src/app/server.js ENTRYPOINT ["nsolid", "server.js"]
{ "content_hash": "9d57624b7eeb92437bb085f2fb4d1eb2", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 42, "avg_line_length": 21.09090909090909, "alnum_prop": 0.7543103448275862, "repo_name": "nodesource/nsolid-kubernetes", "id": "b0040cafa8c628a0863046eaa1b33abb30701adc", "size": "232", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "sample-app/Dockerfile", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "455" }, { "name": "Shell", "bytes": "323" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> <modelVersion>4.0.0</modelVersion> <parent> <groupId>com.ameliant.devoxx</groupId> <artifactId>camel-devoxx</artifactId> <version>1.0-SNAPSHOT</version> </parent> <artifactId>rest-jms-proxy</artifactId> <packaging>war</packaging> <name>${application-name} :: Rest-JMS Proxy</name> <dependencies> <dependency> <groupId>com.ameliant.devoxx</groupId> <artifactId>backend-model</artifactId> <version>1.0-SNAPSHOT</version> </dependency> <dependency> <groupId>org.apache.camel</groupId> <artifactId>camel-core</artifactId> </dependency> <dependency> <groupId>org.apache.camel</groupId> <artifactId>camel-spring</artifactId> </dependency> <dependency> <groupId>org.apache.camel</groupId> <artifactId>camel-servlet</artifactId> <version>${camel-version}</version> </dependency> <dependency> <groupId>org.apache.camel</groupId> <artifactId>camel-jaxb</artifactId> <version>${camel-version}</version> </dependency> <dependency> <groupId>org.apache.camel</groupId> <artifactId>camel-jetty8</artifactId> <version>${camel-version}</version> <scope>test</scope> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-web</artifactId> <version>${spring-version}</version> </dependency> <dependency> <groupId>javax.inject</groupId> <artifactId>javax.inject</artifactId> <version>1</version> </dependency> <dependency> <groupId>commons-lang</groupId> <artifactId>commons-lang</artifactId> </dependency> <dependency> <groupId>org.apache.activemq</groupId> <artifactId>activemq-camel</artifactId> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-jms</artifactId> </dependency> <dependency> <groupId>org.apache.camel</groupId> <artifactId>camel-test</artifactId> <scope>test</scope> </dependency> <dependency> 
<groupId>org.slf4j</groupId> <artifactId>slf4j-api</artifactId> </dependency> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-log4j12</artifactId> <scope>runtime</scope> </dependency> <dependency> <groupId>log4j</groupId> <artifactId>log4j</artifactId> <scope>runtime</scope> </dependency> <dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> <scope>test</scope> </dependency> </dependencies> </project>
{ "content_hash": "41fd08ec0b0796fa533ac2214d14601e", "timestamp": "", "source": "github", "line_count": 104, "max_line_length": 105, "avg_line_length": 27.64423076923077, "alnum_prop": 0.6462608695652174, "repo_name": "jkorab/camel-devoxx", "id": "85012c50b41bcd615cce3595f289d77366694af3", "size": "2875", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "rest-jms-proxy/pom.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "16" }, { "name": "Java", "bytes": "7437" } ], "symlink_target": "" }
// ---------------------------------------------------------------------------- // Copyright 2006-2010, GeoTelematic Solutions, Inc. // All rights reserved // ---------------------------------------------------------------------------- // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // // ---------------------------------------------------------------------------- // Change History: // 2009/08/07 Martin D. Flynn // -Initial release // ---------------------------------------------------------------------------- package org.opengts.util; import java.lang.*; import java.util.*; import java.math.*; /** *** Accumulator Long container. *** Typically used in conditions where it is desireable to pass an accumulator *** to an inner-class and have the value accessible from outside the inner-class. *** (If the accumulator is to be used from different threads in a multi-threaded *** environment, use "AtomicLong" instead). 
**/ public class AccumulatorLong { // ------------------------------------------------------------------------ private long accum = 0L; /** *** Constructor **/ public AccumulatorLong() { this(0L); } /** *** Constructor *** @param val Initial value **/ public AccumulatorLong(long val) { this.accum = val; } // ------------------------------------------------------------------------ /** *** Sets the value of the accumulator *** @param v The new value **/ public void set(long v) { this.accum = v; } /** *** Gets the value of the accumulator *** @return The current value **/ public long get() { return this.accum; } // ------------------------------------------------------------------------ /** *** Adds the specified value to the accumulator *** @param v The value to add **/ public void add(long v) { this.accum += v; } /** *** Subtracts the specified value from the accumulator *** @param v The value to subtract **/ public void subtract(long v) { this.accum -= v; } // ------------------------------------------------------------------------ /** *** Increment the value of the accumulator by 1 **/ public void increment() { this.accum++; } /** *** Decrement the value of the accumulator by 1 **/ public void decrement(long v) { this.accum--; } }
{ "content_hash": "48c36ea3193321ebe21d005bde6b91f3", "timestamp": "", "source": "github", "line_count": 120, "max_line_length": 81, "avg_line_length": 26.116666666666667, "alnum_prop": 0.4620293554562859, "repo_name": "cisdev123/OpenGTS_2.6.0", "id": "85e37d253b5dc65fb2829195b8e254211816a5ae", "size": "3134", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/org/opengts/util/AccumulatorLong.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "13754" }, { "name": "CSS", "bytes": "57398" }, { "name": "Java", "bytes": "7705340" }, { "name": "JavaScript", "bytes": "598031" }, { "name": "Perl", "bytes": "53060" }, { "name": "Shell", "bytes": "37282" } ], "symlink_target": "" }
package com.not2excel.api.util; import org.bukkit.ChatColor; import org.bukkit.command.CommandSender; import java.util.Iterator; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * @author Richmond Steele * @since 12/16/13 * All rights Reserved * Please read included LICENSE file */ public class Colorizer { private static final int MAX_SIZE = 1000; private static final Map<String, String> colorizedStrings = new ConcurrentHashMap<String, String>(); private static final Map<String, ChatColor> customColors = new ConcurrentHashMap<String, ChatColor>(); static { addColor("purple", ChatColor.LIGHT_PURPLE); addColor("cyan", ChatColor.AQUA); addColor("dark_cyan", ChatColor.DARK_AQUA); } /** * Converts simple colors into ChatColor values * eg. <blue>test => §9test (actually its technically ChatColor.BLUE, not §9. despite them being the same) * * @param string * input string * @return string with proper ChatColor inputted */ public static String formatColors(String string) { synchronized (colorizedStrings) { if (colorizedStrings.containsKey(string)) { return colorizedStrings.get(string); } else { Pattern p = Pattern.compile("<([a-zA-Z_]*)>"); Matcher m = p.matcher(string); String colorized = string; while (m.find()) { colorized = colorized.replaceFirst(p.pattern(), convertToColorCode(m.group(1))); } colorizedStrings.put(string, colorized); if(colorizedStrings.size() > MAX_SIZE) { reduceSize(); } return colorized; } } } /** * Formats string and colorizes it * * @param string * String containing colors and %s %d etc. * @param objects * Objects to be formatted into the string * @return formatted and colorized String */ public static String formatString(String string, Object... objects) { string = String.format(string, objects); return formatColors(string); } public static void send(CommandSender sender, String string, Object... 
objects) { sender.sendMessage(formatString(string, objects)); } public static void addColor(String s, ChatColor color) { synchronized (customColors) { if(!customColors.containsKey(s.toUpperCase())) { customColors.put(s.toUpperCase(), color); } } } public static void removeColor(String s) { synchronized (customColors) { if(customColors.containsKey(s.toUpperCase())) { customColors.remove(s.toUpperCase()); } } } /** * Wrapper for <code>ChatColor.valueOf()</code> * * @param s * string to get color of * @return ChatColor char */ private static String convertToColorCode(String s) { synchronized (customColors) { if(customColors.containsKey(s.toUpperCase())) { return customColors.get(s.toUpperCase()).toString(); } } try { return ChatColor.valueOf(s.toUpperCase()).toString(); } catch(Exception e) { return "<" + s + ">"; } } private static void reduceSize() { synchronized (colorizedStrings) { Iterator<String> iterator = colorizedStrings.values().iterator(); for(int i = colorizedStrings.size() / 10; i >= 0; --i) { if(!iterator.hasNext()) { break; } iterator.next(); iterator.remove(); } } } }
{ "content_hash": "42670edd5dd395a78ca81f50197d147e", "timestamp": "", "source": "github", "line_count": 150, "max_line_length": 110, "avg_line_length": 27.913333333333334, "alnum_prop": 0.539049438738954, "repo_name": "Not2EXceL/ClashOfBlocks", "id": "d26cf0d1dd8841dd663af6c7e9d12367873d0d3e", "size": "4189", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/com/not2excel/api/util/Colorizer.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "136382" } ], "symlink_target": "" }
""" Tests for Algorithms using the Pipeline API. """ from os.path import ( dirname, join, realpath, ) from nose_parameterized import parameterized from numpy import ( array, arange, full_like, float64, nan, uint32, ) from numpy.testing import assert_almost_equal import pandas as pd from pandas import ( concat, DataFrame, date_range, read_csv, Series, Timestamp, ) from pandas.tseries.tools import normalize_date from six import iteritems, itervalues from zipline.algorithm import TradingAlgorithm from zipline.api import ( attach_pipeline, pipeline_output, get_datetime, ) from zipline.errors import ( AttachPipelineAfterInitialize, PipelineOutputDuringInitialize, NoSuchPipeline, ) from zipline.lib.adjustment import MULTIPLY from zipline.pipeline import Pipeline from zipline.pipeline.factors import VWAP from zipline.pipeline.data import USEquityPricing from zipline.pipeline.loaders.frame import DataFrameLoader from zipline.pipeline.loaders.equity_pricing_loader import ( USEquityPricingLoader, ) from zipline.testing import ( str_to_seconds ) from zipline.testing import ( create_empty_splits_mergers_frame, FakeDataPortal, ) from zipline.testing.fixtures import ( WithAdjustmentReader, WithBcolzEquityDailyBarReaderFromCSVs, WithDataPortal, ZiplineTestCase, ) from zipline.utils.calendars import get_calendar TEST_RESOURCE_PATH = join( dirname(dirname(realpath(__file__))), # zipline_repo/tests 'resources', 'pipeline_inputs', ) def rolling_vwap(df, length): "Simple rolling vwap implementation for testing" closes = df['close'].values volumes = df['volume'].values product = closes * volumes out = full_like(closes, nan) for upper_bound in range(length, len(closes) + 1): bounds = slice(upper_bound - length, upper_bound) out[upper_bound - 1] = product[bounds].sum() / volumes[bounds].sum() return Series(out, index=df.index) class ClosesOnly(WithDataPortal, ZiplineTestCase): sids = 1, 2, 3 START_DATE = pd.Timestamp('2014-01-01', tz='utc') END_DATE = pd.Timestamp('2014-02-01', tz='utc') 
dates = date_range(START_DATE, END_DATE, freq=get_calendar("NYSE").day, tz='utc') @classmethod def make_equity_info(cls): cls.equity_info = ret = DataFrame.from_records([ { 'sid': 1, 'symbol': 'A', 'start_date': cls.dates[10], 'end_date': cls.dates[13], 'exchange': 'TEST', }, { 'sid': 2, 'symbol': 'B', 'start_date': cls.dates[11], 'end_date': cls.dates[14], 'exchange': 'TEST', }, { 'sid': 3, 'symbol': 'C', 'start_date': cls.dates[12], 'end_date': cls.dates[15], 'exchange': 'TEST', }, ]) return ret @classmethod def make_equity_daily_bar_data(cls): cls.closes = DataFrame( {sid: arange(1, len(cls.dates) + 1) * sid for sid in cls.sids}, index=cls.dates, dtype=float, ) for sid in cls.sids: yield sid, DataFrame( { 'open': cls.closes[sid].values, 'high': cls.closes[sid].values, 'low': cls.closes[sid].values, 'close': cls.closes[sid].values, 'volume': cls.closes[sid].values, }, index=cls.dates, ) @classmethod def init_class_fixtures(cls): super(ClosesOnly, cls).init_class_fixtures() cls.first_asset_start = min(cls.equity_info.start_date) cls.last_asset_end = max(cls.equity_info.end_date) cls.assets = cls.asset_finder.retrieve_all(cls.sids) cls.trading_day = cls.trading_calendar.day # Add a split for 'A' on its second date. cls.split_asset = cls.assets[0] cls.split_date = cls.split_asset.start_date + cls.trading_day cls.split_ratio = 0.5 cls.adjustments = DataFrame.from_records([ { 'sid': cls.split_asset.sid, 'value': cls.split_ratio, 'kind': MULTIPLY, 'start_date': Timestamp('NaT'), 'end_date': cls.split_date, 'apply_date': cls.split_date, } ]) def init_instance_fixtures(self): super(ClosesOnly, self).init_instance_fixtures() # View of the data on/after the split. 
self.adj_closes = adj_closes = self.closes.copy() adj_closes.ix[:self.split_date, self.split_asset] *= self.split_ratio self.pipeline_loader = DataFrameLoader( column=USEquityPricing.close, baseline=self.closes, adjustments=self.adjustments, ) def expected_close(self, date, asset): if date < self.split_date: lookup = self.closes else: lookup = self.adj_closes return lookup.loc[date, asset] def exists(self, date, asset): return asset.start_date <= date <= asset.end_date def test_attach_pipeline_after_initialize(self): """ Assert that calling attach_pipeline after initialize raises correctly. """ def initialize(context): pass def late_attach(context, data): attach_pipeline(Pipeline(), 'test') raise AssertionError("Shouldn't make it past attach_pipeline!") algo = TradingAlgorithm( initialize=initialize, handle_data=late_attach, data_frequency='daily', get_pipeline_loader=lambda column: self.pipeline_loader, start=self.first_asset_start - self.trading_day, end=self.last_asset_end + self.trading_day, env=self.env, ) with self.assertRaises(AttachPipelineAfterInitialize): algo.run(self.data_portal) def barf(context, data): raise AssertionError("Shouldn't make it past before_trading_start") algo = TradingAlgorithm( initialize=initialize, before_trading_start=late_attach, handle_data=barf, data_frequency='daily', get_pipeline_loader=lambda column: self.pipeline_loader, start=self.first_asset_start - self.trading_day, end=self.last_asset_end + self.trading_day, env=self.env, ) with self.assertRaises(AttachPipelineAfterInitialize): algo.run(self.data_portal) def test_pipeline_output_after_initialize(self): """ Assert that calling pipeline_output after initialize raises correctly. 
""" def initialize(context): attach_pipeline(Pipeline(), 'test') pipeline_output('test') raise AssertionError("Shouldn't make it past pipeline_output()") def handle_data(context, data): raise AssertionError("Shouldn't make it past initialize!") def before_trading_start(context, data): raise AssertionError("Shouldn't make it past initialize!") algo = TradingAlgorithm( initialize=initialize, handle_data=handle_data, before_trading_start=before_trading_start, data_frequency='daily', get_pipeline_loader=lambda column: self.pipeline_loader, start=self.first_asset_start - self.trading_day, end=self.last_asset_end + self.trading_day, env=self.env, ) with self.assertRaises(PipelineOutputDuringInitialize): algo.run(self.data_portal) def test_get_output_nonexistent_pipeline(self): """ Assert that calling add_pipeline after initialize raises appropriately. """ def initialize(context): attach_pipeline(Pipeline(), 'test') def handle_data(context, data): raise AssertionError("Shouldn't make it past before_trading_start") def before_trading_start(context, data): pipeline_output('not_test') raise AssertionError("Shouldn't make it past pipeline_output!") algo = TradingAlgorithm( initialize=initialize, handle_data=handle_data, before_trading_start=before_trading_start, data_frequency='daily', get_pipeline_loader=lambda column: self.pipeline_loader, start=self.first_asset_start - self.trading_day, end=self.last_asset_end + self.trading_day, env=self.env, ) with self.assertRaises(NoSuchPipeline): algo.run(self.data_portal) @parameterized.expand([('default', None), ('day', 1), ('week', 5), ('year', 252), ('all_but_one_day', 'all_but_one_day')]) def test_assets_appear_on_correct_days(self, test_name, chunksize): """ Assert that assets appear at correct times during a backtest, with correctly-adjusted close price values. 
""" if chunksize == 'all_but_one_day': chunksize = ( self.dates.get_loc(self.last_asset_end) - self.dates.get_loc(self.first_asset_start) ) - 1 def initialize(context): p = attach_pipeline(Pipeline(), 'test', chunksize=chunksize) p.add(USEquityPricing.close.latest, 'close') def handle_data(context, data): results = pipeline_output('test') date = get_datetime().normalize() for asset in self.assets: # Assets should appear iff they exist today and yesterday. exists_today = self.exists(date, asset) existed_yesterday = self.exists(date - self.trading_day, asset) if exists_today and existed_yesterday: latest = results.loc[asset, 'close'] self.assertEqual(latest, self.expected_close(date, asset)) else: self.assertNotIn(asset, results.index) before_trading_start = handle_data algo = TradingAlgorithm( initialize=initialize, handle_data=handle_data, before_trading_start=before_trading_start, data_frequency='daily', get_pipeline_loader=lambda column: self.pipeline_loader, start=self.first_asset_start, end=self.last_asset_end, env=self.env, ) # Run for a week in the middle of our data. algo.run(self.data_portal) class MockDailyBarSpotReader(object): """ A BcolzDailyBarReader which returns a constant value for spot price. 
""" def get_value(self, sid, day, column): return 100.0 class PipelineAlgorithmTestCase(WithBcolzEquityDailyBarReaderFromCSVs, WithAdjustmentReader, ZiplineTestCase): AAPL = 1 MSFT = 2 BRK_A = 3 assets = ASSET_FINDER_EQUITY_SIDS = AAPL, MSFT, BRK_A ASSET_FINDER_EQUITY_SYMBOLS = 'AAPL', 'MSFT', 'BRK_A' START_DATE = Timestamp('2014') END_DATE = Timestamp('2015') @classmethod def make_equity_daily_bar_data(cls): resources = { cls.AAPL: join(TEST_RESOURCE_PATH, 'AAPL.csv'), cls.MSFT: join(TEST_RESOURCE_PATH, 'MSFT.csv'), cls.BRK_A: join(TEST_RESOURCE_PATH, 'BRK-A.csv'), } cls.raw_data = raw_data = { asset: read_csv(path, parse_dates=['day']).set_index('day') for asset, path in resources.items() } # Add 'price' column as an alias because all kinds of stuff in zipline # depends on it being present. :/ for frame in raw_data.values(): frame['price'] = frame['close'] return resources @classmethod def make_splits_data(cls): return DataFrame.from_records([ { 'effective_date': str_to_seconds('2014-06-09'), 'ratio': (1 / 7.0), 'sid': cls.AAPL, } ]) @classmethod def make_mergers_data(cls): return create_empty_splits_mergers_frame() @classmethod def make_dividends_data(cls): return pd.DataFrame(array([], dtype=[ ('sid', uint32), ('amount', float64), ('record_date', 'datetime64[ns]'), ('ex_date', 'datetime64[ns]'), ('declared_date', 'datetime64[ns]'), ('pay_date', 'datetime64[ns]'), ])) @classmethod def init_class_fixtures(cls): super(PipelineAlgorithmTestCase, cls).init_class_fixtures() cls.pipeline_loader = USEquityPricingLoader( cls.bcolz_equity_daily_bar_reader, cls.adjustment_reader, ) cls.dates = cls.raw_data[cls.AAPL].index.tz_localize('UTC') cls.AAPL_split_date = Timestamp("2014-06-09", tz='UTC') def compute_expected_vwaps(self, window_lengths): AAPL, MSFT, BRK_A = self.AAPL, self.MSFT, self.BRK_A # Our view of the data before AAPL's split on June 9, 2014. 
raw = {k: v.copy() for k, v in iteritems(self.raw_data)} split_date = self.AAPL_split_date split_loc = self.dates.get_loc(split_date) split_ratio = 7.0 # Our view of the data after AAPL's split. All prices from before June # 9 get divided by the split ratio, and volumes get multiplied by the # split ratio. adj = {k: v.copy() for k, v in iteritems(self.raw_data)} for column in 'open', 'high', 'low', 'close': adj[AAPL].ix[:split_loc, column] /= split_ratio adj[AAPL].ix[:split_loc, 'volume'] *= split_ratio # length -> asset -> expected vwap vwaps = {length: {} for length in window_lengths} for length in window_lengths: for asset in AAPL, MSFT, BRK_A: raw_vwap = rolling_vwap(raw[asset], length) adj_vwap = rolling_vwap(adj[asset], length) # Shift computed results one day forward so that they're # labelled by the date on which they'll be seen in the # algorithm. (We can't show the close price for day N until day # N + 1.) vwaps[length][asset] = concat( [ raw_vwap[:split_loc - 1], adj_vwap[split_loc - 1:] ] ).shift(1, self.trading_calendar.day) # Make sure all the expected vwaps have the same dates. vwap_dates = vwaps[1][self.AAPL].index for dict_ in itervalues(vwaps): # Each value is a dict mapping sid -> expected series. for series in itervalues(dict_): self.assertTrue((vwap_dates == series.index).all()) # Spot check expectations near the AAPL split. # length 1 vwap for the morning before the split should be the close # price of the previous day. before_split = vwaps[1][AAPL].loc[split_date - self.trading_calendar.day] assert_almost_equal(before_split, 647.3499, decimal=2) assert_almost_equal( before_split, raw[AAPL].loc[split_date - (2 * self.trading_calendar.day), 'close'], decimal=2, ) # length 1 vwap for the morning of the split should be the close price # of the previous day, **ADJUSTED FOR THE SPLIT**. 
on_split = vwaps[1][AAPL].loc[split_date] assert_almost_equal(on_split, 645.5700 / split_ratio, decimal=2) assert_almost_equal( on_split, raw[AAPL].loc[split_date - self.trading_calendar.day, 'close'] / split_ratio, decimal=2, ) # length 1 vwap on the day after the split should be the as-traded # close on the split day. after_split = vwaps[1][AAPL].loc[split_date + self.trading_calendar.day] assert_almost_equal(after_split, 93.69999, decimal=2) assert_almost_equal( after_split, raw[AAPL].loc[split_date, 'close'], decimal=2, ) return vwaps @parameterized.expand([ (True,), (False,), ]) def test_handle_adjustment(self, set_screen): AAPL, MSFT, BRK_A = assets = self.AAPL, self.MSFT, self.BRK_A window_lengths = [1, 2, 5, 10] vwaps = self.compute_expected_vwaps(window_lengths) def vwap_key(length): return "vwap_%d" % length def initialize(context): pipeline = Pipeline() context.vwaps = [] for length in vwaps: name = vwap_key(length) factor = VWAP(window_length=length) context.vwaps.append(factor) pipeline.add(factor, name=name) filter_ = (USEquityPricing.close.latest > 300) pipeline.add(filter_, 'filter') if set_screen: pipeline.set_screen(filter_) attach_pipeline(pipeline, 'test') def handle_data(context, data): today = normalize_date(get_datetime()) results = pipeline_output('test') expect_over_300 = { AAPL: today < self.AAPL_split_date, MSFT: False, BRK_A: True, } for asset in assets: should_pass_filter = expect_over_300[asset] if set_screen and not should_pass_filter: self.assertNotIn(asset, results.index) continue asset_results = results.loc[asset] self.assertEqual(asset_results['filter'], should_pass_filter) for length in vwaps: computed = results.loc[asset, vwap_key(length)] expected = vwaps[length][asset].loc[today] # Only having two places of precision here is a bit # unfortunate. 
assert_almost_equal(computed, expected, decimal=2) # Do the same checks in before_trading_start before_trading_start = handle_data algo = TradingAlgorithm( initialize=initialize, handle_data=handle_data, before_trading_start=before_trading_start, data_frequency='daily', get_pipeline_loader=lambda column: self.pipeline_loader, start=self.dates[max(window_lengths)], end=self.dates[-1], env=self.env, ) algo.run( FakeDataPortal(), # Yes, I really do want to use the start and end dates I passed to # TradingAlgorithm. overwrite_sim_params=False, ) def test_empty_pipeline(self): # For ensuring we call before_trading_start. count = [0] def initialize(context): pipeline = attach_pipeline(Pipeline(), 'test') vwap = VWAP(window_length=10) pipeline.add(vwap, 'vwap') # Nothing should have prices less than 0. pipeline.set_screen(vwap < 0) def handle_data(context, data): pass def before_trading_start(context, data): context.results = pipeline_output('test') self.assertTrue(context.results.empty) count[0] += 1 algo = TradingAlgorithm( initialize=initialize, handle_data=handle_data, before_trading_start=before_trading_start, data_frequency='daily', get_pipeline_loader=lambda column: self.pipeline_loader, start=self.dates[0], end=self.dates[-1], env=self.env, ) algo.run( FakeDataPortal(), overwrite_sim_params=False, ) self.assertTrue(count[0] > 0) def test_pipeline_beyond_daily_bars(self): """ Ensure that we can run an algo with pipeline beyond the max date of the daily bars. """ # For ensuring we call before_trading_start. count = [0] current_day = self.trading_calendar.next_session_label( self.pipeline_loader.raw_price_loader.last_available_dt, ) def initialize(context): pipeline = attach_pipeline(Pipeline(), 'test') vwap = VWAP(window_length=10) pipeline.add(vwap, 'vwap') # Nothing should have prices less than 0. 
pipeline.set_screen(vwap < 0) def handle_data(context, data): pass def before_trading_start(context, data): context.results = pipeline_output('test') self.assertTrue(context.results.empty) count[0] += 1 algo = TradingAlgorithm( initialize=initialize, handle_data=handle_data, before_trading_start=before_trading_start, data_frequency='daily', get_pipeline_loader=lambda column: self.pipeline_loader, start=self.dates[0], end=current_day, env=self.env, ) algo.run( FakeDataPortal(), overwrite_sim_params=False, ) self.assertTrue(count[0] > 0)
{ "content_hash": "887acf81fff8eb4eb5478708d3e8a1ec", "timestamp": "", "source": "github", "line_count": 645, "max_line_length": 79, "avg_line_length": 33.83410852713178, "alnum_prop": 0.5617926041332539, "repo_name": "magne-max/zipline-ja", "id": "e24996833f9651a332f0287b8608cdec56cf3f9b", "size": "21823", "binary": false, "copies": "1", "ref": "refs/heads/japan", "path": "tests/pipeline/test_pipeline_algo.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "7251" }, { "name": "Emacs Lisp", "bytes": "138" }, { "name": "Jupyter Notebook", "bytes": "166848" }, { "name": "PowerShell", "bytes": "3260" }, { "name": "Python", "bytes": "2949355" }, { "name": "Shell", "bytes": "7508" } ], "symlink_target": "" }
ACCEPTED #### According to Index Fungorum #### Published in null #### Original name Mycoderma humuli Lasché ### Remarks null
{ "content_hash": "f9dc9d64a5434416485e8a67f702ba8e", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 23, "avg_line_length": 9.846153846153847, "alnum_prop": 0.7109375, "repo_name": "mdoering/backbone", "id": "d45b9f05b36b0b41c0d58fd606f59e6972546b77", "size": "177", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Fungi/Ascomycota/Mycoderma/Mycoderma humuli/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
// // Created by Darin on 20/07/2018. // #include "call_js_on_app_context_task.h" CallJsOnAppContextTask::CallJsOnAppContextTask(const std::string &instanceId, const std::string &func, std::vector<VALUE_WITH_TYPE *> &params) : WeexTask(instanceId) { this->func = func; exeJsArgs = new ExeJsArgs(params); } CallJsOnAppContextTask::CallJsOnAppContextTask(const std::string &instanceId, const std::string &func, IPCArguments *arguments, size_t startCount) : WeexTask(instanceId) { this->func = func; exeJsArgs = new ExeJsArgs(arguments, startCount); } void CallJsOnAppContextTask::run(WeexRuntime *runtime) { runtime->callJSOnAppContext(instanceId, func, exeJsArgs->params); }
{ "content_hash": "118ae9413ee2992af684b410a33a86d3", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 127, "avg_line_length": 28.535714285714285, "alnum_prop": 0.6483103879849812, "repo_name": "alibaba/weex", "id": "c12cd16bc718e2358be8d950117bdab4da70fdf1", "size": "1607", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "weex_core/Source/js_runtime/weex/task/impl/call_js_on_app_context_task.cpp", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Awk", "bytes": "1100" }, { "name": "C", "bytes": "2394315" }, { "name": "C++", "bytes": "10574606" }, { "name": "CMake", "bytes": "94330" }, { "name": "Java", "bytes": "3428925" }, { "name": "JavaScript", "bytes": "4162443" }, { "name": "Makefile", "bytes": "27" }, { "name": "Objective-C", "bytes": "1645742" }, { "name": "Objective-C++", "bytes": "733199" }, { "name": "Python", "bytes": "53147" }, { "name": "Ruby", "bytes": "8073" }, { "name": "Shell", "bytes": "24472" }, { "name": "Vue", "bytes": "111308" } ], "symlink_target": "" }
import logging from mender.cli.utils import api_from_opts, run_command, dump_token, \ load_file, save_file, do_simple_get from mender.client import user_url def add_args(sub): # deployments sub.set_defaults(usercommand='') pusub = sub.add_subparsers(help='Commands for user management') plogin = pusub.add_parser('login', help='Login') plogin.set_defaults(usercommand='login') plogin.add_argument('-u', '--user', help='User name', required=True) plogin.add_argument('-p', '--password', help='Password', required=True) ptoken = pusub.add_parser('token', help='Show token') ptoken.set_defaults(usercommand='token') plogininit = pusub.add_parser('initial-login', help='Initial login') plogininit.set_defaults(usercommand='initial-login') pinit = pusub.add_parser('initial', help='Initial user') pinit.set_defaults(usercommand='initial') pinit.add_argument('-u', '--user', help='User name', required=True) pinit.add_argument('-p', '--password', help='Password', required=True) plist = pusub.add_parser('list', help='List users') plist.set_defaults(usercommand='list') def do_main(opts): logging.debug('user opts: %r', opts) cmds = { 'login': do_user_login, 'token': do_user_token, 'initial-login': do_user_login_initial, 'initial': do_user_create_initial, 'list': list_users, } run_command(opts.usercommand, cmds, opts) def do_user_login(opts): logging.debug('user login') url = user_url(opts.service, '/auth/login') with api_from_opts(opts) as api: # use basic auth with user provided password and login api.auth = (opts.user, opts.password) rsp = api.post(url) if rsp.status_code == 200: logging.info('request successful') logging.info('token: %s', rsp.text) save_file(opts.user_token, rsp.text) else: logging.warning('request failed: %s %s', rsp, rsp.text) def do_user_login_initial(opts): logging.debug('initial user login') url = user_url(opts.service, '/auth/login') with api_from_opts(opts) as api: # use basic auth with user provided password and login api.auth = None rsp = api.post(url) if 
rsp.status_code == 200: logging.info('initial login successful') logging.info('token: %s', rsp.text) save_file(opts.user_token, rsp.text) else: logging.warning('request failed: %s %s', rsp, rsp.text) def do_user_create_initial(opts): logging.debug('create initial user') url = user_url(opts.service, '/users/initial') with api_from_opts(opts) as api: # use basic auth with user provided password and login rsp = api.post(url, json={ 'email': opts.user, 'password': opts.password, }) if rsp.status_code == 201: logging.info('initial user created') logging.info(rsp.text) else: logging.warning('request failed: %s %s', rsp, rsp.text) def do_user_token(opts): logging.info('show user token') try: tok = load_file(opts.user_token) except IOError as err: logging.error('failed to load token from %s: %s', opts.user_token, err) return dump_token(tok) def dump_user(data): print('user ID: %s' % data['id']) print(' email: %s' % data['email']) print(' created: %s' % data['created_ts']) print(' updated: %s' % data['updated_ts']) def list_users(opts): logging.info('list users') with api_from_opts(opts) as api: do_simple_get(api, user_url(opts.service, '/users'), printer=lambda rsp: [dump_user(user) for user in rsp.json()])
{ "content_hash": "31a0f13f37522da92d67d83e80e7584d", "timestamp": "", "source": "github", "line_count": 119, "max_line_length": 75, "avg_line_length": 32.94117647058823, "alnum_prop": 0.5915816326530612, "repo_name": "bboozzoo/mender-backend-cli", "id": "2de92a7900e588784da1040c355f50e86095909e", "size": "5040", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "mender/cli/user.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "63002" } ], "symlink_target": "" }
function bgfxProject(_name, _kind, _defines) project ("bgfx" .. _name) uuid (os.uuid("bgfx" .. _name)) kind (_kind) if _kind == "SharedLib" then defines { "BGFX_SHARED_LIB_BUILD=1", } configuration { "vs20* or mingw*" } links { "gdi32", "psapi", } configuration { "mingw*" } linkoptions { "-shared", } configuration { "linux-*" } buildoptions { "-fPIC", } configuration {} end includedirs { path.join(BGFX_DIR, "3rdparty"), path.join(BGFX_DIR, "3rdparty/dxsdk/include"), path.join(BGFX_DIR, "../bx/include"), } defines { _defines, } if _OPTIONS["with-glfw"] then defines { "BGFX_CONFIG_MULTITHREADED=0", } end if _OPTIONS["with-ovr"] then defines { "BGFX_CONFIG_USE_OVR=1", } includedirs { "$(OVR_DIR)/LibOVR/Include", } end configuration { "Debug" } defines { "BGFX_CONFIG_DEBUG=1", } configuration { "android*" } links { "EGL", "GLESv2", } configuration { "winphone8* or winstore8*" } linkoptions { "/ignore:4264" -- LNK4264: archiving object file compiled with /ZW into a static library; note that when authoring Windows Runtime types it is not recommended to link with a static library that contains Windows Runtime metadata } configuration { "*clang*" } buildoptions { "-Wno-microsoft-enum-value", -- enumerator value is not representable in the underlying type 'int' "-Wno-microsoft-const-init", -- default initialization of an object of const type '' without a user-provided default constructor is a Microsoft extension } configuration { "osx" } linkoptions { "-framework Cocoa", "-framework Metal", "-framework QuartzCore", "-framework OpenGL", } configuration { "not nacl" } includedirs { --nacl has GLES2 headers modified... 
path.join(BGFX_DIR, "3rdparty/khronos"), } configuration {} includedirs { path.join(BGFX_DIR, "include"), } files { path.join(BGFX_DIR, "include/**.h"), path.join(BGFX_DIR, "src/**.cpp"), path.join(BGFX_DIR, "src/**.h"), } removefiles { path.join(BGFX_DIR, "src/**.bin.h"), } if _OPTIONS["with-amalgamated"] then excludes { path.join(BGFX_DIR, "src/bgfx.cpp"), path.join(BGFX_DIR, "src/glcontext_egl.cpp"), path.join(BGFX_DIR, "src/glcontext_glx.cpp"), path.join(BGFX_DIR, "src/glcontext_ppapi.cpp"), path.join(BGFX_DIR, "src/glcontext_wgl.cpp"), path.join(BGFX_DIR, "src/image.cpp"), path.join(BGFX_DIR, "src/ovr.cpp"), path.join(BGFX_DIR, "src/renderdoc.cpp"), path.join(BGFX_DIR, "src/renderer_d3d9.cpp"), path.join(BGFX_DIR, "src/renderer_d3d11.cpp"), path.join(BGFX_DIR, "src/renderer_d3d12.cpp"), path.join(BGFX_DIR, "src/renderer_null.cpp"), path.join(BGFX_DIR, "src/renderer_gl.cpp"), path.join(BGFX_DIR, "src/renderer_vk.cpp"), path.join(BGFX_DIR, "src/shader_dx9bc.cpp"), path.join(BGFX_DIR, "src/shader_dxbc.cpp"), path.join(BGFX_DIR, "src/shader_spirv.cpp"), path.join(BGFX_DIR, "src/vertexdecl.cpp"), } configuration { "xcode* or osx or ios*" } files { path.join(BGFX_DIR, "src/amalgamated.mm"), } excludes { path.join(BGFX_DIR, "src/glcontext_eagl.mm"), path.join(BGFX_DIR, "src/glcontext_nsgl.mm"), path.join(BGFX_DIR, "src/renderer_mtl.mm"), path.join(BGFX_DIR, "src/amalgamated.cpp"), } configuration {} else configuration { "xcode* or osx or ios*" } files { path.join(BGFX_DIR, "src/glcontext_eagl.mm"), path.join(BGFX_DIR, "src/glcontext_nsgl.mm"), path.join(BGFX_DIR, "src/renderer_mtl.mm"), } configuration {} excludes { path.join(BGFX_DIR, "src/amalgamated.**"), } end configuration {} copyLib() end
{ "content_hash": "4d9a2ebd00729ee7cfe66ed7a543c10b", "timestamp": "", "source": "github", "line_count": 162, "max_line_length": 231, "avg_line_length": 23.567901234567902, "alnum_prop": 0.6178627553693034, "repo_name": "BlueCrystalLabs/bgfx", "id": "a946155bdc865f45d2ab4e5c93c0998cfd7f29dd", "size": "3948", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "scripts/bgfx.lua", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "C", "bytes": "130622" }, { "name": "C++", "bytes": "1415717" }, { "name": "Lua", "bytes": "19234" }, { "name": "Makefile", "bytes": "16587" }, { "name": "Objective-C", "bytes": "147063" }, { "name": "Objective-C++", "bytes": "106984" }, { "name": "Scala", "bytes": "231" }, { "name": "Shell", "bytes": "19931" }, { "name": "SuperCollider", "bytes": "3999" } ], "symlink_target": "" }
package org.apache.hadoop.hdfs.server.datanode.fsdataset.impl; import java.io.BufferedWriter; import java.io.File; import java.io.FileOutputStream; import java.io.FilenameFilter; import java.io.IOException; import java.nio.channels.ClosedChannelException; import java.io.OutputStreamWriter; import java.nio.file.Files; import java.nio.file.Paths; import java.nio.file.StandardCopyOption; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Executor; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.DF; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.StorageType; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.server.datanode.DataStorage; import org.apache.hadoop.hdfs.server.datanode.DatanodeUtil; import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeReference; import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi; import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi; import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage; import org.apache.hadoop.util.CloseableReferenceCount; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.util.DiskChecker.DiskErrorException; import com.google.common.util.concurrent.ThreadFactoryBuilder; import org.apache.hadoop.util.Time; import 
org.codehaus.jackson.annotate.JsonProperty; import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.map.ObjectReader; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * The underlying volume used to store replica. * * It uses the {@link FsDatasetImpl} object for synchronization. */ @InterfaceAudience.Private @VisibleForTesting public class FsVolumeImpl implements FsVolumeSpi { public static final Logger LOG = LoggerFactory.getLogger(FsVolumeImpl.class); private final FsDatasetImpl dataset; private final String storageID; private final StorageType storageType; private final Map<String, BlockPoolSlice> bpSlices = new ConcurrentHashMap<String, BlockPoolSlice>(); private final File currentDir; // <StorageDirectory>/current private final DF usage; private final long reserved; private CloseableReferenceCount reference = new CloseableReferenceCount(); // Disk space reserved for open blocks. private AtomicLong reservedForRbw; // Capacity configured. This is useful when we want to // limit the visible capacity for tests. If negative, then we just // query from the filesystem. protected volatile long configuredCapacity; /** * Per-volume worker pool that processes new blocks to cache. * The maximum number of workers per volume is bounded (configurable via * dfs.datanode.fsdatasetcache.max.threads.per.volume) to limit resource * contention. 
*/ protected ThreadPoolExecutor cacheExecutor; FsVolumeImpl(FsDatasetImpl dataset, String storageID, File currentDir, Configuration conf, StorageType storageType) throws IOException { this.dataset = dataset; this.storageID = storageID; this.reserved = conf.getLong( DFSConfigKeys.DFS_DATANODE_DU_RESERVED_KEY, DFSConfigKeys.DFS_DATANODE_DU_RESERVED_DEFAULT); this.reservedForRbw = new AtomicLong(0L); this.currentDir = currentDir; File parent = currentDir.getParentFile(); this.usage = new DF(parent, conf); this.storageType = storageType; this.configuredCapacity = -1; cacheExecutor = initializeCacheExecutor(parent); } protected ThreadPoolExecutor initializeCacheExecutor(File parent) { if (storageType.isTransient()) { return null; } if (dataset.datanode == null) { // FsVolumeImpl is used in test. return null; } final int maxNumThreads = dataset.datanode.getConf().getInt( DFSConfigKeys.DFS_DATANODE_FSDATASETCACHE_MAX_THREADS_PER_VOLUME_KEY, DFSConfigKeys.DFS_DATANODE_FSDATASETCACHE_MAX_THREADS_PER_VOLUME_DEFAULT); ThreadFactory workerFactory = new ThreadFactoryBuilder() .setDaemon(true) .setNameFormat("FsVolumeImplWorker-" + parent.toString() + "-%d") .build(); ThreadPoolExecutor executor = new ThreadPoolExecutor( 1, maxNumThreads, 60, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>(), workerFactory); executor.allowCoreThreadTimeOut(true); return executor; } private void printReferenceTraceInfo(String op) { StackTraceElement[] stack = Thread.currentThread().getStackTrace(); for (StackTraceElement ste : stack) { switch (ste.getMethodName()) { case "getDfsUsed": case "getBlockPoolUsed": case "getAvailable": case "getVolumeMap": return; default: break; } } FsDatasetImpl.LOG.trace("Reference count: " + op + " " + this + ": " + this.reference.getReferenceCount()); FsDatasetImpl.LOG.trace( Joiner.on("\n").join(Thread.currentThread().getStackTrace())); } /** * Increase the reference count. The caller must increase the reference count * before issuing IOs. 
* * @throws IOException if the volume is already closed. */ private void reference() throws ClosedChannelException { this.reference.reference(); if (FsDatasetImpl.LOG.isTraceEnabled()) { printReferenceTraceInfo("incr"); } } /** * Decrease the reference count. */ private void unreference() { if (FsDatasetImpl.LOG.isTraceEnabled()) { printReferenceTraceInfo("desc"); } if (FsDatasetImpl.LOG.isDebugEnabled()) { if (reference.getReferenceCount() <= 0) { FsDatasetImpl.LOG.debug("Decrease reference count <= 0 on " + this + Joiner.on("\n").join(Thread.currentThread().getStackTrace())); } } checkReference(); this.reference.unreference(); } private static class FsVolumeReferenceImpl implements FsVolumeReference { private final FsVolumeImpl volume; FsVolumeReferenceImpl(FsVolumeImpl volume) throws ClosedChannelException { this.volume = volume; volume.reference(); } /** * Decreases the reference count. * @throws IOException it never throws IOException. */ @Override public void close() throws IOException { volume.unreference(); } @Override public FsVolumeSpi getVolume() { return this.volume; } } @Override public FsVolumeReference obtainReference() throws ClosedChannelException { return new FsVolumeReferenceImpl(this); } private void checkReference() { Preconditions.checkState(reference.getReferenceCount() > 0); } /** * Close this volume and wait all other threads to release the reference count * on this volume. * @throws IOException if the volume is closed or the waiting is interrupted. 
*/ void closeAndWait() throws IOException { try { this.reference.setClosed(); } catch (ClosedChannelException e) { throw new IOException("The volume has already closed.", e); } final int SLEEP_MILLIS = 500; while (this.reference.getReferenceCount() > 0) { if (FsDatasetImpl.LOG.isDebugEnabled()) { FsDatasetImpl.LOG.debug(String.format( "The reference count for %s is %d, wait to be 0.", this, reference.getReferenceCount())); } try { Thread.sleep(SLEEP_MILLIS); } catch (InterruptedException e) { throw new IOException(e); } } } File getCurrentDir() { return currentDir; } File getRbwDir(String bpid) throws IOException { return getBlockPoolSlice(bpid).getRbwDir(); } File getLazyPersistDir(String bpid) throws IOException { return getBlockPoolSlice(bpid).getLazypersistDir(); } File getTmpDir(String bpid) throws IOException { return getBlockPoolSlice(bpid).getTmpDir(); } void decDfsUsed(String bpid, long value) { synchronized(dataset) { BlockPoolSlice bp = bpSlices.get(bpid); if (bp != null) { bp.decDfsUsed(value); } } } void incDfsUsed(String bpid, long value) { synchronized(dataset) { BlockPoolSlice bp = bpSlices.get(bpid); if (bp != null) { bp.incDfsUsed(value); } } } @VisibleForTesting public long getDfsUsed() throws IOException { long dfsUsed = 0; synchronized(dataset) { for(BlockPoolSlice s : bpSlices.values()) { dfsUsed += s.getDfsUsed(); } } return dfsUsed; } long getBlockPoolUsed(String bpid) throws IOException { return getBlockPoolSlice(bpid).getDfsUsed(); } /** * Calculate the capacity of the filesystem, after removing any * reserved capacity. * @return the unreserved number of bytes left in this filesystem. May be zero. */ @VisibleForTesting public long getCapacity() { if (configuredCapacity < 0) { long remaining = usage.getCapacity() - reserved; return remaining > 0 ? remaining : 0; } return configuredCapacity; } /** * This function MUST NOT be used outside of tests. 
* * @param capacity */ @VisibleForTesting public void setCapacityForTesting(long capacity) { this.configuredCapacity = capacity; } @Override public long getAvailable() throws IOException { long remaining = getCapacity() - getDfsUsed() - reservedForRbw.get(); long available = usage.getAvailable(); if (remaining > available) { remaining = available; } return (remaining > 0) ? remaining : 0; } @VisibleForTesting public long getReservedForRbw() { return reservedForRbw.get(); } long getReserved(){ return reserved; } BlockPoolSlice getBlockPoolSlice(String bpid) throws IOException { BlockPoolSlice bp = bpSlices.get(bpid); if (bp == null) { throw new IOException("block pool " + bpid + " is not found"); } return bp; } @Override public String getBasePath() { return currentDir.getParent(); } @Override public boolean isTransientStorage() { return storageType.isTransient(); } @Override public String getPath(String bpid) throws IOException { return getBlockPoolSlice(bpid).getDirectory().getAbsolutePath(); } @Override public File getFinalizedDir(String bpid) throws IOException { return getBlockPoolSlice(bpid).getFinalizedDir(); } /** * Make a deep copy of the list of currently active BPIDs */ @Override public String[] getBlockPoolList() { return bpSlices.keySet().toArray(new String[bpSlices.keySet().size()]); } /** * Temporary files. They get moved to the finalized block directory when * the block is finalized. 
*/ File createTmpFile(String bpid, Block b) throws IOException { checkReference(); return getBlockPoolSlice(bpid).createTmpFile(b); } @Override public void reserveSpaceForRbw(long bytesToReserve) { if (bytesToReserve != 0) { reservedForRbw.addAndGet(bytesToReserve); } } @Override public void releaseReservedSpace(long bytesToRelease) { if (bytesToRelease != 0) { long oldReservation, newReservation; do { oldReservation = reservedForRbw.get(); newReservation = oldReservation - bytesToRelease; if (newReservation < 0) { // Failsafe, this should never occur in practice, but if it does we don't // want to start advertising more space than we have available. newReservation = 0; } } while (!reservedForRbw.compareAndSet(oldReservation, newReservation)); } } private enum SubdirFilter implements FilenameFilter { INSTANCE; @Override public boolean accept(File dir, String name) { return name.startsWith("subdir"); } } private enum BlockFileFilter implements FilenameFilter { INSTANCE; @Override public boolean accept(File dir, String name) { return !name.endsWith(".meta") && name.startsWith("blk_"); } } @VisibleForTesting public static String nextSorted(List<String> arr, String prev) { int res = 0; if (prev != null) { res = Collections.binarySearch(arr, prev); if (res < 0) { res = -1 - res; } else { res++; } } if (res >= arr.size()) { return null; } return arr.get(res); } private static class BlockIteratorState { BlockIteratorState() { lastSavedMs = iterStartMs = Time.now(); curFinalizedDir = null; curFinalizedSubDir = null; curEntry = null; atEnd = false; } // The wall-clock ms since the epoch at which this iterator was last saved. @JsonProperty private long lastSavedMs; // The wall-clock ms since the epoch at which this iterator was created. 
@JsonProperty private long iterStartMs; @JsonProperty private String curFinalizedDir; @JsonProperty private String curFinalizedSubDir; @JsonProperty private String curEntry; @JsonProperty private boolean atEnd; } /** * A BlockIterator implementation for FsVolumeImpl. */ private class BlockIteratorImpl implements FsVolumeSpi.BlockIterator { private final File bpidDir; private final String name; private final String bpid; private long maxStalenessMs = 0; private List<String> cache; private long cacheMs; private BlockIteratorState state; BlockIteratorImpl(String bpid, String name) { this.bpidDir = new File(currentDir, bpid); this.name = name; this.bpid = bpid; rewind(); } /** * Get the next subdirectory within the block pool slice. * * @return The next subdirectory within the block pool slice, or * null if there are no more. */ private String getNextSubDir(String prev, File dir) throws IOException { List<String> children = IOUtils.listDirectory(dir, SubdirFilter.INSTANCE); cache = null; cacheMs = 0; if (children.size() == 0) { LOG.trace("getNextSubDir({}, {}): no subdirectories found in {}", storageID, bpid, dir.getAbsolutePath()); return null; } Collections.sort(children); String nextSubDir = nextSorted(children, prev); if (nextSubDir == null) { LOG.trace("getNextSubDir({}, {}): no more subdirectories found in {}", storageID, bpid, dir.getAbsolutePath()); } else { LOG.trace("getNextSubDir({}, {}): picking next subdirectory {} " + "within {}", storageID, bpid, nextSubDir, dir.getAbsolutePath()); } return nextSubDir; } private String getNextFinalizedDir() throws IOException { File dir = Paths.get( bpidDir.getAbsolutePath(), "current", "finalized").toFile(); return getNextSubDir(state.curFinalizedDir, dir); } private String getNextFinalizedSubDir() throws IOException { if (state.curFinalizedDir == null) { return null; } File dir = Paths.get( bpidDir.getAbsolutePath(), "current", "finalized", state.curFinalizedDir).toFile(); return getNextSubDir(state.curFinalizedSubDir, 
dir); } private List<String> getSubdirEntries() throws IOException { if (state.curFinalizedSubDir == null) { return null; // There are no entries in the null subdir. } long now = Time.monotonicNow(); if (cache != null) { long delta = now - cacheMs; if (delta < maxStalenessMs) { return cache; } else { LOG.trace("getSubdirEntries({}, {}): purging entries cache for {} " + "after {} ms.", storageID, bpid, state.curFinalizedSubDir, delta); cache = null; } } File dir = Paths.get(bpidDir.getAbsolutePath(), "current", "finalized", state.curFinalizedDir, state.curFinalizedSubDir).toFile(); List<String> entries = IOUtils.listDirectory(dir, BlockFileFilter.INSTANCE); if (entries.size() == 0) { entries = null; } else { Collections.sort(entries); } if (entries == null) { LOG.trace("getSubdirEntries({}, {}): no entries found in {}", storageID, bpid, dir.getAbsolutePath()); } else { LOG.trace("getSubdirEntries({}, {}): listed {} entries in {}", storageID, bpid, entries.size(), dir.getAbsolutePath()); } cache = entries; cacheMs = now; return cache; } /** * Get the next block.<p/> * * Each volume has a hierarchical structure.<p/> * * <code> * BPID B0 * finalized/ * subdir0 * subdir0 * blk_000 * blk_001 * ... * subdir1 * subdir0 * ... * rbw/ * </code> * * When we run out of entries at one level of the structure, we search * progressively higher levels. For example, when we run out of blk_ * entries in a subdirectory, we search for the next subdirectory. * And so on. 
*/ @Override public ExtendedBlock nextBlock() throws IOException { if (state.atEnd) { return null; } try { while (true) { List<String> entries = getSubdirEntries(); if (entries != null) { state.curEntry = nextSorted(entries, state.curEntry); if (state.curEntry == null) { LOG.trace("nextBlock({}, {}): advancing from {} to next " + "subdirectory.", storageID, bpid, state.curFinalizedSubDir); } else { ExtendedBlock block = new ExtendedBlock(bpid, Block.filename2id(state.curEntry)); LOG.trace("nextBlock({}, {}): advancing to {}", storageID, bpid, block); return block; } } state.curFinalizedSubDir = getNextFinalizedSubDir(); if (state.curFinalizedSubDir == null) { state.curFinalizedDir = getNextFinalizedDir(); if (state.curFinalizedDir == null) { state.atEnd = true; return null; } } } } catch (IOException e) { state.atEnd = true; LOG.error("nextBlock({}, {}): I/O error", storageID, bpid, e); throw e; } } @Override public boolean atEnd() { return state.atEnd; } @Override public void rewind() { cache = null; cacheMs = 0; state = new BlockIteratorState(); } @Override public void save() throws IOException { state.lastSavedMs = Time.now(); boolean success = false; ObjectMapper mapper = new ObjectMapper(); try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter( new FileOutputStream(getTempSaveFile(), false), "UTF-8"))) { mapper.defaultPrettyPrintingWriter().writeValue(writer, state); success = true; } finally { if (!success) { if (getTempSaveFile().delete()) { LOG.debug("save({}, {}): error deleting temporary file.", storageID, bpid); } } } Files.move(getTempSaveFile().toPath(), getSaveFile().toPath(), StandardCopyOption.ATOMIC_MOVE); if (LOG.isTraceEnabled()) { LOG.trace("save({}, {}): saved {}", storageID, bpid, mapper.defaultPrettyPrintingWriter().writeValueAsString(state)); } } public void load() throws IOException { ObjectMapper mapper = new ObjectMapper(); File file = getSaveFile(); this.state = mapper.reader(BlockIteratorState.class).readValue(file); 
LOG.trace("load({}, {}): loaded iterator {} from {}: {}", storageID, bpid, name, file.getAbsoluteFile(), mapper.defaultPrettyPrintingWriter().writeValueAsString(state)); } File getSaveFile() { return new File(bpidDir, name + ".cursor"); } File getTempSaveFile() { return new File(bpidDir, name + ".cursor.tmp"); } @Override public void setMaxStalenessMs(long maxStalenessMs) { this.maxStalenessMs = maxStalenessMs; } @Override public void close() throws IOException { // No action needed for this volume implementation. } @Override public long getIterStartMs() { return state.iterStartMs; } @Override public long getLastSavedMs() { return state.lastSavedMs; } @Override public String getBlockPoolId() { return bpid; } } @Override public BlockIterator newBlockIterator(String bpid, String name) { return new BlockIteratorImpl(bpid, name); } @Override public BlockIterator loadBlockIterator(String bpid, String name) throws IOException { BlockIteratorImpl iter = new BlockIteratorImpl(bpid, name); iter.load(); return iter; } @Override public FsDatasetSpi getDataset() { return dataset; } /** * RBW files. They get moved to the finalized block directory when * the block is finalized. */ File createRbwFile(String bpid, Block b) throws IOException { checkReference(); reserveSpaceForRbw(b.getNumBytes()); try { return getBlockPoolSlice(bpid).createRbwFile(b); } catch (IOException exception) { releaseReservedSpace(b.getNumBytes()); throw exception; } } /** * * @param bytesReservedForRbw Space that was reserved during * block creation. Now that the block is being finalized we * can free up this space. 
* @return * @throws IOException */ File addFinalizedBlock(String bpid, Block b, File f, long bytesReservedForRbw) throws IOException { releaseReservedSpace(bytesReservedForRbw); return getBlockPoolSlice(bpid).addBlock(b, f); } Executor getCacheExecutor() { return cacheExecutor; } void checkDirs() throws DiskErrorException { // TODO:FEDERATION valid synchronization for(BlockPoolSlice s : bpSlices.values()) { s.checkDirs(); } } void getVolumeMap(ReplicaMap volumeMap, final RamDiskReplicaTracker ramDiskReplicaMap) throws IOException { for(BlockPoolSlice s : bpSlices.values()) { s.getVolumeMap(volumeMap, ramDiskReplicaMap); } } void getVolumeMap(String bpid, ReplicaMap volumeMap, final RamDiskReplicaTracker ramDiskReplicaMap) throws IOException { getBlockPoolSlice(bpid).getVolumeMap(volumeMap, ramDiskReplicaMap); } @Override public String toString() { return currentDir.getAbsolutePath(); } void shutdown() { if (cacheExecutor != null) { cacheExecutor.shutdown(); } Set<Entry<String, BlockPoolSlice>> set = bpSlices.entrySet(); for (Entry<String, BlockPoolSlice> entry : set) { entry.getValue().shutdown(); } } void addBlockPool(String bpid, Configuration conf) throws IOException { File bpdir = new File(currentDir, bpid); BlockPoolSlice bp = new BlockPoolSlice(bpid, this, bpdir, conf); bpSlices.put(bpid, bp); } void shutdownBlockPool(String bpid) { BlockPoolSlice bp = bpSlices.get(bpid); if (bp != null) { bp.shutdown(); } bpSlices.remove(bpid); } boolean isBPDirEmpty(String bpid) throws IOException { File volumeCurrentDir = this.getCurrentDir(); File bpDir = new File(volumeCurrentDir, bpid); File bpCurrentDir = new File(bpDir, DataStorage.STORAGE_DIR_CURRENT); File finalizedDir = new File(bpCurrentDir, DataStorage.STORAGE_DIR_FINALIZED); File rbwDir = new File(bpCurrentDir, DataStorage.STORAGE_DIR_RBW); if (finalizedDir.exists() && !DatanodeUtil.dirNoFilesRecursive( finalizedDir)) { return false; } if (rbwDir.exists() && FileUtil.list(rbwDir).length != 0) { return false; } 
return true; } void deleteBPDirectories(String bpid, boolean force) throws IOException { File volumeCurrentDir = this.getCurrentDir(); File bpDir = new File(volumeCurrentDir, bpid); if (!bpDir.isDirectory()) { // nothing to be deleted return; } File tmpDir = new File(bpDir, DataStorage.STORAGE_DIR_TMP); File bpCurrentDir = new File(bpDir, DataStorage.STORAGE_DIR_CURRENT); File finalizedDir = new File(bpCurrentDir, DataStorage.STORAGE_DIR_FINALIZED); File lazypersistDir = new File(bpCurrentDir, DataStorage.STORAGE_DIR_LAZY_PERSIST); File rbwDir = new File(bpCurrentDir, DataStorage.STORAGE_DIR_RBW); if (force) { FileUtil.fullyDelete(bpDir); } else { if (!rbwDir.delete()) { throw new IOException("Failed to delete " + rbwDir); } if (!DatanodeUtil.dirNoFilesRecursive(finalizedDir) || !FileUtil.fullyDelete(finalizedDir)) { throw new IOException("Failed to delete " + finalizedDir); } if (lazypersistDir.exists() && ((!DatanodeUtil.dirNoFilesRecursive(lazypersistDir) || !FileUtil.fullyDelete(lazypersistDir)))) { throw new IOException("Failed to delete " + lazypersistDir); } FileUtil.fullyDelete(tmpDir); for (File f : FileUtil.listFiles(bpCurrentDir)) { if (!f.delete()) { throw new IOException("Failed to delete " + f); } } if (!bpCurrentDir.delete()) { throw new IOException("Failed to delete " + bpCurrentDir); } for (File f : FileUtil.listFiles(bpDir)) { if (!f.delete()) { throw new IOException("Failed to delete " + f); } } if (!bpDir.delete()) { throw new IOException("Failed to delete " + bpDir); } } } @Override public String getStorageID() { return storageID; } @Override public StorageType getStorageType() { return storageType; } DatanodeStorage toDatanodeStorage() { return new DatanodeStorage(storageID, DatanodeStorage.State.NORMAL, storageType); } }
{ "content_hash": "04651046c107112d312ecd265bb1d340", "timestamp": "", "source": "github", "line_count": 894, "max_line_length": 85, "avg_line_length": 29.885906040268456, "alnum_prop": 0.6528183247249045, "repo_name": "wankunde/cloudera_hadoop", "id": "9e8c588420c0b6fd872ac8b53035693925fde607", "size": "27524", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "AspectJ", "bytes": "95943" }, { "name": "Batchfile", "bytes": "63910" }, { "name": "C", "bytes": "1745962" }, { "name": "C++", "bytes": "2134903" }, { "name": "CMake", "bytes": "55692" }, { "name": "CSS", "bytes": "53463" }, { "name": "HTML", "bytes": "2441631" }, { "name": "Java", "bytes": "59302604" }, { "name": "JavaScript", "bytes": "46290" }, { "name": "M4", "bytes": "39811" }, { "name": "Makefile", "bytes": "57929" }, { "name": "Objective-C", "bytes": "118273" }, { "name": "PHP", "bytes": "152555" }, { "name": "Perl", "bytes": "159384" }, { "name": "Python", "bytes": "714987" }, { "name": "Ruby", "bytes": "28847" }, { "name": "Shell", "bytes": "446018" }, { "name": "Smalltalk", "bytes": "56562" }, { "name": "TLA", "bytes": "14993" }, { "name": "TeX", "bytes": "45082" }, { "name": "Thrift", "bytes": "3965" }, { "name": "XSLT", "bytes": "41310" } ], "symlink_target": "" }
# Copyright (c) Microsoft Corporation. # Licensed under the MIT License. # Skip all tests on non-windows and non-PowerShellCore and non-elevated platforms. $originalDefaultParameterValues = $PSDefaultParameterValues.Clone() $originalWarningPreference = $WarningPreference $WarningPreference = "SilentlyContinue" $skipTest = ! ($IsWindows -and $IsCoreCLR -and (Test-IsElevated)) $PSDefaultParameterValues["it:skip"] = $skipTest try { Describe "Implicit remoting and CIM cmdlets with AllSigned and Restricted policy" -tags "Feature","RequireAdminOnWindows" { BeforeAll { if ($skipTest) { return } # # GET CERTIFICATE # $tempName = "TESTDRIVE:\signedscript_$(Get-Random).ps1" "123456" > $tempName $cert = $null foreach ($thisCertificate in (Get-ChildItem cert:\ -rec -codesigning)) { $null = Set-AuthenticodeSignature $tempName -Certificate $thisCertificate if ((Get-AuthenticodeSignature $tempName).Status -eq "Valid") { $cert = $thisCertificate break } } # Skip the tests if we couldn't find a code sign certificate # This will happen in NanoServer and IoT if ($null -eq $cert) { $skipThisTest = $true return } $skipThisTest = $false # Ensure the cert is trusted if (-not (Test-Path "cert:\currentuser\TrustedPublisher\$($cert.Thumbprint)")) { $store = New-Object System.Security.Cryptography.X509Certificates.X509Store "TrustedPublisher" $store.Open("ReadWrite") $store.Add($cert) $store.Close() } # # Create a remote session # $session = New-RemoteSession # # Set process scope execution policy to 'AllSigned' # $oldExecutionPolicy = Get-ExecutionPolicy -Scope Process Set-ExecutionPolicy AllSigned -Scope Process } AfterAll { if ($skipTest) { return } if ($null -ne $tempName) { Remove-Item -Path $tempName -Force -ErrorAction SilentlyContinue } if ($null -ne $oldExecutionPolicy) { Set-ExecutionPolicy $oldExecutionPolicy -Scope Process } if ($null -ne $session) { Remove-PSSession $session -ErrorAction SilentlyContinue } } # # TEST - Verifying that Import-PSSession signs the files # It 
"Verifies that Import-PSSession works in AllSigned if Certificate is used" -Skip:($skipTest -or $skipThisTest) { try { $importedModule = Import-PSSession $session Get-Variable -Prefix Remote -Certificate $cert -AllowClobber $importedModule | Should -Not -BeNullOrEmpty } finally { $importedModule | Remove-Module -Force -ErrorAction SilentlyContinue } } It "Verifies security error when Certificate parameter is not used" -Skip:($skipTest -or $skipThisTest) { { $importedModule = Import-PSSession $session Get-Variable -Prefix Remote -AllowClobber } | Should -Throw -ErrorId "InvalidOperation,Microsoft.PowerShell.Commands.ImportPSSessionCommand" } } Describe "Tests Import-PSSession cmdlet works with types unavailable on the client" -tags "Feature","RequireAdminOnWindows" { BeforeAll { if ($skipTest) { return } $typeDefinition = @" namespace MyTest { public enum MyEnum { Value1 = 1, Value2 = 2 } } "@ # # Create a remote session # $session = New-RemoteSession Invoke-Command -Session $session -Script { Add-Type -TypeDefinition $args[0] } -Args $typeDefinition Invoke-Command -Session $session -Script { function foo { param([MyTest.MyEnum][Parameter(Mandatory = $true)]$x) $x } } } AfterAll { if ($skipTest) { return } if ($null -ne $session) { Remove-PSSession $session -ErrorAction SilentlyContinue } } It "Verifies client-side unavailable enum is correctly handled" { try { $module = Import-PSSession -Session $session -CommandName foo -AllowClobber # The enum is treated as an int (foo -x "Value2") | Should -Be 2 # The enum is to-string-ed appropriately (foo -x "Value2").ToString() | Should -BeExactly "Value2" } finally { if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } } } } Describe "Cmdlet help from remote session" -tags "Feature","RequireAdminOnWindows" { BeforeAll { if ($skipTest) { return } $session = New-RemoteSession } AfterAll { if ($skipTest) { return } if ($null -ne $session) { Remove-PSSession $session -ErrorAction SilentlyContinue 
} } It "Verifies that get-help name for remote proxied commands matches the get-command name" { try { $module = Import-PSSession $session -Name Select-Object -Prefix My -AllowClobber $gcmOutPut = (Get-Command Select-MyObject ).Name $getHelpOutPut = (Get-Help Select-MyObject).Name $gcmOutPut | Should -Be $getHelpOutPut } finally { if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } } } } Describe "Import-PSSession Cmdlet error handling" -tags "Feature","RequireAdminOnWindows" { BeforeAll { if ($skipTest) { return } $session = New-RemoteSession } AfterAll { if ($skipTest) { return } if ($null -ne $session) { Remove-PSSession $session -ErrorAction SilentlyContinue } } It "Verifies that broken alias results in one error" { try { Invoke-Command $session { Set-Alias BrokenAlias NonExistantCommand } $module = Import-PSSession $session -CommandName:BrokenAlias -CommandType:All -ErrorAction SilentlyContinue -ErrorVariable expectedError -AllowClobber $expectedError | Should -Not -BeNullOrEmpty $expectedError[0].ToString().Contains("BrokenAlias") | Should -BeTrue } finally { if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } Invoke-Command $session { Remove-Item alias:BrokenAlias } } } Context "Test content and format of proxied error message (Windows 7: #319080)" { BeforeAll { if ($skipTest) { return } $module = Import-PSSession -Session $session -Name Get-Variable -Prefix My -AllowClobber $oldErrorView = $ErrorView $ErrorView = "NormalView" } AfterAll { if ($skipTest) { return } if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } $ErrorView = $oldErrorView } It "Test non-terminating error" { $results = Get-MyVariable blah,pid 2>&1 ($results[1]).Value | Should -Not -Be $PID # Verifies that returned PID is not for this session $errorString = $results[0] | Out-String # Verifies error message for variable blah ($errorString -like "*VariableNotFound*") | Should -BeTrue } It 
"Test terminating error" { $results = Get-MyVariable pid -Scope blah 2>&1 $results.Count | Should -Be 1 # Verifies that remote session pid is not returned $errorString = $results[0] | Out-String # Verifes error message for incorrect Scope parameter argument ($errorString -like "*Argument*") | Should -BeTrue } } Context "Ordering of a sequence of error and output messages (Windows 7: #405065)" { BeforeAll { if ($skipTest) { return } Invoke-Command $session { function foo1{1; Write-Error 2; 3; Write-Error 4; 5; Write-Error 6} } $module = Import-PSSession $session -CommandName foo1 -AllowClobber $icmErr = $($icmOut = Invoke-Command $session { foo1 }) 2>&1 $proxiedErr = $($proxiedOut = foo1) 2>&1 $proxiedOut2 = foo1 2> $null $icmOut = "$icmOut" $icmErr = "$icmErr" $proxiedOut = "$proxiedOut" $proxiedOut2 = "$proxiedOut2" $proxiedErr = "$proxiedErr" } AfterAll { if ($skipTest) { return } if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } } It "Verifies proxied output = proxied output 2" { $proxiedOut2 | Should -Be $proxiedOut } It "Verifies proxied output = icm output (for mixed error and output results)" { $icmOut | Should -Be $proxiedOut } It "Verifies proxied error = icm error (for mixed error and output results)" { $icmErr | Should -Be $proxiedErr } It "Verifies proxied order = icm order (for mixed error and output results)" { $icmOrder = Invoke-Command $session { foo1 } 2>&1 | Out-String $proxiedOrder = foo1 2>&1 | Out-String $icmOrder | Should -Be $proxiedOrder } } Context "WarningVariable parameter works with implicit remoting (Windows 8: #44861)" { BeforeAll { if ($skipTest) { return } $module = Import-PSSession $session -CommandName Write-Warning -Prefix Remote -AllowClobber } AfterAll { if ($skipTest) { return } if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } } It "Verifies WarningVariable" { $global:myWarningVariable = @() Write-RemoteWarning MyWarning -WarningVariable 
global:myWarningVariable ([string]($myWarningVariable[0])) | Should -Be 'MyWarning' } } } Describe "Tests Export-PSSession" -tags "Feature","RequireAdminOnWindows" { BeforeAll { if ($skipTest) { return } $sessionOption = New-PSSessionOption -ApplicationArguments @{myTest="MyValue"} $session = New-RemoteSession -SessionOption $sessionOption $file = [IO.Path]::Combine([IO.Path]::GetTempPath(), [Guid]::NewGuid().ToString()) $results = Export-PSSession -Session $session -CommandName Get-Variable -AllowClobber -ModuleName $file $oldTimestamp = $($results | Select-Object -First 1).LastWriteTime } AfterAll { if ($skipTest) { return } if ($null -ne $file) { Remove-Item $file -Force -Recurse -ErrorAction SilentlyContinue } if ($null -ne $session) { Remove-PSSession $session -ErrorAction SilentlyContinue } } It "Verifies Export-PSSession creates a file/directory" { @(Get-Item $file).Count | Should -Be 1 } It "Verifies Export-PSSession creates a psd1 file" { ($results | Where-Object { $_.Name -like "*$(Split-Path -Leaf $file).psd1" }) | Should -BeTrue } It "Verifies Export-PSSession creates a psm1 file" { ($results | Where-Object { $_.Name -like "*.psm1" }) | Should -BeTrue } It "Verifies Export-PSSession creates a ps1xml file" { ($results | Where-Object { $_.Name -like "*.ps1xml" }) | Should -BeTrue } It "Verifies that Export-PSSession fails when a module directory already exists" { $e = { Export-PSSession -Session $session -CommandName Get-Variable -AllowClobber -ModuleName $file -ErrorAction Stop } | Should -Throw -PassThru $e | Should -Not -BeNullOrEmpty # Error contains reference to the directory that already exists ([string]($e[0]) -like "*$file*") | Should -BeTrue } It "Verifies that overwriting an existing directory succeeds with -Force" { $newResults = Export-PSSession -Session $session -CommandName Get-Variable -AllowClobber -ModuleName $file -Force # Verifies that Export-PSSession returns 4 files @($newResults).Count | Should -Be 4 # Verifies that Export-PSSession 
creates *new* files $newResults | ForEach-Object { $_.LastWriteTime | Should -BeGreaterThan $oldTimestamp } } Context "The module is usable when the original runspace is still around" { BeforeAll { if ($skipTest) { return } $module = Import-Module $file -PassThru } AfterAll { if ($skipTest) { return } if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } } It "Verifies that proxy returns remote pid" { (Get-Variable -Name pid).Value | Should -Not -Be $PID } It "Verfies Remove-Module doesn't remove user's runspace" { Remove-Module $module -Force -ErrorAction SilentlyContinue (Get-PSSession -InstanceId $session.InstanceId) | Should -Not -BeNullOrEmpty } } } Describe "Proxy module is usable when the original runspace is no longer around" -tags "Feature","RequireAdminOnWindows" { BeforeAll { if ($skipTest) { return } $sessionOption = New-PSSessionOption -ApplicationArguments @{myTest="MyValue"} $session = New-RemoteSession -SessionOption $sessionOption $file = [IO.Path]::Combine([IO.Path]::GetTempPath(), [Guid]::NewGuid().ToString()) $null = Export-PSSession -Session $session -CommandName Get-Variable -AllowClobber -ModuleName $file # Close the session to test the behavior of proxy module if ($null -ne $session) { Remove-PSSession $session -ErrorAction SilentlyContinue; $session = $null } } AfterAll { if ($skipTest) { return } if ($null -ne $file) { Remove-Item $file -Force -Recurse -ErrorAction SilentlyContinue } if ($null -ne $session) { Remove-PSSession $session -ErrorAction SilentlyContinue } } ## It requires 'New-PSSession' to work with implicit credential to allow proxied command to create new session. ## Implicit credential doesn't work in the Azure DevOps builder, so mark all tests here '-pending'. 
Context "Proxy module should create a new session" { BeforeAll { if ($skipTest) { return } $module = Import-Module $file -PassThru -Force $internalSession = & $module { $script:PSSession } } AfterAll { if ($skipTest) { return } if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } } It "Verifies proxy should return remote pid" -Pending { (Get-Variable -Name PID).Value | Should -Not -Be $PID } It "Verifies ApplicationArguments got preserved correctly" -Pending { $(Invoke-Command $internalSession { $PSSenderInfo.ApplicationArguments.MyTest }) | Should -BeExactly "MyValue" } It "Verifies Remove-Module removed the runspace that was automatically created" -Pending { Remove-Module $module -Force (Get-PSSession -InstanceId $internalSession.InstanceId -ErrorAction SilentlyContinue) | Should -BeNullOrEmpty } It "Verifies Runspace is closed after removing module from Export-PSSession that got initialized with an internal r-space" -Pending { ($internalSession.Runspace.RunspaceStateInfo.ToString()) | Should -BeExactly "Closed" } } Context "Runspace created by the module with explicit session options" { BeforeAll { if ($skipTest) { return } $explicitSessionOption = New-PSSessionOption -Culture fr-FR -UICulture de-DE $module = Import-Module $file -PassThru -Force -ArgumentList $null, $explicitSessionOption $internalSession = & $module { $script:PSSession } } AfterAll { if ($skipTest) { return } if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } } It "Verifies proxy should return remote pid" -Pending { (Get-Variable -Name PID).Value | Should -Not -Be $PID } # culture settings should be taken from the explicitly passed session options It "Verifies proxy returns modified culture" -Pending { (Get-Variable -Name PSCulture).Value | Should -BeExactly "fr-FR" } It "Verifies proxy returns modified culture" -Pending { (Get-Variable -Name PSUICulture).Value | Should -BeExactly "de-DE" } # removing the module should remove 
the implicitly/magically created runspace It "Verifies Remove-Module removes automatically created runspace" -Pending { Remove-Module $module -Force (Get-PSSession -InstanceId $internalSession.InstanceId -ErrorAction SilentlyContinue) | Should -BeNullOrEmpty } It "Verifies Runspace is closed after removing module from Export-PSSession that got initialized with an internal r-space" -Pending { ($internalSession.Runspace.RunspaceStateInfo.ToString()) | Should -BeExactly "Closed" } } Context "Passing a runspace into proxy module" { BeforeAll { if ($skipTest) { return } $newSession = New-RemoteSession $module = Import-Module $file -PassThru -Force -ArgumentList $newSession $internalSession = & $module { $script:PSSession } } AfterAll { if ($skipTest) { return } if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } if ($null -ne $newSession) { Remove-PSSession $newSession -ErrorAction SilentlyContinue } } It "Verifies proxy returns remote pid" { (Get-Variable -Name PID).Value | Should -Not -Be $PID } It "Verifies switch parameters work" { (Get-Variable -Name PID -ValueOnly) | Should -Not -Be $PID } It "Verifies Adding a module affects runspace's state" { ($internalSession.Runspace.RunspaceStateInfo.ToString()) | Should -BeExactly "Opened" } It "Verifies Runspace stays opened after removing module from Export-PSSession that got initialized with an external runspace" { Remove-Module $module -Force ($internalSession.Runspace.RunspaceStateInfo.ToString()) | Should -BeExactly "Opened" } } } Describe "Import-PSSession with FormatAndTypes" -tags "Feature","RequireAdminOnWindows" { BeforeAll { if ($skipTest) { return } # remote into same powershell instance $samesession = New-RemoteSession -ConfigurationName $endpointName $session = New-RemoteSession function CreateTempPs1xmlFile { do { $tmpFile = [IO.Path]::Combine([IO.Path]::GetTempPath(), [IO.Path]::GetRandomFileName()) + ".ps1xml"; } while ([IO.File]::Exists($tmpFile)) $tmpFile } function 
CreateTypeFile { $tmpFile = CreateTempPs1xmlFile @" <Types> <Type> <Name>System.Management.Automation.Host.Coordinates</Name> <Members> <NoteProperty> <Name>MyTestLabel</Name> <Value>123</Value> </NoteProperty> </Members> </Type> <Type> <Name>MyTest.Root</Name> <Members> <MemberSet> <Name>PSStandardMembers</Name> <Members> <NoteProperty> <Name>SerializationDepth</Name> <Value>1</Value> </NoteProperty> </Members> </MemberSet> </Members> </Type> <Type> <Name>MyTest.Son</Name> <Members> <MemberSet> <Name>PSStandardMembers</Name> <Members> <NoteProperty> <Name>SerializationDepth</Name> <Value>1</Value> </NoteProperty> </Members> </MemberSet> </Members> </Type> <Type> <Name>MyTest.Grandson</Name> <Members> <MemberSet> <Name>PSStandardMembers</Name> <Members> <NoteProperty> <Name>SerializationDepth</Name> <Value>1</Value> </NoteProperty> </Members> </MemberSet> </Members> </Type> </Types> "@ | Set-Content $tmpFile $tmpFile } function CreateFormatFile { $tmpFile = CreateTempPs1xmlFile @" <Configuration> <ViewDefinitions> <View> <Name>MySizeView</Name> <ViewSelectedBy> <TypeName>System.Management.Automation.Host.Size</TypeName> </ViewSelectedBy> <TableControl> <TableHeaders> <TableColumnHeader> <Label>MyTestWidth</Label> </TableColumnHeader> <TableColumnHeader> <Label>MyTestHeight</Label> </TableColumnHeader> </TableHeaders> <TableRowEntries> <TableRowEntry> <TableColumnItems> <TableColumnItem> <PropertyName>Width</PropertyName> </TableColumnItem> <TableColumnItem> <PropertyName>Height</PropertyName> </TableColumnItem> </TableColumnItems> </TableRowEntry> </TableRowEntries> </TableControl> </View> </ViewDefinitions> </Configuration> "@ | Set-Content $tmpFile $tmpFile } $formatFile = CreateFormatFile $typeFile = CreateTypeFile } AfterAll { if ($skipTest) { return } if ($null -ne $session) { Remove-PSSession $session -ErrorAction SilentlyContinue } if ($null -ne $samesession) { Remove-PSSession $samesession -ErrorAction SilentlyContinue } if ($null -ne $formatFile) { 
Remove-Item $formatFile -Force -ErrorAction SilentlyContinue } if ($null -ne $typeFile) { Remove-Item $typeFile -Force -ErrorAction SilentlyContinue } } Context "Importing format file works" { BeforeAll { if ($skipTest) { return } $formattingScript = { New-Object System.Management.Automation.Host.Size | ForEach-Object { $_.Width = 123; $_.Height = 456; $_ } | Out-String } $originalLocalFormatting = & $formattingScript # Original local and remote formatting should be equal (sanity check) $originalRemoteFormatting = Invoke-Command $samesession $formattingScript $originalLocalFormatting | Should -Be $originalRemoteFormatting Invoke-Command $samesession { param($file) Update-FormatData $file } -ArgumentList $formatFile # Original remote and modified remote formatting should not be equal (sanity check) $modifiedRemoteFormatting = Invoke-Command $samesession $formattingScript $originalRemoteFormatting | Should -Not -Be $modifiedRemoteFormatting $module = Import-PSSession -Session $samesession -CommandName @() -FormatTypeName * -AllowClobber } AfterAll { if ($skipTest) { return } if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } } It "modified remote and imported local should be equal" { $importedLocalFormatting = & $formattingScript $modifiedRemoteFormatting | Should -Be $importedLocalFormatting } It "original local and unimported local should be equal" { Remove-Module $module -Force $unimportedLocalFormatting = & $formattingScript $originalLocalFormatting | Should -Be $unimportedLocalFormatting } } It "Updating type table in a middle of a command has effect on serializer" { $results = Invoke-Command $session -ArgumentList $typeFile -ScriptBlock { param($file) New-Object System.Management.Automation.Host.Coordinates Update-TypeData $file New-Object System.Management.Automation.Host.Coordinates } # Should get 2 deserialized S.M.A.H.Coordinates objects $results.Count | Should -Be 2 # First object shouldn't have the additional ETS note 
property $results[0].MyTestLabel | Should -BeNullOrEmpty # Second object should have the additional ETS note property $results[1].MyTestLabel | Should -Be 123 } Context "Implicit remoting works even when types.ps1xml is missing on the client" { BeforeAll { if ($skipTest) { return } $typeDefinition = @" namespace MyTest { public class Root { public Root(string s) { text = s; } public Son Son = new Son(); public string text; } public class Son { public Grandson Grandson = new Grandson(); } public class Grandson { public string text = "Grandson"; } } "@ Invoke-Command -Session $session -Script { Add-Type -TypeDefinition $args[0] } -ArgumentList $typeDefinition Invoke-Command -Session $session -Script { function foo { New-Object MyTest.Root "root" } } Invoke-Command -Session $session -Script { function bar { param([Parameter(Mandatory = $true, ValueFromPipelineByPropertyName = $true)]$Son) $Son.Grandson.text } } $module = Import-PSSession $session foo,bar -AllowClobber } AfterAll { if ($skipTest) { return } if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } } It "Serialization works for top-level properties" { $x = foo $x.text | Should -BeExactly "root" } It "Serialization settings works for deep properties" { $x = foo $x.Son.Grandson.text | Should -BeExactly "Grandson" } It "Serialization settings are preserved even if types.ps1xml is missing on the client" { $y = foo | bar $y | Should -BeExactly "Grandson" } } } Describe "Import-PSSession functional tests" -tags "Feature","RequireAdminOnWindows" { BeforeAll { if ($skipTest) { return } $session = New-RemoteSession # Define a remote function Invoke-Command -Session $session { function MyFunction { param($x) "x = '$x'; args = '$args'" } } # Define a remote proxy script cmdlet $remoteCommandType = $ExecutionContext.InvokeCommand.GetCommand('Get-Variable', [System.Management.Automation.CommandTypes]::Cmdlet) $remoteProxyBody = 
[System.Management.Automation.ProxyCommand]::Create($remoteCommandType) $remoteProxyDeclaration = "function Get-VariableProxy { $remoteProxyBody }" Invoke-Command -Session $session { param($x) Invoke-Expression $x } -Arg $remoteProxyDeclaration $remoteAliasDeclaration = "set-alias gvalias Get-Variable" Invoke-Command -Session $session { param($x) Invoke-Expression $x } -Arg $remoteAliasDeclaration Remove-Item alias:gvalias -Force -ErrorAction silentlycontinue # Import a remote function, script cmdlet, cmdlet, native application, alias $module = Import-PSSession -Session $session -Name MyFunction,Get-VariableProxy,Get-Variable,gvalias,cmd -AllowClobber -Type All } AfterAll { if ($skipTest) { return } if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } if ($null -ne $session) { Remove-PSSession $session -ErrorAction SilentlyContinue } } It "Import-PSSession should return a PSModuleInfo object" { $module | Should -Not -BeNullOrEmpty } It "Import-PSSession should return a PSModuleInfo object" { ($module -as [System.Management.Automation.PSModuleInfo]) | Should -Not -BeNullOrEmpty } It "Helper functions should not be imported" { Get-Item function:*PSImplicitRemoting* -ErrorAction SilentlyContinue | Where-Object {$_.ModuleName -eq $module.Name} | Should -BeNullOrEmpty } It "Calls implicit remoting proxies 'MyFunction'" { (MyFunction 1 2 3) | Should -BeExactly "x = '1'; args = '2 3'" } It "proxy should return remote pid" { (Get-VariableProxy -Name:pid).Value | Should -Not -Be $PID } It "proxy should return remote pid" { (Get-Variable -Name:pid).Value | Should -Not -Be $PID } It "proxy should return remote pid" { $(& (Get-Command gvalias -Type alias) -Name:pid).Value | Should -Not -Be $PID } It "NoName-c8aeb5c8-2388-4d64-98c1-a9c6c218d404" { Invoke-Command -Session $session { $env:TestImplicitRemotingVariable = 123 } (cmd.exe /c "echo TestImplicitRemotingVariable=%TestImplicitRemotingVariable%") | Should -BeExactly 
"TestImplicitRemotingVariable=123" } Context "Test what happens after the runspace is closed" { BeforeAll { if ($skipTest) { return } Remove-PSSession $session # The loop below works around the fact that PSEventManager uses threadpool worker to queue event handler actions to process later. # Usage of threadpool means that it is impossible to predict when the event handler will run (this is Windows 8 Bugs: #882977). $i = 0 while ( ($i -lt 20) -and ($null -ne (Get-Module | Where-Object { $_.Path -eq $module.Path })) ) { $i++ Start-Sleep -Milliseconds 50 } } It "Temporary module should be automatically removed after runspace is closed" { (Get-Module | Where-Object { $_.Path -eq $module.Path }) | Should -BeNullOrEmpty } It "Temporary psm1 file should be automatically removed after runspace is closed" { (Get-Item $module.Path -ErrorAction SilentlyContinue) | Should -BeNullOrEmpty } It "Event should be unregistered when the runspace is closed" { # Check that the implicit remoting event has been removed. 
# Count subscribers whose SourceIdentifier marks them as implicit remoting
# events; after the runspace is closed there should be none left.
$implicitEventCount = 0
foreach ($item in $ExecutionContext.Events.Subscribers)
{
    if ($item.SourceIdentifier -match "Implicit remoting event") { $implicitEventCount++ }
}
$implicitEventCount | Should -Be 0
}

It "Private functions from the implicit remoting module shouldn't get imported into global scope" {
    # Helper functions (names containing 'Implicit') belonging to the generated
    # proxy module must not leak into the global function: drive.
    @(Get-ChildItem function:*Implicit* -ErrorAction SilentlyContinue | Where-Object {$_.ModuleName -eq $module.Name}).Count | Should -Be 0
}
}
}

Describe "Implicit remoting parameter binding" -tags "Feature","RequireAdminOnWindows" {
    BeforeAll {
        if ($skipTest) { return }
        # One remote session shared by every Context/It in this Describe.
        $session = New-RemoteSession
    }

    AfterAll {
        if ($skipTest) { return }
        if ($null -ne $session) { Remove-PSSession $session -ErrorAction SilentlyContinue }
    }

    It "Binding of ValueFromPipeline should work" {
        try
        {
            # Get-Random below is the imported proxy; piped input must bind
            # via ValueFromPipeline and -Count must still be honored.
            $module = Import-PSSession -Session $session -Name Get-Random -AllowClobber
            $x = 1..20 | Get-Random -Count 5
            $x.Count | Should -Be 5
        }
        finally
        {
            Remove-Module $module -Force
        }
    }

    Context "Pipeline-based parameter binding works even when client has no type constraints (Windows 7: #391157)" {
        BeforeAll {
            if ($skipTest) { return }
            # Define a remote function whose parameter sets are disambiguated only
            # by the [string]/[ipaddress] type constraints, which exist server-side
            # but not on the generated client proxy.
            Invoke-Command -Session $session -Scriptblock {
                function foo {
                    [cmdletbinding(defaultparametersetname="string")]
                    param(
                        [string]
                        [parameter(ParameterSetName="string", ValueFromPipeline = $true)]
                        $string,

                        [ipaddress]
                        [parameter(ParameterSetName="ipaddress", ValueFromPipeline = $true)]
                        $ipaddress)

                    "Bound parameter: $($MyInvocation.BoundParameters.Keys | Sort-Object)"
                }
            }

            # Sanity checks.
# Sanity-check on the server: pipeline input alone should pick the right
# parameter set based on the remote type constraints.
Invoke-Command $session {"s" | foo} | Should -BeExactly "Bound parameter: string"
Invoke-Command $session {[ipaddress]::parse("127.0.0.1") | foo} | Should -BeExactly "Bound parameter: ipaddress"

$module = Import-PSSession $session foo -AllowClobber
}

AfterAll {
    if ($skipTest) { return }
    if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue }
}

It "Pipeline binding works even if it relies on type constraints" {
    # The local proxy has no type constraints; binding must still land on the
    # 'string' parameter set on the remote side.
    ("s" | foo) | Should -BeExactly "Bound parameter: string"
}

It "Pipeline binding works even if it relies on type constraints" {
    ([ipaddress]::parse("127.0.0.1") | foo) | Should -BeExactly "Bound parameter: ipaddress"
}
}

Context "Pipeline-based parameter binding works even when client has no type constraints and parameterset is ambiguous (Windows 7: #430379)" {
    BeforeAll {
        if ($skipTest) { return }
        # Same shape as the previous Context but without a default parameter set,
        # so the set stays ambiguous until pipeline input arrives.
        Invoke-Command -Session $session -Scriptblock {
            function foo {
                param(
                    [string]
                    [parameter(ParameterSetName="string", ValueFromPipeline = $true)]
                    $string,

                    [ipaddress]
                    [parameter(ParameterSetName="ipaddress", ValueFromPipeline = $true)]
                    $ipaddress)

                # Note: no Sort-Object here - only a single parameter is ever bound.
                "Bound parameter: $($MyInvocation.BoundParameters.Keys)"
            }
        }

        # Sanity checks.
# Sanity-check on the server: the ambiguous parameter set is still resolved
# correctly by the type of the pipeline input.
Invoke-Command $session {"s" | foo} | Should -BeExactly "Bound parameter: string"
Invoke-Command $session {[ipaddress]::parse("127.0.0.1") | foo} | Should -BeExactly "Bound parameter: ipaddress"

$module = Import-PSSession $session foo -AllowClobber
}

AfterAll {
    if ($skipTest) { return }
    if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue }
}

It "Pipeline binding works even if it relies on type constraints and parameter set is ambiguous" {
    ("s" | foo) | Should -BeExactly "Bound parameter: string"
}

It "Pipeline binding works even if it relies on type constraints and parameter set is ambiguous" {
    ([ipaddress]::parse("127.0.0.1") | foo) | Should -BeExactly "Bound parameter: ipaddress"
}
}

Context "pipeline-based parameter binding works even when one of parameters that can be bound by pipeline gets bound by name" {
    BeforeAll {
        if ($skipTest) { return }
        # Two pipeline-capable parameters in the same parameter set; binding one
        # of them by name must leave the other available for the pipeline.
        Invoke-Command -Session $session -Scriptblock {
            function foo {
                param(
                    [DateTime]
                    [parameter(ValueFromPipeline = $true)]
                    $date,

                    [ipaddress]
                    [parameter(ValueFromPipeline = $true)]
                    $ipaddress)

                "Bound parameter: $($MyInvocation.BoundParameters.Keys | Sort-Object)"
            }
        }

        # Sanity checks.
# Sanity-check on the server: pipeline input binds by type, and an explicitly
# named argument combines with the pipeline-bound parameter.
Invoke-Command $session {Get-Date | foo} | Should -BeExactly "Bound parameter: date"
Invoke-Command $session {[ipaddress]::parse("127.0.0.1") | foo} | Should -BeExactly "Bound parameter: ipaddress"
Invoke-Command $session {[ipaddress]::parse("127.0.0.1") | foo -date (Get-Date)} | Should -BeExactly "Bound parameter: date ipaddress"
Invoke-Command $session {Get-Date | foo -ipaddress ([ipaddress]::parse("127.0.0.1"))} | Should -BeExactly "Bound parameter: date ipaddress"

$module = Import-PSSession $session foo -AllowClobber
}

AfterAll {
    if ($skipTest) { return }
    if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue }
}

It "Pipeline binding works even when also binding by name" {
    (Get-Date | foo) | Should -BeExactly "Bound parameter: date"
}

It "Pipeline binding works even when also binding by name" {
    ([ipaddress]::parse("127.0.0.1") | foo) | Should -BeExactly "Bound parameter: ipaddress"
}

It "Pipeline binding works even when also binding by name" {
    ([ipaddress]::parse("127.0.0.1") | foo -date $(Get-Date)) | Should -BeExactly "Bound parameter: date ipaddress"
}

It "Pipeline binding works even when also binding by name" {
    (Get-Date | foo -ipaddress ([ipaddress]::parse("127.0.0.1"))) | Should -BeExactly "Bound parameter: date ipaddress"
}
}

Context "value from pipeline by property name - multiple parameters" {
    BeforeAll {
        if ($skipTest) { return }
        # Both parameters bind ValueFromPipelineByPropertyName, so one piped
        # process object can populate both from its properties at once.
        Invoke-Command -Session $session -Scriptblock {
            function foo {
                param(
                    [System.TimeSpan]
                    [parameter(ValueFromPipelineByPropertyName = $true)]
                    $TotalProcessorTime,

                    [System.Diagnostics.ProcessPriorityClass]
                    [parameter(ValueFromPipelineByPropertyName = $true)]
                    $PriorityClass)

                "Bound parameter: $($MyInvocation.BoundParameters.Keys | Sort-Object)"
            }
        }

        # Sanity checks.
# Sanity-check on the server: both properties of the piped process object
# bind, and an explicit (prefix-abbreviated) argument still reports both keys.
Invoke-Command $session {Get-Process -pid $PID | foo} | Should -BeExactly "Bound parameter: PriorityClass TotalProcessorTime"
Invoke-Command $session {Get-Process -pid $PID | foo -Total 5} | Should -BeExactly "Bound parameter: PriorityClass TotalProcessorTime"
Invoke-Command $session {Get-Process -pid $PID | foo -Priority normal} | Should -BeExactly "Bound parameter: PriorityClass TotalProcessorTime"

$module = Import-PSSession $session foo -AllowClobber
}

AfterAll {
    if ($skipTest) { return }
    if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue }
}

It "Pipeline binding works by property name" {
    (Get-Process -Id $PID | foo) | Should -BeExactly "Bound parameter: PriorityClass TotalProcessorTime"
}

It "Pipeline binding works by property name" {
    (Get-Process -Id $PID | foo -Total 5) | Should -BeExactly "Bound parameter: PriorityClass TotalProcessorTime"
}

It "Pipeline binding works by property name" {
    (Get-Process -Id $PID | foo -Priority normal) | Should -BeExactly "Bound parameter: PriorityClass TotalProcessorTime"
}
}

Context "2 parameters on the same position" {
    BeforeAll {
        if ($skipTest) { return }
        # Two mandatory parameters share position 0 in different parameter sets;
        # the argument's type decides which set (and parameter) wins.
        Invoke-Command -Session $session -Scriptblock {
            function foo {
                param(
                    [string]
                    [parameter(Position = 0, parametersetname = 'set1', mandatory = $true)]
                    $string,

                    [ipaddress]
                    [parameter(Position = 0, parametersetname = 'set2', mandatory = $true)]
                    $ipaddress)

                "Bound parameter: $($MyInvocation.BoundParameters.Keys | Sort-Object)"
            }
        }

        # Sanity checks.
# Sanity-check on the server: the argument's type picks the parameter set.
Invoke-Command $session {foo ([ipaddress]::parse("127.0.0.1"))} | Should -BeExactly "Bound parameter: ipaddress"
Invoke-Command $session {foo "blah"} | Should -BeExactly "Bound parameter: string"

$module = Import-PSSession $session foo -AllowClobber
}

AfterAll {
    if ($skipTest) { return }
    if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue }
}

It "Positional binding works" {
    foo "blah" | Should -BeExactly "Bound parameter: string"
}

It "Positional binding works" {
    foo ([ipaddress]::parse("127.0.0.1")) | Should -BeExactly "Bound parameter: ipaddress"
}
}

Context "positional binding and array argument value" {
    BeforeAll {
        if ($skipTest) { return }
        # An array argument must bind as a whole to the positional parameter $p1
        # instead of being spread across $p1 and $p2.
        Invoke-Command -Session $session -Scriptblock {
            function foo {
                param(
                    [object]
                    [parameter(Position = 0, mandatory = $true)]
                    $p1,

                    [object]
                    [parameter(Position = 1)]
                    $p2)

                "$p1 : $p2"
            }
        }

        # Sanity checks.
        Invoke-Command $session {foo 1,2,3} | Should -BeExactly "1 2 3 : "
        Invoke-Command $session {foo 1,2,3 4} | Should -BeExactly "1 2 3 : 4"
        Invoke-Command $session {foo -p2 4 1,2,3} | Should -BeExactly "1 2 3 : 4"
        Invoke-Command $session {foo 1 4} | Should -BeExactly "1 : 4"
        Invoke-Command $session {foo -p2 4 1} | Should -BeExactly "1 : 4"

        $module = Import-PSSession $session foo -AllowClobber
    }

    AfterAll {
        if ($skipTest) { return }
        if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue }
    }

    It "Positional binding works when binding an array value" {
        foo 1,2,3 | Should -BeExactly "1 2 3 : "
    }

    It "Positional binding works when binding an array value" {
        foo 1,2,3 4 | Should -BeExactly "1 2 3 : 4"
    }

    It "Positional binding works when binding an array value" {
        foo -p2 4 1,2,3 | Should -BeExactly "1 2 3 : 4"
    }

    It "Positional binding works when binding an array value" {
        foo 1 4 | Should -BeExactly "1 : 4"
    }

    It "Positional binding works when binding an array value" {
        foo -p2 4 1 | Should -BeExactly "1 : 4"
    }
}

Context "value from remaining arguments" {
    BeforeAll {
        if ($skipTest) { return }
        # $remainingArgs soaks up every argument not consumed by $firstArg
        # (ValueFromRemainingArguments), regardless of where they appear.
        Invoke-Command -Session $session -Scriptblock {
            function foo {
                param(
                    [string]
                    [parameter(Position = 0)]
                    $firstArg,

                    [string[]]
                    [parameter(ValueFromRemainingArguments = $true)]
                    $remainingArgs)

                "$firstArg : $remainingArgs"
            }
        }

        # Sanity checks.
        Invoke-Command $session {foo} | Should -BeExactly " : "
        Invoke-Command $session {foo 1} | Should -BeExactly "1 : "
        Invoke-Command $session {foo -first 1} | Should -BeExactly "1 : "
        Invoke-Command $session {foo 1 2 3} | Should -BeExactly "1 : 2 3"
        Invoke-Command $session {foo -first 1 2 3} | Should -BeExactly "1 : 2 3"
        Invoke-Command $session {foo 2 3 -first 1 4 5} | Should -BeExactly "1 : 2 3 4 5"
        Invoke-Command $session {foo -remainingArgs 2,3 1} | Should -BeExactly "1 : 2 3"

        $module = Import-PSSession $session foo -AllowClobber
    }

    AfterAll {
        if ($skipTest) { return }
        if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue }
    }

    It "Value from remaining arguments works" {
        $( foo ) | Should -BeExactly " : "
    }

    It "Value from remaining arguments works" {
        $( foo 1 ) | Should -BeExactly "1 : "
    }

    It "Value from remaining arguments works" {
        $( foo -first 1 ) | Should -BeExactly "1 : "
    }

    It "Value from remaining arguments works" {
        $( foo 1 2 3 ) | Should -BeExactly "1 : 2 3"
    }

    It "Value from remaining arguments works" {
        $( foo -first 1 2 3 ) | Should -BeExactly "1 : 2 3"
    }

    It "Value from remaining arguments works" {
        $( foo 2 3 -first 1 4 5 ) | Should -BeExactly "1 : 2 3 4 5"
    }

    It "Value from remaining arguments works" {
        $( foo -remainingArgs 2,3 1 ) | Should -BeExactly "1 : 2 3"
    }
}

Context "non cmdlet-based binding" {
    BeforeAll {
        if ($skipTest) { return }
        # Plain (non-advanced) function: named/positional binding plus the $args
        # catch-all must survive the implicit remoting proxy.
        Invoke-Command -Session $session -Scriptblock {
            function foo {
                param(
                    $firstArg,
                    $secondArg)

                "$firstArg : $secondArg : $args"
            }
        }

        # Sanity checks.
Invoke-Command $session { foo } | Should -BeExactly " : : " Invoke-Command $session { foo 1 } | Should -BeExactly "1 : : " Invoke-Command $session { foo -first 1 } | Should -BeExactly "1 : : " Invoke-Command $session { foo 1 2 } | Should -BeExactly "1 : 2 : " Invoke-Command $session { foo 1 -second 2 } | Should -BeExactly "1 : 2 : " Invoke-Command $session { foo -first 1 -second 2 } | Should -BeExactly "1 : 2 : " Invoke-Command $session { foo 1 2 3 4 } | Should -BeExactly "1 : 2 : 3 4" Invoke-Command $session { foo -first 1 2 3 4 } | Should -BeExactly "1 : 2 : 3 4" Invoke-Command $session { foo 1 -second 2 3 4 } | Should -BeExactly "1 : 2 : 3 4" Invoke-Command $session { foo 1 3 -second 2 4 } | Should -BeExactly "1 : 2 : 3 4" Invoke-Command $session { foo -first 1 -second 2 3 4 } | Should -BeExactly "1 : 2 : 3 4" $module = Import-PSSession $session foo -AllowClobber } AfterAll { if ($skipTest) { return } if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } } It "Non cmdlet-based binding works." { foo | Should -BeExactly " : : " } It "Non cmdlet-based binding works." { foo 1 | Should -BeExactly "1 : : " } It "Non cmdlet-based binding works." { foo -first 1 | Should -BeExactly "1 : : " } It "Non cmdlet-based binding works." { foo 1 2 | Should -BeExactly "1 : 2 : " } It "Non cmdlet-based binding works." { foo 1 -second 2 | Should -BeExactly "1 : 2 : " } It "Non cmdlet-based binding works." { foo -first 1 -second 2 | Should -BeExactly "1 : 2 : " } It "Non cmdlet-based binding works." { foo 1 2 3 4 | Should -BeExactly "1 : 2 : 3 4" } It "Non cmdlet-based binding works." { foo -first 1 2 3 4 | Should -BeExactly "1 : 2 : 3 4" } It "Non cmdlet-based binding works." { foo 1 -second 2 3 4 | Should -BeExactly "1 : 2 : 3 4" } It "Non cmdlet-based binding works." { foo 1 3 -second 2 4 | Should -BeExactly "1 : 2 : 3 4" } It "Non cmdlet-based binding works." 
{ foo -first 1 -second 2 3 4 | Should -BeExactly "1 : 2 : 3 4" } } Context "default parameter initialization should be executed on the server" { BeforeAll { if ($skipTest) { return } Invoke-Command -Session $session -Scriptblock { function MyInitializerFunction { param($x = $PID) $x } } $localPid = $PID $remotePid = Invoke-Command $session { $PID } # Sanity check $localPid | Should -Not -Be $remotePid $module = Import-PSSession -Session $session -Name MyInitializerFunction -AllowClobber } AfterAll { if ($skipTest) { return } if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } } It "Initializer run on the remote server" { (MyInitializerFunction) | Should -Be $remotePid } It "Initializer not run when value provided" { (MyInitializerFunction 123) | Should -Be 123 } } Context "client-side parameters - cmdlet case" { BeforeAll { if ($skipTest) { return } $remotePid = Invoke-Command $session { $PID } $module = Import-PSSession -Session $session -Name Get-Variable -Type cmdlet -AllowClobber } AfterAll { if ($skipTest) { return } if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } } It "Importing by name/type should work" { (Get-Variable -Name PID).Value | Should -Not -Be $PID } It "Test -AsJob parameter" { try { $job = Get-Variable -Name PID -AsJob $job | Should -Not -BeNullOrEmpty ($job -is [System.Management.Automation.Job]) | Should -BeTrue ($job.Finished.WaitOne([TimeSpan]::FromSeconds(10), $false)) | Should -BeTrue $job.JobStateInfo.State | Should -Be 'Completed' $childJob = $job.ChildJobs[0] $childJob.Output.Count | Should -Be 1 $childJob.Output[0].Value | Should -Be $remotePid } finally { Remove-Job $job -Force } } It "Test OutVariable" { $result1 = Get-Variable -Name PID -OutVariable global:result2 $result1.Value | Should -Be $remotePid $global:result2[0].Value | Should -Be $remotePid } } Context "client-side parameters - Windows 7 bug #759434" { BeforeAll { if ($skipTest) { return } $module = 
Import-PSSession -Session $session -Name Write-Warning -Type cmdlet -Prefix Remote -AllowClobber } AfterAll { if ($skipTest) { return } if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } } It "Test warnings present with '-WarningAction Continue'" { try { $jobWithWarnings = write-remotewarning foo -WarningAction continue -Asjob $null = Wait-Job $jobWithWarnings $jobWithWarnings.ChildJobs[0].Warning.Count | Should -Be 1 } finally { Remove-Job $jobWithWarnings -Force } } It "Test no warnings with '-WarningAction SilentlyContinue'" { try { $jobWithoutWarnings = write-remotewarning foo -WarningAction silentlycontinue -Asjob $null = Wait-Job $jobWithoutWarnings $jobWithoutWarnings.ChildJobs[0].Warning.Count | Should -Be 0 } finally { Remove-Job $jobWithoutWarnings -Force } } } Context "client-side parameters - non-cmdlet case" { BeforeAll { if ($skipTest) { return } Invoke-Command $session { function foo { param($OutVariable) "OutVariable = $OutVariable" } } # Sanity check Invoke-Command $session { foo -OutVariable x } | Should -BeExactly "OutVariable = x" $module = Import-PSSession -Session $session -Name foo -Type function -AllowClobber } AfterAll { if ($skipTest) { return } if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } } It "Implicit remoting: OutVariable is not intercepted for non-cmdlet-bound functions" { foo -OutVariable x | Should -BeExactly "OutVariable = x" } } Context "switch and positional parameters" { BeforeAll { if ($skipTest) { return } $remotePid = Invoke-Command $session { $PID } $module = Import-PSSession -Session $session -Name Get-Variable -Type cmdlet -Prefix Remote -AllowClobber } AfterAll { if ($skipTest) { return } if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } } It "Switch parameters work fine" { $proxiedPid = Get-RemoteVariable -Name pid -ValueOnly $remotePid | Should -Be $proxiedPid } It "Positional parameters work fine" { 
$proxiedPid = Get-RemoteVariable pid $remotePid | Should -Be ($proxiedPid.Value) } } } Describe "Implicit remoting on restricted ISS" -tags "Feature","RequireAdminOnWindows","Slow" { BeforeAll { if ($skipTest) { return } $sessionConfigurationDll = [IO.Path]::Combine([IO.Path]::GetTempPath(), "ImplicitRemotingRestrictedConfiguration$(Get-Random).dll") Add-Type -OutputAssembly $sessionConfigurationDll -TypeDefinition @" using System; using System.Collections.Generic; using System.Management.Automation; using System.Management.Automation.Runspaces; using System.Management.Automation.Remoting; namespace MySessionConfiguration { public class MySessionConfiguration : PSSessionConfiguration { public override InitialSessionState GetInitialSessionState(PSSenderInfo senderInfo) { //System.Diagnostics.Debugger.Launch(); //System.Diagnostics.Debugger.Break(); InitialSessionState iss = InitialSessionState.CreateRestricted(System.Management.Automation.SessionCapabilities.RemoteServer); // add Out-String for testing stuff iss.Commands["Out-String"][0].Visibility = SessionStateEntryVisibility.Public; // remove all commands that are not public List<string> commandsToRemove = new List<string>(); foreach (SessionStateCommandEntry entry in iss.Commands) { List<SessionStateCommandEntry> sameNameEntries = new List<SessionStateCommandEntry>(iss.Commands[entry.Name]); if (!sameNameEntries.Exists(delegate(SessionStateCommandEntry e) { return e.Visibility == SessionStateEntryVisibility.Public; })) { commandsToRemove.Add(entry.Name); } } foreach (string commandToRemove in commandsToRemove) { iss.Commands.Remove(commandToRemove, null /* all types */); } return iss; } } } "@ Get-PSSessionConfiguration ImplicitRemotingRestrictedConfiguration* | Unregister-PSSessionConfiguration -Force ## The 'Register-PSSessionConfiguration' call below raises an AssemblyLoadException in powershell: ## "Could not load file or assembly 'Microsoft.Powershell.Workflow.ServiceCore, Version=3.0.0.0, Culture=neutral, 
PublicKeyToken=31bf3856ad364e35'. The system cannot find the file specified." ## Issue #2555 is created to track this issue and all tests here are skipped for CoreCLR for now. $myConfiguration = Register-PSSessionConfiguration ` -Name ImplicitRemotingRestrictedConfiguration ` -ApplicationBase (Split-Path $sessionConfigurationDll) ` -AssemblyName (Split-Path $sessionConfigurationDll -Leaf) ` -ConfigurationTypeName "MySessionConfiguration.MySessionConfiguration" ` -Force $session = New-RemoteSession -ConfigurationName $myConfiguration.Name $session | Should -Not -BeNullOrEmpty } AfterAll { if ($skipTest) { return } if ($null -ne $session) { Remove-PSSession $session -ErrorAction SilentlyContinue } if ($null -ne $myConfiguration) { Unregister-PSSessionConfiguration -Name ($myConfiguration.Name) -Force -ErrorAction SilentlyContinue } if ($null -ne $sessionConfigurationDll) { Remove-Item $sessionConfigurationDll -Force -ErrorAction SilentlyContinue } } Context "restrictions works" { It "Get-Variable is private" { @(Invoke-Command $session { Get-Command -Name Get-Variabl* }).Count | Should -Be 0 } It "Only 9 commands are public" { @(Invoke-Command $session { Get-Command }).Count | Should -Be 9 } } Context "basic functionality of Import-PSSession works (against a directly exposed cmdlet and against a proxy function)" { BeforeAll { if ($skipTest) { return } $module = Import-PSSession $session Out-Strin*,Measure-Object -Type Cmdlet,Function -ArgumentList 123 -AllowClobber } AfterAll { if ($skipTest) { return } if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } } It "Import-PSSession works against the ISS-restricted runspace (Out-String)" { @(Get-Command Out-String -Type Function).Count | Should -Be 1 } It "Import-PSSession works against the ISS-restricted runspace (Measure-Object)" { @(Get-Command Measure-Object -Type Function).Count | Should -Be 1 } It "Invoking an implicit remoting proxy works against the ISS-restricted runspace 
(Out-String)" { $remoteResult = Out-String -input ("blah " * 10) -Width 10 $localResult = Microsoft.PowerShell.Utility\Out-String -input ("blah " * 10) -Width 10 $localResult | Should -Be $remoteResult } It "Invoking an implicit remoting proxy works against the ISS-restricted runspace (Measure-Object)" { $remoteResult = 1..10 | Measure-Object $localResult = 1..10 | Microsoft.PowerShell.Utility\Measure-Object ($localResult.Count) | Should -Be ($remoteResult.Count) } } } Describe "Implicit remoting tests" -tags "Feature","RequireAdminOnWindows" { BeforeAll { if ($skipTest) { return } $session = New-RemoteSession } AfterAll { if ($skipTest) { return } if ($null -ne $session) { Remove-PSSession $session -ErrorAction SilentlyContinue } } Context "Get-Command <Imported-Module> and <Imported-Module.Name> work (Windows 7: #334112)" { BeforeAll { if ($skipTest) { return } $module = Import-PSSession $session Get-Variable -Prefix My -AllowClobber } AfterAll { if ($skipTest) { return } if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } } It "PSModuleInfo.Name shouldn't contain a psd1 extension" { ($module.Name -notlike '*.psd1') | Should -BeTrue } It "PSModuleInfo.Name shouldn't contain a psm1 extension" { ($module.Name -notlike '*.psm1') | Should -BeTrue } It "PSModuleInfo.Name shouldn't contain a path" { ($module.Name -notlike "${env:TMP}*") | Should -BeTrue } # Test temporarily disabled because of conflict with DG UMCI tests. # Re-enable after DG UMCI tests moved to a separate test process. It "Get-Command returns only 1 public command from implicit remoting module (1)" -Pending { $c = @(Get-Command -Module $module) $c.Count | Should -Be 1 $c[0].Name | Should -BeExactly "Get-MyVariable" } # Test temporarily disabled because of conflict with DG UMCI tests. # Re-enable after DG UMCI tests moved to a separate test process. 
It "Get-Command returns only 1 public command from implicit remoting module (2)" -Pending {
    # Same as (1), but querying by module name instead of the PSModuleInfo object.
    $c = @(Get-Command -Module $module.Name)
    $c.Count | Should -Be 1
    $c[0].Name | Should -BeExactly "Get-MyVariable"
}
}

Context "progress bar should be 1) present and 2) completed also" {
    BeforeAll {
        if ($skipTest) { return }
        # Run Export-PSSession on a private PowerShell instance so that its
        # progress stream can be inspected afterwards.
        $file = [IO.Path]::Combine([IO.Path]::GetTempPath(), [Guid]::NewGuid().ToString())
        $powerShell = [PowerShell]::Create().AddCommand("Export-PSSession").AddParameter("Session", $session).AddParameter("ModuleName", $file).AddParameter("CommandName", "Get-Process").AddParameter("AllowClobber")
        $powerShell.Invoke() | Out-Null
    }

    AfterAll {
        if ($skipTest) { return }
        $powerShell.Dispose()
        if ($null -ne $file) { Remove-Item $file -Recurse -Force -ErrorAction SilentlyContinue }
    }

    It "'Completed' progress record should be present" {
        # The last record written to the progress stream must mark completion.
        ($powerShell.Streams.Progress | Select-Object -Last 1).RecordType.ToString() | Should -BeExactly "Completed"
    }
}

Context "display of property-less objects (not sure if this test belongs here) (Windows 7: #248499)" {
    BeforeAll {
        if ($skipTest) { return }
        # System.Random exposes no properties; the Its below expect Out-String
        # to render its ToString() value, captured here as $expected.
        $x = New-Object random
        $expected = $x.ToString()
    }

    # Since New-PSSession now only loads Microsoft.PowerShell.Core and for the session in the test, Autoloading is disabled, engine cannot find New-Object as it is part of Microsoft.PowerShell.Utility module.
    # The fix is to import this module before running the command.
It "Display of local property-less objects" { ($x | Out-String).Trim() | Should -Be $expected } It "Display of remote property-less objects" { (Invoke-Command $session { Import-Module Microsoft.PowerShell.Utility; New-Object random } | Out-String).Trim() | Should -Be $expected } } It "piping between remoting proxies should work" { try { $module = Import-PSSession -Session $session -Name Write-Output -AllowClobber $result = Write-Output 123 | Write-Output $result | Should -Be 123 } finally { if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } } } It "Strange parameter names should trigger an error" { try { Invoke-Command $session { function attack(${foo="$(calc)"}){Write-Output "It is done."}} $module = Import-PSSession -Session $session -CommandName attack -ErrorAction SilentlyContinue -ErrorVariable expectedError -AllowClobber $expectedError | Should -Not -BeNullOrEmpty } finally { if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } } } It "Non-terminating error from remote end got duplicated locally" { try { Invoke-Command $session { $oldGetCommand = ${function:Get-Command} } Invoke-Command $session { function Get-Command { Write-Error blah } } $module = Import-PSSession -Session $session -ErrorAction SilentlyContinue -ErrorVariable expectedError -AllowClobber $expectedError | Should -Not -BeNullOrEmpty $msg = [string]($expectedError[0]) $msg.Contains("blah") | Should -BeTrue } finally { if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } Invoke-Command $session { ${function:Get-Command} = $oldGetCommand } } } It "Should get an error if remote server returns something that wasn't asked for" { try { Invoke-Command $session { $oldGetCommand = ${function:Get-Command} } Invoke-Command $session { function notRequested { "notRequested" }; function Get-Command { Microsoft.PowerShell.Core\Get-Command Get-Variable,notRequested } } $module = Import-PSSession -Session 
$session Get-Variable -AllowClobber -ErrorAction SilentlyContinue -ErrorVariable expectedError $expectedError | Should -Not -BeNullOrEmpty $msg = [string]($expectedError[0]) $msg.Contains("notRequested") | Should -BeTrue } finally { if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } Invoke-Command $session { ${function:Get-Command} = $oldGetCommand } } } It "Get-Command returns something that is not CommandInfo" { Invoke-Command $session { $oldGetCommand = ${function:Get-Command} } Invoke-Command $session { function Get-Command { Microsoft.PowerShell.Utility\Get-Variable } } $e = { $module = Import-PSSession -Session $session -AllowClobber } | Should -Throw -PassThru $msg = [string]($e) $msg.Contains("Get-Command") | Should -BeTrue if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } Invoke-Command $session { ${function:Get-Command} = $oldGetCommand } } # Test order of remote commands (alias > function > cmdlet > external script) It "Command resolution for 'myOrder' should be respected by implicit remoting" { try { $tempdir = Join-Path $env:TEMP ([IO.Path]::GetRandomFileName()) $null = New-Item $tempdir -ItemType Directory -Force $oldPath = Invoke-Command $session { $env:PATH } 'param([Parameter(Mandatory=$true)]$scriptParam) "external script / $scriptParam"' > $tempdir\myOrder.ps1 Invoke-Command $session { param($x) $env:PATH = $env:PATH + [IO.Path]::PathSeparator + $x } -ArgumentList $tempDir Invoke-Command $session { function myOrder { param([Parameter(Mandatory=$true)]$functionParam) "function / $functionParam" } } Invoke-Command $session { function helper { param([Parameter(Mandatory=$true)]$aliasParam) "alias / $aliasParam" }; Set-Alias myOrder helper } $expectedResult = Invoke-Command $session { myOrder -aliasParam 123 } $module = Import-PSSession $session myOrder -CommandType All -AllowClobber $actualResult = myOrder -aliasParam 123 $expectedResult | Should -Be $actualResult } finally { if 
($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } Invoke-Command $session { param($x) $env:PATH = $x; Remove-Item Alias:\myOrder, Function:\myOrder, Function:\helper -Force -ErrorAction SilentlyContinue } -ArgumentList $oldPath Remove-Item $tempDir -Force -Recurse -ErrorAction SilentlyContinue } } It "Test -Prefix parameter" { try { $module = Import-PSSession -Session $session -Name Get-Variable -Type cmdlet -Prefix My -AllowClobber (Get-MyVariable -Name pid).Value | Should -Not -Be $PID } finally { if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } } (Get-Item function:Get-MyVariable -ErrorAction SilentlyContinue) | Should -BeNullOrEmpty } Context "BadVerbs of functions should trigger a warning" { BeforeAll { if ($skipTest) { return } Invoke-Command $session { function BadVerb-Variable { param($name) Get-Variable $name } } } AfterAll { if ($skipTest) { return } Invoke-Command $session { Remove-Item Function:\BadVerb-Variable } } It "Bad verb causes no error but warning" { try { $ps = [powershell]::Create().AddCommand("Import-PSSession", $true).AddParameter("Session", $session).AddParameter("CommandName", "BadVerb-Variable") $module = $ps.Invoke() | Select-Object -First 1 $ps.Streams.Error.Count | Should -Be 0 $ps.Streams.Warning.Count | Should -Not -Be 0 } finally { if ($null -ne $module) { $ps.Commands.Clear() $ps.AddCommand("Remove-Module").AddParameter("ModuleInfo", $module).AddParameter("Force", $true) > $null $ps.Invoke() > $null } $ps.Dispose() } } It "Imported function with bad verb should work" { try { $module = Import-PSSession $session BadVerb-Variable -WarningAction SilentlyContinue -AllowClobber $remotePid = Invoke-Command $session { $PID } $getVariablePid = Invoke-Command $session { (Get-Variable -Name PID).Value } $getVariablePid | Should -Be $remotePid ## Get-Variable function should not be exported when importing a BadVerb-Variable function Get-Item Function:\Get-Variable 
-ErrorAction SilentlyContinue | Should -BeNullOrEmpty ## BadVerb-Variable should be a function, not an alias (1) Get-Item Function:\BadVerb-Variable -ErrorAction SilentlyContinue | Should -Not -BeNullOrEmpty ## BadVerb-Variable should be a function, not an alias (2) Get-Item Alias:\BadVerb-Variable -ErrorAction SilentlyContinue | Should -BeNullOrEmpty (BadVerb-Variable -Name pid).Value | Should -Be $remotePid } finally { if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } } } It "Test warning is supressed by '-DisableNameChecking'" { try { $ps = [powershell]::Create().AddCommand("Import-PSSession", $true).AddParameter("Session", $session).AddParameter("CommandName", "BadVerb-Variable").AddParameter("DisableNameChecking", $true) $module = $ps.Invoke() | Select-Object -First 1 $ps.Streams.Error.Count | Should -Be 0 $ps.Streams.Warning.Count | Should -Be 0 } finally { if ($null -ne $module) { $ps.Commands.Clear() $ps.AddCommand("Remove-Module").AddParameter("ModuleInfo", $module).AddParameter("Force", $true) > $null $ps.Invoke() > $null } $ps.Dispose() } } It "Imported function with bad verb by 'Import-PSSession -DisableNameChecking' should work" { try { $module = Import-PSSession $session BadVerb-Variable -DisableNameChecking -AllowClobber $remotePid = Invoke-Command $session { $PID } $getVariablePid = Invoke-Command $session { (Get-Variable -Name PID).Value } $getVariablePid | Should -Be $remotePid ## Get-Variable function should not be exported when importing a BadVerb-Variable function Get-Item Function:\Get-Variable -ErrorAction SilentlyContinue | Should -BeNullOrEmpty ## BadVerb-Variable should be a function, not an alias (1) Get-Item Function:\BadVerb-Variable -ErrorAction SilentlyContinue | Should -Not -BeNullOrEmpty ## BadVerb-Variable should be a function, not an alias (2) Get-Item Alias:\BadVerb-Variable -ErrorAction SilentlyContinue | Should -BeNullOrEmpty (BadVerb-Variable -Name pid).Value | Should -Be $remotePid } 
finally { if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } } } } Context "BadVerbs of alias shouldn't trigger a warning + can import an alias without saying -CommandType Alias" { BeforeAll { if ($skipTest) { return } Invoke-Command $session { Set-Alias BadVerb-Variable Get-Variable } } AfterAll { if ($skipTest) { return } Invoke-Command $session { Remove-Item Alias:\BadVerb-Variable } } It "Bad verb alias causes no error or warning" { try { $ps = [powershell]::Create().AddCommand("Import-PSSession", $true).AddParameter("Session", $session).AddParameter("CommandName", "BadVerb-Variable") $module = $ps.Invoke() | Select-Object -First 1 $ps.Streams.Error.Count | Should -Be 0 $ps.Streams.Warning.Count | Should -Be 0 } finally { if ($null -ne $module) { $ps.Commands.Clear() $ps.AddCommand("Remove-Module").AddParameter("ModuleInfo", $module).AddParameter("Force", $true) > $null $ps.Invoke() > $null } $ps.Dispose() } } It "Importing alias with bad verb should work" { try { $module = Import-PSSession $session BadVerb-Variable -AllowClobber $remotePid = Invoke-Command $session { $PID } $getVariablePid = Invoke-Command $session { (Get-Variable -Name PID).Value } $getVariablePid | Should -Be $remotePid ## BadVerb-Variable should be an alias, not a function (1) Get-Item Function:\BadVerb-Variable -ErrorAction SilentlyContinue | Should -BeNullOrEmpty ## BadVerb-Variable should be an alias, not a function (2) Get-Item Alias:\BadVerb-Variable -ErrorAction SilentlyContinue | Should -Not -BeNullOrEmpty (BadVerb-Variable -Name pid).Value | Should -Be $remotePid } finally { if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } } } } It "Removing a module should clean-up event handlers (Windows 7: #268819)" { $oldNumberOfHandlers = $ExecutionContext.GetType().GetProperty("Events").GetValue($ExecutionContext, $null).Subscribers.Count $module = Import-PSSession -Session $session -Name Get-Random -AllowClobber 
Remove-Module $module -Force $newNumberOfHandlers = $ExecutionContext.GetType().GetProperty("Events").GetValue($ExecutionContext, $null).Subscribers.Count ## Event should be unregistered when the module is removed $oldNumberOfHandlers | Should -Be $newNumberOfHandlers ## Private functions from the implicit remoting module shouldn't get imported into global scope @(Get-ChildItem function:*Implicit* -ErrorAction SilentlyContinue | Where-Object {$_.ModuleName -eq $module.Name}).Count | Should -Be 0 } } Describe "Export-PSSession function" -tags "Feature","RequireAdminOnWindows" { BeforeAll { if ($skipTest) { return } $session = New-RemoteSession $tempdir = Join-Path $env:TEMP ([IO.Path]::GetRandomFileName()) New-Item $tempdir -ItemType Directory > $null @" Import-Module `"$tempdir\Diag`" `$mod = Get-Module Diag Return `$mod "@ > $tempdir\TestBug450687.ps1 } AfterAll { if ($skipTest) { return } if ($null -ne $session) { Remove-PSSession $session -ErrorAction SilentlyContinue } if ($null -ne $tempdir) { Remove-Item $tempdir -Force -Recurse -ErrorAction SilentlyContinue } } It "Test the module created by Export-PSSession" { try { Export-PSSession -Session $session -OutputModule $tempdir\Diag -CommandName New-Guid -AllowClobber > $null # Only the snapin Microsoft.PowerShell.Core is loaded $iss = [System.Management.Automation.Runspaces.InitialSessionState]::CreateDefault2() $ps = [PowerShell]::Create($iss) $result = $ps.AddScript(" & $tempdir\TestBug450687.ps1").Invoke() ## The module created by Export-PSSession is imported successfully ($null -ne $result -and $result.Count -eq 1 -and $result[0].Name -eq "Diag") | Should -BeTrue ## The command Add-BitsFile is imported successfully $c = $result[0].ExportedCommands["New-Guid"] ($null -ne $c -and $c.CommandType -eq "Function") | Should -BeTrue } finally { $ps.Dispose() } } } Describe "Implicit remoting with disconnected session" -tags "Feature","RequireAdminOnWindows" { BeforeAll { if ($skipTest) { return } $session = 
New-RemoteSession -Name Session102 $remotePid = Invoke-Command $session { $PID } $module = Import-PSSession $session Get-Variable -Prefix Remote -AllowClobber } AfterAll { if ($skipTest) { return } if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } if ($null -ne $session) { Remove-PSSession $session -ErrorAction SilentlyContinue } } It "Remote session PID should be different" { $sessionPid = Get-RemoteVariable pid $sessionPid.Value | Should -Be $remotePid } It "Disconnected session should be reconnected when calling proxied command" { Disconnect-PSSession $session $dSessionPid = Get-RemoteVariable pid $dSessionPid.Value | Should -Be $remotePid $session.State | Should -Be 'Opened' } ## It requires 'New-PSSession' to work with implicit credential to allow proxied command to create new session. ## Implicit credential doesn't work in the Windows Azure DevOps builder, so mark this test '-pending'. ## Also, this feature doesn't work on macOS or Linux It "Should have a new session when the disconnected session cannot be re-connected" -Pending { ## Disconnect session and make it un-connectable. Disconnect-PSSession $session Start-Process powershell -arg 'Get-PSSession -cn localhost -name Session102 | Connect-PSSession' -Wait Start-Sleep -Seconds 3 ## This time a new session is created because the old one is unavailable. 
$dSessionPid = Get-RemoteVariable pid $dSessionPid.Value | Should -Not -Be $remotePid } } Describe "Select-Object with implicit remoting" -tags "Feature","RequireAdminOnWindows" { BeforeAll { if ($skipTest) { return } $session = New-RemoteSession Invoke-Command $session { function foo { "a","b","c" } } $module = Import-PSSession $session foo -AllowClobber } AfterAll { if ($skipTest) { return } if ($null -ne $module) { Remove-Module $module -Force -ErrorAction SilentlyContinue } if ($null -ne $session) { Remove-PSSession $session -ErrorAction SilentlyContinue } } It "Select-Object -First should work with implicit remoting" { $bar = foo | Select-Object -First 2 $bar | Should -Not -BeNullOrEmpty $bar.Count | Should -Be 2 $bar[0] | Should -BeExactly "a" $bar[1] | Should -BeExactly "b" } } Describe "Get-FormatData used in Export-PSSession should work on DL targets" -tags "Feature","RequireAdminOnWindows" { BeforeAll { # Skip tests for CoreCLR for now # Skip tests if .NET 2.0 and PS 2.0 are not installed on the machine $skipThisTest = $skipTest -or $IsCoreCLR -or (! (Test-Path 'HKLM:\SOFTWARE\Microsoft\NET Framework Setup\NDP\v2.0.50727')) -or (! (Test-Path 'HKLM:\SOFTWARE\Microsoft\PowerShell\1\PowerShellEngine')) if ($skipThisTest) { return } ## The call to 'Register-PSSessionConfiguration -PSVersion 2.0' below raises an exception: ## `Cannot bind parameter 'PSVersion' to the target. Exception setting "PSVersion": "Windows PowerShell 2.0 is not installed. ## Install Windows PowerShell 2.0, and then try again."` ## Issue #2556 is created to track this issue and the test here is skipped for CoreCLR for now. 
$configName = "DLConfigTest" $null = Register-PSSessionConfiguration -Name $configName -PSVersion 2.0 -Force $session = New-RemoteSession -ConfigurationName $configName } AfterAll { if ($skipThisTest) { return } if ($null -ne $session) { Remove-PSSession $session -ErrorAction SilentlyContinue } Unregister-PSSessionConfiguration -Name $configName -Force -ErrorAction SilentlyContinue } It "Verifies that Export-PSSession with PS 2.0 session and format type names succeeds" -Skip:$skipThisTest { try { $results = Export-PSSession -Session $session -OutputModule tempTest -CommandName Get-Process ` -AllowClobber -FormatTypeName * -Force -ErrorAction Stop $results.Count | Should -Not -Be 0 } finally { if ($results.Count -gt 0) { Remove-Item -Path $results[0].DirectoryName -Recurse -Force -ErrorAction SilentlyContinue } } } } Describe "GetCommand locally and remotely" -tags "Feature","RequireAdminOnWindows" { BeforeAll { if ($skipTest) { return } $session = New-RemoteSession } AfterAll { if ($skipTest) { return } if ($null -ne $session) { Remove-PSSession $session -ErrorAction SilentlyContinue } } It "Verifies that the number of local cmdlet command count is the same as remote cmdlet command count." 
{ $localCommandCount = (Get-Command -Type Cmdlet).Count $remoteCommandCount = Invoke-Command { (Get-Command -Type Cmdlet).Count } $localCommandCount | Should -Be $remoteCommandCount } } Describe "Import-PSSession on Restricted Session" -tags "Feature","RequireAdminOnWindows","Slow" { BeforeAll { if ($skipTest) { return } $configName = "restricted_" + (Get-RandomFileName) New-PSSessionConfigurationFile -Path $TestDrive\restricted.pssc -SessionType RestrictedRemoteServer Register-PSSessionConfiguration -Path $TestDrive\restricted.pssc -Name $configName -Force $session = New-RemoteSession -ConfigurationName $configName } AfterAll { if ($skipTest) { return } if ($session -ne $null) { Remove-PSSession -Session $session -ErrorAction SilentlyContinue } Unregister-PSSessionConfiguration -Name $configName -Force -ErrorAction SilentlyContinue } It "Verifies that Import-PSSession works on a restricted session" { $errorVariable = $null try { $module = Import-PSSession -Session $session -AllowClobber -ErrorVariable $errorVariable -CommandName Get-Help } finally { if ($module -ne $null) { Remove-Module $module -Force -ErrorAction SilentlyContinue } } $errorVariable | Should -BeNullOrEmpty } } } finally { $global:PSDefaultParameterValues = $originalDefaultParameterValues $WarningPreference = $originalWarningPreference }
{ "content_hash": "fd2720c7de6215f8427ab42850de0c51", "timestamp": "", "source": "github", "line_count": 2085, "max_line_length": 223, "avg_line_length": 42.954916067146286, "alnum_prop": 0.5516017016335235, "repo_name": "daxian-dbw/PowerShell", "id": "0b73e7be7aaa22c4454d9d88a19f4e2903e3a022", "size": "89561", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "test/powershell/Modules/Microsoft.PowerShell.Utility/Implicit.Remoting.Tests.ps1", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "24" }, { "name": "C#", "bytes": "30861483" }, { "name": "Dockerfile", "bytes": "6482" }, { "name": "HTML", "bytes": "18060" }, { "name": "JavaScript", "bytes": "8738" }, { "name": "PowerShell", "bytes": "5008481" }, { "name": "Rich Text Format", "bytes": "40664" }, { "name": "Roff", "bytes": "214981" }, { "name": "Shell", "bytes": "57893" }, { "name": "XSLT", "bytes": "14397" } ], "symlink_target": "" }
import * as WebSocket from 'ws';

import { Dispatcher } from './dispatcher';
import { Observer } from './observer';
import { Gpg } from '../gpg';
import { Message } from '../model';
import {
  GpgCreateKeySet,
  GpgResetYubikey,
  GpgChangePinYubikey,
  GpgChangeCard,
  DeleteSecretKey,
  RequestAsciiDispatcher,
  SendKeyToYubiKey,
  DiceWareDispatcher
} from './dispatcher';

/**
 * Routes incoming websocket messages to the first registered dispatcher
 * that reports it handled the message.
 */
export class Dispatch {
  public readonly dispatcher: Dispatcher[] = [];

  /**
   * Builds a Dispatch instance wired up with every known dispatcher,
   * in the fixed order the rest of the app relies on.
   */
  public static start(gpg: Gpg): Dispatch {
    console.log('Dispatch.start');
    const dispatch = new Dispatch();
    const handlers: Dispatcher[] = [
      GpgCreateKeySet.create(gpg),
      GpgResetYubikey.create(gpg),
      GpgChangePinYubikey.create(gpg),
      DeleteSecretKey.create(gpg),
      RequestAsciiDispatcher.create(gpg),
      SendKeyToYubiKey.create(gpg),
      GpgChangeCard.create(gpg),
      DiceWareDispatcher.create()
    ];
    for (const handler of handlers) {
      dispatch.dispatcher.push(handler);
    }
    return dispatch;
  }

  /**
   * Offers the message to each dispatcher in registration order.
   * Returns true as soon as one dispatcher's run() is truthy
   * (some() short-circuits exactly like the original !!find()).
   */
  public run(observer: Observer, ws: WebSocket, m: Message.Message): boolean {
    console.log('Dispatch.run', m.header);
    return this.dispatcher.some((handler: Dispatcher) =>
      handler.run(observer, ws, m)
    );
  }
}
{ "content_hash": "82e43dd09f294960aaf2079183ad7b48", "timestamp": "", "source": "github", "line_count": 42, "max_line_length": 78, "avg_line_length": 31.11904761904762, "alnum_prop": 0.7199693955623565, "repo_name": "mabels/clavator", "id": "636a5da04e1d899b60e165ef7e4749b31c5cd426", "size": "1307", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/server/dispatch.ts", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C++", "bytes": "158614" }, { "name": "CMake", "bytes": "4584" }, { "name": "Dockerfile", "bytes": "1566" }, { "name": "HTML", "bytes": "291" }, { "name": "JavaScript", "bytes": "23214" }, { "name": "Ruby", "bytes": "8896" }, { "name": "Shell", "bytes": "67843" }, { "name": "TypeScript", "bytes": "353141" } ], "symlink_target": "" }
package jy.sudoku.validator;

import java.io.File;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.Scanner;

/**
 * Loads a sudoku puzzle from a text file where each row is one line of
 * dash-separated integers (e.g. "1-2-3-...") and converts it to a 2D array.
 *
 * @author jonathanyantz
 */
public class Puzzle {

    /**
     * Reads the puzzle file at {@code location}, one line per row.
     *
     * @param location path of the puzzle file
     * @return the puzzle as a 2D int array, or an empty 0x0 array when the
     *         file does not exist
     */
    public static int[][] get(String location) {
        ArrayList<String> list = new ArrayList<String>();

        // try-with-resources closes the Scanner on every exit path
        // (the original leaked the file handle).
        try (Scanner contents = new Scanner(new File(location))) {
            while (contents.hasNextLine()) {
                list.add(contents.nextLine());
            }
        } catch (FileNotFoundException e) {
            System.out.println("File does not exist.");
            return new int[0][0];
        }

        return make2d(list);
    }//end get

    /**
     * Splits every "-"-separated line into ints and stores them row by row.
     * Assumes the grid is square (columns == number of lines) — TODO confirm
     * against the validator's input format; a longer row would overflow.
     *
     * @param puzzle the raw puzzle lines
     * @return the parsed 2D array
     */
    private static int[][] make2d(ArrayList<String> puzzle) {
        int[][] arr = new int[puzzle.size()][puzzle.size()];

        for (int i = 0; i < puzzle.size(); i++) {
            String[] splitLine = puzzle.get(i).split("-");
            for (int j = 0; j < splitLine.length; j++) {
                arr[i][j] = Integer.parseInt(splitLine[j]);
            }
        }
        return arr;
    }//end make2d
}//end class Puzzle
{ "content_hash": "dcacb034e0574774a57197ff12488621", "timestamp": "", "source": "github", "line_count": 64, "max_line_length": 75, "avg_line_length": 27.53125, "alnum_prop": 0.5045402951191827, "repo_name": "jryantz/sudoku-validator", "id": "7da5e76ad2225a2961fe5dd33d9f538bb9e6044e", "size": "1762", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/jy/sudoku/validator/Puzzle.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "8497" } ], "symlink_target": "" }
<?php

namespace Oro\Bundle\EntityConfigBundle\Migrations\Schema\v1_14_1;

use Doctrine\DBAL\Schema\Schema;
use Oro\Bundle\MigrationBundle\Migration\Migration;
use Oro\Bundle\MigrationBundle\Migration\QueryBag;

/**
 * Schedules the query that repairs broken enum entity-config fields.
 */
class UpdateConfigFieldBrokenEnum implements Migration
{
    /**
     * {@inheritdoc}
     */
    public function up(Schema $schema, QueryBag $queries)
    {
        // The actual fix lives in the query object; this migration only
        // registers it for execution.
        $brokenEnumFixQuery = new UpdateConfigFieldBrokenEnumQuery();
        $queries->addQuery($brokenEnumFixQuery);
    }
}
{ "content_hash": "2b3d6299666f9e458a816534d2d19418", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 67, "avg_line_length": 23.285714285714285, "alnum_prop": 0.7280163599182005, "repo_name": "orocrm/platform", "id": "d9163912e90cc7c00ae275f01996d6cec2d69f27", "size": "489", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/Oro/Bundle/EntityConfigBundle/Migrations/Schema/v1_14_1/UpdateConfigFieldBrokenEnum.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "618485" }, { "name": "Gherkin", "bytes": "158217" }, { "name": "HTML", "bytes": "1648915" }, { "name": "JavaScript", "bytes": "3326127" }, { "name": "PHP", "bytes": "37828618" } ], "symlink_target": "" }
ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name Ilex peduncularis Reissek ### Remarks null
{ "content_hash": "2e08c9de6b30e91f49070363165b377e", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 31, "avg_line_length": 11.307692307692308, "alnum_prop": 0.7346938775510204, "repo_name": "mdoering/backbone", "id": "6d59dfe53430c69bfc3c85895b053eff69faad22", "size": "192", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Aquifoliales/Aquifoliaceae/Ilex/Ilex ferdinandi/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
package com.fourinone; import java.util.ArrayList; import java.util.List; import java.io.Serializable; import java.rmi.RemoteException; //import java.rmi.ConnectException; final public class ParkProxy{ private static String sid = null; private Park pk; private ParkLeader pl = null; public ParkProxy(String host, int port, String sn) { pl = new ParkLeader(host,port,sn); pk = pl.getLeaderPark(); init(); } public ParkProxy(String host, int port, String[][] servers, String sn)//all server host and port:string[][] { pl = new ParkLeader(host,port,servers,sn); pk = pl.getLeaderPark();//(Park)BeanService.getBean(host,port,"ParkService");//try change pk if catch exception //new ParkLeader(host,port,String[][]) //pl.getMasterPark(){catch remoteexception and try next until get one}; init(); } // private class ObjectBeanProxy implements ObjectBean{ // private Object obj; // private Long vid; // private String name; // private ObjectBeanProxy(){} /*private ObjectBeanProxy(ObjValue ov, String domainnodekey){ vid = (Long)ov.getObj(domainnodekey+"._me_ta.version"); obj = ov.get(domainnodekey); name = domainnodekey; }*/ //@Delegate(interfaceName="com.fourinone.ObjectBean",methodName="toObject",policy=DelegatePolicy.Implements) // public Object toObject(){ // return obj; // } //@Delegate(interfaceName="com.fourinone.ObjectBean",methodName="getName",policy=DelegatePolicy.Implements) // public String getName(){ // return name; // } // public String toString(){ // return name+":"+obj.toString(); // } /*@Delegate(interfaceName="com.fourinone.ObjectVersion",methodName="getVid",policy=DelegatePolicy.Implements) public Long getVid(){ return vid; }*/ // } // private class ObjectBeanList<E> extends ArrayList implements List{ // private Long vid; // } private void init(){ try{ if(sid==null) sid = pk.getSessionId(); }catch(Exception e){ //e.printStackTrace(); LogUtil.info("[Park]", "[init]", e.toString()); } } 
@Delegate(interfaceName="com.fourinone.ParkLocal",methodName="create",policy=DelegatePolicy.Implements) public ObjectBean create(String domain, Serializable obj){ return put(domain, BeanContext.getNumber(), obj);//System.nanoTime() } @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="create",policy=DelegatePolicy.Implements) public ObjectBean put(String domain, String node, Serializable obj){ return put(domain, node, obj, AuthPolicy.OP_ALL); } @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="create",policy=DelegatePolicy.Implements) public ObjectBean put(String domain, String node, Serializable obj, AuthPolicy auth){ return put(domain, node, obj, auth, false); } @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="create",policy=DelegatePolicy.Implements) public ObjectBean create(String domain, String node, Serializable obj, boolean heartbeat){ return put(domain, node, obj, AuthPolicy.OP_ALL, heartbeat); } @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="create",policy=DelegatePolicy.Implements) public ObjectBean put(String domain, String node, Serializable obj, AuthPolicy auth, boolean heartbeat){ return put(domain, node, obj, auth, heartbeat, 0); } public ObjectBean put(String domain, String node, Serializable obj, AuthPolicy auth, boolean heartbeat, int i) { ObjectBean ob=null; if(ParkObjValue.checkGrammar(domain, node, obj)){ try{ ObjValue ov = pk.create(domain, node, ObjectBytes.toBytes(obj), sid, auth.getPolicy(), heartbeat); ob = OvToBean(ov,domain,node); if(ob!=null&&heartbeat) HbDaemo.runPutTask(pk, pl, domain, node, sid); //System.out.println("created..."); }catch(Exception e){ //e.printStackTrace(); LogUtil.info("[Park]", "[put]", e.getMessage()); if(e instanceof RemoteException){ //if(i<pl.groupserver.length) //{ pk = pl.getNextLeader(); if(pk!=null) ob = put(domain, node, obj, auth, heartbeat, i+1); //} } //if(e=LeaderException or java.rmi.ConnectException) //{pk=getNextMaster;ob = put(...); if(e instanceof 
ClosetoOverException){ LogUtil.info("[Park]", "[put]", ((ClosetoOverException)e).print()); } } } return ob; } @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="update",policy=DelegatePolicy.Implements) public ObjectBean update(String domain, String node, Serializable obj){ return update(domain, node, obj, 0); } public ObjectBean update(String domain, String node, Serializable obj, int i) { ObjectBean ob=null; if(ParkObjValue.checkGrammar(domain, node, obj)){ try{ ObjValue ov = pk.update(domain, node, ObjectBytes.toBytes(obj), sid); ob = OvToBean(ov, domain,node); }catch(Exception e){ LogUtil.info("[Park]", "[update]", e.getMessage()); //e.printStackTrace(); if(e instanceof RemoteException){ //if(i<pl.groupserver.length) //{ pk = pl.getNextLeader(); if(pk!=null) ob = update(domain, node, obj,i+1); //} } if(e instanceof ClosetoOverException){ LogUtil.info("[Park]", "[update]", ((ClosetoOverException)e).print()); } } } return ob; } @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="get",policy=DelegatePolicy.Implements) public ObjectBean get(String domain, String node){ return get(domain, node, 0); } public ObjectBean get(String domain, String node, int i) { ObjectBean ob=null; if(ParkObjValue.checkGrammar(domain, node)){ try{ ObjValue ov = pk.get(domain, node, sid);//getTestObj(); //ob = new ObjectBeanProxy(ov, domain,node); ob = OvToBean(ov, domain,node); }catch(Exception e){ LogUtil.info("[Park]", "[get]", e.getMessage()); if(e instanceof RemoteException){ //if(i<pl.groupserver.length) //{ pk = pl.getNextLeader(); if(pk!=null) ob = get(domain, node, i+1); //} } } } return ob; } @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="getLastest",policy=DelegatePolicy.Implements) public ObjectBean getLastest(String domain, String node, ObjectBean obold){ return getLastest(domain, node, obold, 0); } public ObjectBean getLastest(String domain, String node, ObjectBean obold, int i){ ObjectBean ob=null; if(ParkObjValue.checkGrammar(domain, 
node)){ try{ long vid = obold!=null?((ObjectBeanProxy)obold).vid:0l;//ObjectVersion //System.out.println("ob.vid:"+vid); ObjValue ov = pk.getLastest(domain, node, sid, vid); //System.out.println(ov); ob = OvToBean(ov, domain,node); //System.out.println(ob); }catch(Exception e){ LogUtil.info("[Park]", "[getLastest]", e.getMessage()); if(e instanceof RemoteException){ //if(i<pl.groupserver.length) //{ pk = pl.getNextLeader(); if(pk!=null) ob = getLastest(domain, node, obold, i+1); //} } } } return ob; } @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="get",policy=DelegatePolicy.Implements) public List<ObjectBean> getNodes(String domain){ return getNodes(domain, 0); } public List<ObjectBean> getNodes(String domain, int i) { List<ObjectBean> objlist = null; if(ParkObjValue.checkGrammar(domain)){ try{ ObjValue ov = pk.get(domain, null, sid);//getTestObj(); objlist = OvToBeanList(ov, domain); }catch(Exception e){ LogUtil.info("[Park]", "[getNodes]", e.getMessage()); if(e instanceof RemoteException){ //if(i<pl.groupserver.length) //{ pk = pl.getNextLeader(); if(pk!=null) objlist = getNodes(domain, i+1); //} } if(e instanceof ClosetoOverException){ LogUtil.info("[Park]", "[getNodes]", ((ClosetoOverException)e).print()); } } } return objlist; } @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="getLastest",policy=DelegatePolicy.Implements) public List<ObjectBean> getNodesLastest(String domain, List<ObjectBean> oblist){ return getNodesLastest(domain, oblist, 0); } public List<ObjectBean> getNodesLastest(String domain, List<ObjectBean> oblist, int i){ List<ObjectBean> objlist = null; if(ParkObjValue.checkGrammar(domain)){ try{ long vid = oblist!=null?((ObjectBeanList)oblist).vid:0l; ObjValue ov = pk.getLastest(domain, null, sid, vid); //System.out.println("getNodesLastest:"+ov); objlist = OvToBeanList(ov, domain); }catch(Exception e){ LogUtil.info("[Park]", "[getNodesLastest]", e.getMessage()); if(e instanceof RemoteException){ 
//if(i<pl.groupserver.length) //{ pk = pl.getNextLeader(); if(pk!=null) objlist = getNodesLastest(domain, oblist, i+1); //} } if(e instanceof ClosetoOverException){ LogUtil.info("[Park]", "[getNodesLastest]", ((ClosetoOverException)e).print()); } } } return objlist; } @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="delete",policy=DelegatePolicy.Implements) public ObjectBean remove(String domain, String node) { return remove(domain, node, 0); } public ObjectBean remove(String domain, String node, int i) { ObjectBean ob=null; //System.out.println("remove(String domain, String node):"+domain); if(ParkObjValue.checkGrammar(domain,node)){ try{ ObjValue ov = pk.delete(domain, node, sid); ob = OvToBean(ov, domain,node); }catch(Exception e){ //e.printStackTrace(); LogUtil.info("[Park]", "[delete]", e.getMessage()); if(e instanceof RemoteException){ //if(i<pl.groupserver.length) //{ pk = pl.getNextLeader(); if(pk!=null) ob = remove(domain, node, i+1); //} } } } return ob; } @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="delete",policy=DelegatePolicy.Implements) public List<ObjectBean> remove(String domain){ return remove(domain,0); } public List<ObjectBean> remove(String domain, int i) { List<ObjectBean> objlist = null; if(ParkObjValue.checkGrammar(domain)){ try{ ObjValue ov = pk.delete(domain, null, sid); //System.out.println(ov); objlist = OvToBeanList(ov, domain); }catch(Exception e){ //e.printStackTrace(); LogUtil.info("[Park]", "[delete]", e.getMessage()); if(e instanceof RemoteException){//ConnectException //if(i<pl.groupserver.length) //{ pk = pl.getNextLeader(); if(pk!=null) objlist = remove(domain,i+1); //} } if(e instanceof ClosetoOverException){ LogUtil.info("[Park]", "[delete]", ((ClosetoOverException)e).print()); } } } return objlist; } @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="setDeletable",policy=DelegatePolicy.Implements) public boolean updateDomainAuth(String domain){ return updateDomainAuth(domain,0); } 
    // Retrying implementation: grants OP_ALL on the domain, failing over to the
    // next group leader on RemoteException. Returns false on failure.
    public boolean updateDomainAuth(String domain, int i){
        boolean setflag = false;
        if(ParkObjValue.checkGrammar(domain)){
            try{
                setflag = pk.update(domain, AuthPolicy.OP_ALL.getPolicy(), sid);
            }catch(Exception e){
                LogUtil.info("[Park]", "[setDeletable]", e.getMessage());
                if(e instanceof RemoteException){//ConnectException
                    pk = pl.getNextLeader();
                    if(pk!=null)
                        setflag = updateDomainAuth(domain,i+1);
                }
            }
        }
        return setflag;
    }

    /**
     * Watches a single node for updates. Runs an asynchronous loop that repeatedly
     * calls getLastest (which blocks server-side until a newer version exists) and
     * fires the listener for each change; the loop stops when the listener's
     * happenLastest returns true.
     */
    @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="addLastestListener",policy=DelegatePolicy.Implements)
    public void addLastestListener(String domain, String node, ObjectBean ob, LastestListener liser)
    {
        // Captured as final so the anonymous task below can reference them.
        final String dm = domain;
        final String nd = node;
        final ObjectBean oob = ob;
        final LastestListener lis = liser;
        new AsyncExector(){
            public void task(){
                try{
                    /*ObjectBean newob = null;
                    while((newob=getLastest(dm, nd, oldob))==null);
                    LogUtil.fine("[Park]","[Trim LastestEvent]","[obj]");
                    LastestEvent le = new LastestEvent(newob);
                    lis.happenLastest(le);*/
                    ObjectBean oldob = oob;
                    while(true){
                        ObjectBean newob = getLastest(dm, nd, oldob);
                        if(newob!=null){
                            LogUtil.fine("[Park]","[Trim LastestEvent]","[obj]");
                            LastestEvent le = new LastestEvent(newob);
                            // Listener returning true means "stop watching".
                            if(lis.happenLastest(le))
                                break;
                            // Carry the just-seen bean forward as the new baseline version.
                            oldob = (ObjectBean)le.getSource();
                        }
                    }
                }catch(Exception e){
                    LogUtil.info("[Park]","[addLastestListener]",e);
                }
            }
        }.run();//ScheduledExecutorService or Thread.sleep(1) but time delay, so add time param
    }

    /**
     * Watches the whole node list of a domain for updates; same loop structure as
     * the single-node overload above, but list-versioned.
     */
    @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="addLastestListener",policy=DelegatePolicy.Implements)
    public void addLastestListener(String domain, List<ObjectBean> oblist, LastestListener liser)
    {
        final String dm = domain;
        final List<ObjectBean> ols = oblist;
        final LastestListener lis = liser;
        new AsyncExector(){
            public void task(){
                try{
                    /*List<ObjectBean> newls = null;
                    while((newls=getNodesLastest(dm, oldls))==null);
                    LogUtil.fine("[Park]","[Trim LastestEvent]","[list]");
                    LastestEvent le = new LastestEvent(newls);
                    lis.happenLastest(le);*/
                    List<ObjectBean> oldls = ols;
                    while(true){
                        List<ObjectBean> newls = getNodesLastest(dm, oldls);
                        if(newls!=null){
                            LogUtil.fine("[Park]","[Trim LastestEvent]","[list]");
                            LastestEvent le = new LastestEvent(newls);
                            if(lis.happenLastest(le))
                                break;
                            oldls = (List<ObjectBean>)le.getSource();
                        }
                    }
                }catch(Exception e){
                    LogUtil.info("[Park]","[addLastestListener]",e);
                }
            }
        }.run();
    }

    /**
     * Converts a wire-level ObjValue holding one node into an ObjectBean proxy,
     * attaching the node's version id, deserialized payload and full key name.
     * Returns null when the ObjValue is null or empty.
     */
    public ObjectBean OvToBean(ObjValue ov, String domain, String node){
        if(ov!=null&&!ov.isEmpty())
        {
            //System.out.println("OvToBean:"+ov);
            ObjectBeanProxy obp = new ObjectBeanProxy();
            //ObjectBean ob = (ObjectBean)DelegateConsole.bind(new Class[]{ObjectBean.class,ObjectVersion.class}, new ObjectBeanProxy(ov, domainnodekey));
            //ObjectBeanProxy obp = (ObjectBeanProxy)ob;
            String domainnodekey = ParkObjValue.getDomainnodekey(domain, node);
            // Version id stored under the node's metadata key.
            obp.vid = (Long)ov.getObj(ParkMeta.getYBB(domainnodekey));
            // Payload bytes stored under the plain key, deserialized back to an object.
            obp.obj = ObjectBytes.toObject((byte[])ov.get(domainnodekey));
            obp.name = domainnodekey;
            return obp;
        }else
            return null;
    }

    /**
     * Converts a wire-level ObjValue holding a whole domain into an ObjectBeanList:
     * the list carries the domain-level version id, and each entry is built from
     * the per-node version entries matched by the domain wildcard pattern.
     * Returns null when the ObjValue is null or empty.
     */
    public List<ObjectBean> OvToBeanList(ObjValue ov, String domain){
        if(ov!=null&&!ov.isEmpty())
        {
            ObjectBeanList<ObjectBean> objlist = new ObjectBeanList<ObjectBean>();
            objlist.vid = (Long)ov.getObj(ParkMeta.getYBB(domain));
            // Wildcard query for every node-version entry under this domain.
            ObjValue nodeversion = ov.getWidely(ParkMeta.getYBB(domain+"..*"));
            ArrayList<String> nvnames = nodeversion.getObjNames();
            for(String nvname:nvnames){
                ObjectBeanProxy obp = new ObjectBeanProxy();
                obp.vid = (Long)nodeversion.getObj(nvname);
                // Strip the metadata suffix (separator from ParkMeta.getYSJ()) to get
                // the plain node key, then fetch and deserialize its payload.
                obp.name = nvname.substring(0,nvname.indexOf(ParkMeta.getYSJ()));
                obp.obj = ObjectBytes.toObject((byte[])ov.getObj(obp.name));
                //ObjectBean ob = (ObjectBean)DelegateConsole.bind(new Class[]{ObjectBean.class,ObjectVersion.class}, obp);
                objlist.add(obp);
            }
            return objlist;
        }else
            return null;
    }

    /*
    private ObjValue getTestObj(){
        ObjValue ov = new ObjValue();
        ov.set("d","2");
        ov.setObj("d._me_ta.version",11l);
        ov.set("d.n","aaa");
        ov.setObj("d.n._me_ta.version",111l);
        ov.set("d.m","bbb");
        ov.setObj("d.m._me_ta.version",222l);
        return ov;
    }
    */
public static void main(String[] args){ try{ //Park pk = (Park)BeanService.getBean("localhost",1888,"ParkService"); //System.out.println(pk.put(args[0], args[1], args[2], sid)); //ParkProxy pp = new ParkProxy(); //ParkLocal pp = DelegateHandle.bind(ParkLocal.class, ParkProxy.class); ParkLocal pp = BeanContext.getPark(); pp.create(args[0],args[1],args[2],AuthPolicy.OP_ALL,true); //pp.create("d","m","b",true); //pp.create("d","x","c",true); /*pp.create("d","n","a"); pp.create("d","m","b"); pp.create("d","x","c"); pp.create("d","y","d"); //System.out.println("ob_put:"+ob_put.toObject()); ObjectBean ob_get = pp.get("d","g"); System.out.println("ob_get:"+ob_get); if(ob_get!=null){ System.out.println("ob_get.toObject:"+ob_get.toObject()); System.out.println("obp.vid:"+((ObjectBeanProxy)ob_get).vid); } List<ObjectBean> oblist = pp.get("d"); for(ObjectBean obean:oblist) System.out.println("obean:"+obean.getName()); System.out.println(pp.getLastest("d","n",ob_get)); System.out.println(pp.getLastest("d",oblist)); System.out.println(pp.delete("d","n")); System.out.println(pp.delete("d")); */ /* ObjectBean ob = pp.get("d","n"); System.out.println("ob:"+ob.toObject()); System.out.println("ob:"+ob.getName()); ObjectBeanProxy obp = (ObjectBeanProxy)ob; System.out.println("obp:"+obp.vid); //System.out.println(pp.getLastest("d","n",null)); List<ObjectBean> lob = pp.getNodes(null);//"d" System.out.println("lob:"+lob); //for(ObjectBean obean:lob) //System.out.println("obean:"+obean.getName()); */ }catch(Exception e){ e.printStackTrace(); } } }
{ "content_hash": "7bf430a36bd5750af6ea7fd1cad5dc52", "timestamp": "", "source": "github", "line_count": 534, "max_line_length": 145, "avg_line_length": 32.835205992509366, "alnum_prop": 0.6464583095699783, "repo_name": "hnlaomie/fourinone", "id": "ca1cb2dc7cdaf8c4cd7a5fac8986dd8376e76025", "size": "17534", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "core/src/main/java/com/fourinone/ParkProxy.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "357283" } ], "symlink_target": "" }
package cn.ucloud.ufile.body; public class ErrorBody { private int RetCode; private String ErrMsg; public ErrorBody() { } public int getRetCode() { return RetCode; } public void setRetCode(int retCode) { RetCode = retCode; } public String getErrMsg() { return ErrMsg; } public void setErrMsg(String errMsg) { ErrMsg = errMsg; } }
{ "content_hash": "15b5acf267a754e6f1c2b9e840bd7494", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 39, "avg_line_length": 13.846153846153847, "alnum_prop": 0.6861111111111111, "repo_name": "hsiun/yoyo", "id": "a1fcb525c7828770f14b36a325676cbbc6299015", "size": "360", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ufile/java/v1/UFileSDK/src/cn/ucloud/ufile/body/ErrorBody.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "224" }, { "name": "C#", "bytes": "49061" }, { "name": "HTML", "bytes": "23829" }, { "name": "Java", "bytes": "615595" }, { "name": "JavaScript", "bytes": "43181" }, { "name": "Objective-C", "bytes": "897907" }, { "name": "PHP", "bytes": "47614" }, { "name": "PowerShell", "bytes": "3137" }, { "name": "Python", "bytes": "118181" }, { "name": "Shell", "bytes": "3661" }, { "name": "Swift", "bytes": "33240" } ], "symlink_target": "" }
using System.Reflection; using System.Runtime.InteropServices; using System.Windows; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. [assembly: AssemblyTitle("WpfTwitterStreamingApplication")] [assembly: AssemblyDescription("")] [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("WpfTwitterStreamingApplication")] [assembly: AssemblyCopyright("Copyright © 2015")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] // Setting ComVisible to false makes the types in this assembly not visible // to COM components. If you need to access a type in this assembly from // COM, set the ComVisible attribute to true on that type. [assembly: ComVisible(false)] //In order to begin building localizable applications, set //<UICulture>CultureYouAreCodingWith</UICulture> in your .csproj file //inside a <PropertyGroup>. For example, if you are using US english //in your source files, set the <UICulture> to en-US. Then uncomment //the NeutralResourceLanguage attribute below. Update the "en-US" in //the line below to match the UICulture setting in the project file. 
//[assembly: NeutralResourcesLanguage("en-US", UltimateResourceFallbackLocation.Satellite)] [assembly: ThemeInfo( ResourceDictionaryLocation.None, //where theme specific resource dictionaries are located //(used if a resource is not found in the page, // or application resource dictionaries) ResourceDictionaryLocation.SourceAssembly //where the generic resource dictionary is located //(used if a resource is not found in the page, // app, or any theme specific resource dictionaries) )] // Version information for an assembly consists of the following four values: // // Major Version // Minor Version // Build Number // Revision // // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' as shown below: // [assembly: AssemblyVersion("1.0.*")] [assembly: AssemblyVersion("1.0.0.0")] [assembly: AssemblyFileVersion("1.0.0.0")]
{ "content_hash": "91c6754979f344bacb98908753e19714", "timestamp": "", "source": "github", "line_count": 56, "max_line_length": 96, "avg_line_length": 39.44642857142857, "alnum_prop": 0.7537347215934812, "repo_name": "josemigallas/WcfTwitterStreamingService", "id": "f8d505b67e6d4e6b7047b96707e975c7cb8bdc3f", "size": "2212", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "WpfTwitterStreamingApplication/Properties/AssemblyInfo.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "31257" } ], "symlink_target": "" }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> <title>paco: Not compatible 👼</title> <link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" /> <link href="../../../../../bootstrap.min.css" rel="stylesheet"> <link href="../../../../../bootstrap-custom.css" rel="stylesheet"> <link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet"> <script src="../../../../../moment.min.js"></script> <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries --> <!-- WARNING: Respond.js doesn't work if you view the page via file:// --> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script> <script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script> <![endif]--> </head> <body> <div class="container"> <div class="navbar navbar-default" role="navigation"> <div class="container-fluid"> <div class="navbar-header"> <a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a> </div> <div id="navbar" class="collapse navbar-collapse"> <ul class="nav navbar-nav"> <li><a href="../..">clean / released</a></li> <li class="active"><a href="">8.15.1 / paco - 4.0.0</a></li> </ul> </div> </div> </div> <div class="article"> <div class="row"> <div class="col-md-12"> <a href="../..">« Up</a> <h1> paco <small> 4.0.0 <span class="label label-info">Not compatible 👼</span> </small> </h1> <p>📅 <em><script>document.write(moment("2022-05-27 23:47:52 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-05-27 23:47:52 UTC)</em><p> <h2>Context</h2> <pre># Packages matching: installed # Name # Installed # Synopsis base-bigarray base base-threads base base-unix base conf-findutils 1 Virtual package relying on findutils conf-gmp 4 Virtual package relying on a GMP lib system installation coq 8.15.1 Formal proof management system dune 3.2.0 
Fast, portable, and opinionated build system ocaml 4.10.2 The OCaml compiler (virtual package) ocaml-base-compiler 4.10.2 Official release 4.10.2 ocaml-config 1 OCaml Switch Configuration ocamlfind 1.9.3 A library manager for OCaml zarith 1.12 Implements arithmetic and logical operations over arbitrary-precision integers # opam file: opam-version: &quot;2.0&quot; name: &quot;coq-paco&quot; version: &quot;4.0.0&quot; maintainer: &quot;paco@sf.snu.ac.kr&quot; synopsis: &quot;Coq library implementing parameterized coinduction&quot; homepage: &quot;https://github.com/snu-sf/paco/&quot; dev-repo: &quot;git+https://github.com/snu-sf/paco.git&quot; bug-reports: &quot;https://github.com/snu-sf/paco/issues/&quot; authors: [ &quot;Chung-Kil Hur &lt;gil.hur@sf.snu.ac.kr&gt;&quot; &quot;Georg Neis &lt;neis@mpi-sws.org&gt;&quot; &quot;Derek Dreyer &lt;dreyer@mpi-sws.org&gt;&quot; &quot;Viktor Vafeiadis &lt;viktor@mpi-sws.org&gt;&quot; &quot;Minki Cho &lt;minki.cho@sf.snu.ac.kr&gt;&quot; ] license: &quot;BSD-3&quot; build: [make &quot;-C&quot; &quot;src&quot; &quot;all&quot; &quot;-j%{jobs}%&quot;] install: [make &quot;-C&quot; &quot;src&quot; &quot;-f&quot; &quot;Makefile.coq&quot; &quot;install&quot;] remove: [&quot;rm&quot; &quot;-r&quot; &quot;-f&quot; &quot;%{lib}%/coq/user-contrib/Paco&quot;] depends: [ &quot;coq&quot; {&gt;= &quot;8.6&quot; &amp; &lt; &quot;8.13~&quot;} ] tags: [ &quot;date:2019-04-30&quot; &quot;category:Computer Science/Programming Languages/Formal Definitions and Theory&quot; &quot;category:Mathematics/Logic&quot; &quot;keyword:co-induction&quot; &quot;keyword:simulation&quot; &quot;keyword:parameterized greatest fixed point&quot; ] url { http: &quot;https://github.com/snu-sf/paco/archive/v4.0.0.tar.gz&quot; checksum: &quot;df80f84e36d5801f726382f15d94dcf1&quot; } </pre> <h2>Lint</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Dry install 🏜️</h2> <p>Dry install with the current Coq 
version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam install -y --show-action coq-paco.4.0.0 coq.8.15.1</code></dd> <dt>Return code</dt> <dd>5120</dd> <dt>Output</dt> <dd><pre>[NOTE] Package coq is already installed (current version is 8.15.1). The following dependencies couldn&#39;t be met: - coq-paco -&gt; coq &lt; 8.13~ -&gt; ocaml &lt; 4.10 base of this switch (use `--unlock-base&#39; to force) Your request can&#39;t be satisfied: - No available version of coq satisfies the constraints No solution found, exiting </pre></dd> </dl> <p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-paco.4.0.0</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Install dependencies</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Install 🚀</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Installation size</h2> <p>No files were installed.</p> <h2>Uninstall 🧹</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Missing removes</dt> <dd> none </dd> <dt>Wrong removes</dt> <dd> none </dd> </dl> </div> </div> </div> <hr/> <div class="footer"> <p class="text-center"> Sources are on <a href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣 </p> </div> </div> <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script> <script src="../../../../../bootstrap.min.js"></script> </body> </html>
{ "content_hash": "9fde47cc57c5e64f87897ef0f6d64045", "timestamp": "", "source": "github", "line_count": 176, "max_line_length": 159, "avg_line_length": 41.28409090909091, "alnum_prop": 0.5456922653454446, "repo_name": "coq-bench/coq-bench.github.io", "id": "6b2715557a96259211cadad696eb957a83056762", "size": "7291", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.10.2-2.0.6/released/8.15.1/paco/4.0.0.html", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
using System; using MonoTouch.Foundation; using MonoTouch.UIKit; using MonoTouch.CoreImage; using System.Collections.Generic; using MonoTouch.Dialog; using System.Drawing; using System.Linq; using System.Reflection; using MonoTouch.CoreGraphics; namespace coreimage { [Register ("AppDelegate")] public partial class AppDelegate : UIApplicationDelegate { /// <summary> /// "Flower" © 2012 Milica Sekulic, used under a Creative Commons Attribution-ShareAlike license: http://creativecommons.org/licenses/by-sa/3.0/ /// </summary> CIImage flower = CIImage.FromCGImage (UIImage.FromFile ("flower.png").CGImage); /// <summary> /// "Sunrise near Atkeison Plateau" © 2012 Charles Atkeison, used under a Creative Commons Attribution-ShareAlike license: http://creativecommons.org/licenses/by-sa/3.0/ /// </summary> CIImage clouds = CIImage.FromCGImage (UIImage.FromFile ("clouds.jpg").CGImage); /// <summary> /// "canon" © 2012 cuatrok77 hernandez, used under a Creative Commons Attribution-ShareAlike license: http://creativecommons.org/licenses/by-sa/3.0/ /// </summary> CIImage heron = CIImage.FromCGImage (UIImage.FromFile ("heron.jpg").CGImage); UIWindow window; #region UIApplicationDelegate Methods public override bool FinishedLaunching (UIApplication app, NSDictionary options) { var root = new RootElement ("Effects") { new Section () { new RootElement ("Color Adjustment"){ new Section () { new RootElement ("ColorControls", (x) => Demo (ColorControls)), new RootElement ("ColorMatrix", (x) => Demo (ColorMatrix)), new RootElement ("ExposureAdjust", (x) => Demo (ExposureAdjust)), new RootElement ("GammaAdjust", (x) => Demo (GammaAdjust)), new RootElement ("HueAdjust", (x) => Demo (HueAdjust)), new RootElement ("TemperatureAndTint", (x) => Demo (TemperatureAndTint)), new RootElement ("ToneCurve", (x) => Demo (ToneCurve)), new RootElement ("Vibrance", (x) => Demo (Vibrance)), new RootElement ("WhitePointAdjust", (x) => Demo(WhitePointAdjust)) } }, new RootElement ("Color Effect"){ new 
Section () { new RootElement ("ColorCube", (x) => Demo (ColorCube)), new RootElement ("ColorInvert", (x) => Demo (ColorInvert)), new RootElement ("ColorMonochrome", (x) => Demo (ColorMonochrome)), new RootElement ("FalseColor", (x) => Demo (FalseColor)), new RootElement ("SepiaTone", (x) => Demo (SepiaTone)), } }, new RootElement ("Composite Operation"){ new Section () { new RootElement ("AdditionCompositing", (x) => Demo (AdditionCompositing)), new RootElement ("ColorBlendMode", (x) => Demo (ColorBlendMode)), new RootElement ("ColorBurnBlendMode", (x) => Demo (ColorBurnBlendMode)), new RootElement ("ColorDodgeBlendMode", (x) => Demo (ColorDodgeBlendMode)), new RootElement ("DarkenBlendMode", (x) => Demo (DarkenBlendMode)), new RootElement ("DifferenceBlendMode", (x) => Demo (DifferenceBlendMode)), new RootElement ("ExclusionBlendMode", (x) => Demo (ExclusionBlendMode)), new RootElement ("HardLightBlendMode", (x) => Demo (HardLightBlendMode)), new RootElement ("HueBlendMode", (x) => Demo (HueBlendMode)), new RootElement ("LightenBlendMode", (x) => Demo (LightenBlendMode)), new RootElement ("LuminosityBlendMode", (x) => Demo (LuminosityBlendMode)), new RootElement ("MaximumCompositing", (x) => Demo (MaximumCompositing)), new RootElement ("MinimumCompositing", (x) => Demo (MinimumCompositing)), new RootElement ("MultiplyCompositing", (x) => Demo (MultiplyCompositing)), new RootElement ("MultiplyBlendMode", (x) => Demo (MultiplyBlendMode)), new RootElement ("OverlayBlendMode", (x) => Demo (OverlayBlendMode)), new RootElement ("SaturationBlendMode", (x) => Demo (SaturationBlendMode)), new RootElement ("ScreenBlendMode", (x) => Demo (ScreenBlendMode)), new RootElement ("SoftLightBlendMode", (x) => Demo (SoftLightBlendMode)), new RootElement ("SourceAtopCompositing", (x) => Demo (SourceAtopCompositing)), new RootElement ("SourceInCompositing", (x) => Demo(SourceInCompositing)), new RootElement ("SourceOutCompositing", (x) => Demo(SourceOutCompositing)), new RootElement 
("SourceOverCompositing", (x) => Demo (SourceOverCompositing)), } }, new RootElement ("Distortions"){ new Section () { } }, new RootElement ("Generators"){ new Section () { new RootElement ("CheckerboardGenerator", (x) => Demo (CheckerboardGenerator)), new RootElement ("ConstantColorGenerator", (x) => Demo (ConstantColorGenerator)), new RootElement ("StripesGenerator", (x) => Demo (StripesGenerator)), } }, new RootElement ("Geometry Adjust"){ new Section () { new RootElement ("AffineTransform", (x) => Demo (AffineTransform)), new RootElement ("Crop", (x) => Demo (Crop)), new RootElement ("StraightenFilter", (x) => Demo (StraightenFilter)), } }, new RootElement ("Gradients"){ new Section () { new RootElement ("GaussianGradient", (x) => Demo (GaussianGradient)), new RootElement ("LinearGradient", (x) =>Demo(LinearGradient)), new RootElement ("RadialGradient", (x) => Demo (RadialGradient)), } }, new RootElement ("Stylize"){ new Section () { new RootElement ("HighlightShadowAdjust", (x) => Demo (HighlightShadowAdjust)), } }, new RootElement ("Vignette", (x) => Demo (Vignette)) } }; window = new UIWindow (UIScreen.MainScreen.Bounds) { RootViewController = new UINavigationController (new DialogViewController (root)) }; window.MakeKeyAndVisible (); return true; } #endregion #region Helper Methods // // Utility function used by pure-output generation filters // public CIImage Crop (CIFilter input) { return new CICrop () { Image = input.OutputImage, Rectangle = new CIVector (0, 0, window.Bounds.Width, window.Bounds.Height) }.OutputImage; } public delegate CIImage ImageFilter (); public UIViewController Demo (ImageFilter makeDemo) { var v = new UIViewController (); var imageView = new UIImageView (v.View.Bounds); v.View.AddSubview (imageView); var output = makeDemo (); var context = CIContext.FromOptions (null); var result = context.CreateCGImage (output, output.Extent); imageView.Image = UIImage.FromImage (result); return v; } #endregion #region Filter Methods #region 
CICategoryColorAdjustment /// <summary> /// Multiplies source color values and adds a bias factor to each color component. /// </summary> /// <returns> /// The altered Image /// </returns> [Filter] public CIImage ColorMatrix() { var rVector = new CIVector (.5F, 0F, 0F); // Multiple the Red Values by .5 (s.r = dot(s, rVector)) var gVector = new CIVector (0F, 1.5F, 0F); // Multiple the Green Vector by 1.5 (s.g = dot(s, gVector)) var bVector = new CIVector (0F, 0F, .75F); // Multiple the Blue Vectoer by .75 (s.b = dot(s, bVector)) var aVector = new CIVector (0F, 0F, 0F, 1.25F); // Multiple the Alpha values by 1.25 (s.a = dot(s, bVector)) var biasVector = new CIVector (0, 1, 0, 0); // A Bias to be Added to each Color Vector (s = s + bias) var colorMatrix = new CIColorMatrix () { Image = flower, RVector = rVector, GVector = gVector, BVector = bVector, AVector = aVector, BiasVector = biasVector }; return colorMatrix.OutputImage; } /// <summary> /// Adjusts saturation, brightness, and contrast values. /// </summary> /// <returns> /// Altered Image /// </returns> [Filter] public CIImage ColorControls () { var colorCtrls = new CIColorControls () { Image = flower, Brightness = .5F, // Min: 0 Max: 2 Saturation = 1.2F, // Min: -1 Max: 1 Contrast = 3.1F // Min: 0 Max: 4 }; return colorCtrls.OutputImage; } /// <summary> /// Changes the overall hue, or tint, of the source pixels. /// </summary> /// <returns> /// The Altered Image. /// </returns> [Filter] public CIImage HueAdjust() { var hueAdjust = new CIHueAdjust() { Image = flower, Angle = 1F // Default is 0 }; return hueAdjust.OutputImage; } /// <summary> /// Adapts the reference white point for an image. 
/// </summary> /// <returns> /// The Color Adjusted Image /// </returns> [Filter] public CIImage TemperatureAndTint() { var temperatureAdjust = new CITemperatureAndTint() { Image = flower, Neutral = new CIVector(6500, 0), // Default [6500, 0] TargetNeutral = new CIVector(4000, 0), // Default [6500, 0] }; return temperatureAdjust.OutputImage; } /// <summary> /// Adjusts tone response of the R, G, and B channels of an image. /// </summary> /// <returns> /// The adjusted image /// </returns> [Filter] public CIImage ToneCurve() { var point0 = new CIVector(0,0); // Default [0 0] var point1 = new CIVector(.1F, .5F); // Default [.25 .25] var point2 = new CIVector(.3F, .15F); // Default [.3 .15] var point3 = new CIVector(.6F, .6F); // Default [.75 .75] var point4 = new CIVector(1.1F, 1F); // Default [1 1] var toneCurve = new CIToneCurve() { Image = flower, Point0 = point0, Point1 = point1, Point2 = point2, Point3 = point3, Point4 = point4, }; return toneCurve.OutputImage; } /// <summary> /// Adjusts the saturation of an image while keeping pleasing skin tones. /// </summary> [Filter] public CIImage Vibrance() { var vibrance = new CIVibrance() { Image = flower, Amount = -1.0F // Default 0 }; return vibrance.OutputImage; } /// <summary> /// Add a reduction of an image's brightness or saturation at the periphery compared to the image center. /// </summary> [Filter] public CIImage Vignette() { var vignette = new CIVignette() { Image = flower, Intensity = 2F, Radius = 10F, }; return vignette.OutputImage; } /// <summary> /// Adjusts the reference white point for an image and maps all colors in the source using the new reference. 
/// </summary> /// <returns> /// The Color Adjusted Image /// </returns> public CIImage WhitePointAdjust() { var whitePointAdjust = new CIWhitePointAdjust() { Image = flower, Color = new CIColor(new CGColor(255F, 0, 187F)) // A Magenta Color }; return whitePointAdjust.OutputImage; } #endregion #region CICategoryColorEffect /// <summary> /// Applies a Sepia Filter to an Image /// </summary> /// <returns> /// Image with Sepia Filter /// </returns> [Filter] public CIImage SepiaTone () { var sepia = new CISepiaTone () { Image = flower, Intensity = .8f }; return sepia.OutputImage; } /// <summary> /// Uses a three-dimensional color table to transform the source image pixels. /// </summary> /// <returns> /// The Altered Image /// </returns> [Filter] public CIImage ColorCube () { var data = new NSData (); var cube = new CIColorCube () { Image = flower, CubeDimension = Convert.ToSingle (Math.Pow (2F, 2F)), //CubeData = }; return cube.OutputImage; } /// <summary> /// Inverts the colors in an image. /// </summary> /// <returns> /// The Altered Image /// </returns> public CIImage ColorInvert () { var invert = new CIColorInvert () { Image = flower }; return invert.OutputImage; } /// <summary> /// Remaps colors so they fall within shades of a single color. /// </summary> /// <returns> /// The Altered Image /// </returns> [Filter] public CIImage ColorMonochrome () { var inputColor = new CIColor (new CGColor (100F, 0F, 100F)); // Make it Purple R + B = Purple var monoChrome = new CIColorMonochrome () { Image = flower, Color = inputColor, Intensity = 1F, // Default 1 }; return monoChrome.OutputImage; } /// <summary> /// Maps luminance to a color ramp of two colors. 
/// </summary> /// <returns> /// The altered Image /// </returns> [Filter] public CIImage FalseColor () { var color0 = new CIColor (new CGColor (255F, 251F, 0F)); // A Yellowish Color var color1 = new CIColor (new CGColor (51F, 0F, 255F)); // A Purplish Color var falseColor = new CIFalseColor () { Image = flower, Color0 = color0, Color1 = color1 }; return falseColor.OutputImage; } /// <summary> /// Adjusts midtone brightness. /// </summary> /// <returns> /// Alters the image /// </returns> [Filter] public CIImage GammaAdjust () { var gammaAdjust = new CIGammaAdjust () { Image = flower, Power = 3F, // Default value: 0.75 }; return gammaAdjust.OutputImage; } #endregion #region CategoryGradient /// <summary> /// Generates a gradient that varies from one color to another using a Gaussian distribution. /// </summary> /// <returns> /// The gradient. /// </returns> [Filter] public CIImage GaussianGradient () { var centerVector = new CIVector (100, 100); // Default is [150 150] var color1 = CIColor.FromRgba (1, 0, 1, 1); var color0 = CIColor.FromRgba (0, 1, 1, 1); var gaussGradient = new CIGaussianGradient () { Center = centerVector, Color0 = color0, Color1 = color1, Radius = 280f // Default is 300 }; return Crop (gaussGradient); } /// <summary> /// Generates a gradient that varies along a linear axis between two defined endpoints. /// </summary> /// <returns> /// The gradient. /// </returns> [Filter] public CIImage LinearGradient() { var point0 = new CIVector(0, 0); // Default [0 0] var point1 = new CIVector(250, 250); // Default [200 200] var linearGrad = new CILinearGradient() { Point0 = point0, Point1 = point1, Color0 = new CIColor (UIColor.Red), Color1 = new CIColor (UIColor.Blue) }; return Crop (linearGrad); } /// <summary> /// Generates a gradient that varies radially between two circles having the same center. /// </summary> /// <returns> /// The gradient. 
/// </returns> [Filter] public CIImage RadialGradient() { var center = new CIVector(100, 100); // Default [150 150] var radGradient = new CIRadialGradient() { Center = center, Radius0 = 10F, // Default 5 Radius1 = 150F, // Default 100 Color0 = new CIColor(new CGColor(0, 255F, 0)), // Green Color1 = new CIColor(new CGColor(0, 0, 0)) // Black }; return Crop (radGradient); } #endregion #region CICategoryGeometryAdjustment /// <summary> /// Applies a crop to an image. /// </summary> [Filter] public CIImage Crop () { var crop = new CICrop () { Image = flower, Rectangle = new CIVector (0, 0, 300, 300) }; return crop.OutputImage; } /// <summary> /// Applies an affine transform to an image /// </summary> /// <returns> /// The Altered Image /// </returns> [Filter] public CIImage AffineTransform () { // Create an AffineTransform to Skew the Image var transform = new CGAffineTransform (1F, .5F, .5F, 1F, 0F, 0F); var affineTransform = new CIAffineTransform () { Image = flower, Transform = transform }; return affineTransform.OutputImage; } /// <summary> /// Adjusts the exposure setting for an image similar to the way you control exposure for a camera when you change the F-stop. /// </summary> /// <returns> /// The altered Image /// </returns> [Filter] public CIImage ExposureAdjust () { var exposureAdjust = new CIExposureAdjust () { Image = flower, EV = 2F // Default value: 0.50 Minimum: 0.00 Maximum: 0.00 Slider minimum: -10.00 Slider maximum: 10.00 Identity: 0.00 }; return exposureAdjust.OutputImage; } /// <summary> /// Rotates the source image by the specified angle in radians. /// </summary> /// <returns> /// The filtered Image /// </returns> [Filter] public CIImage StraightenFilter() { var straightFilter = new CIStraightenFilter() { Image = heron, Angle = Convert.ToSingle(Math.PI / 4.0) // Change by 45 degrees = pi/4 Radians. 
}; return straightFilter.OutputImage; } #endregion #region CICategoryCompositeOperation /// <summary> /// Adds color components to achieve a brightening effect. /// </summary> /// <returns> /// The composite Image /// </returns> [Filter] public CIImage AdditionCompositing () { var addComp = new CIAdditionCompositing () { Image = heron, BackgroundImage = clouds, }; return addComp.OutputImage; } /// <summary> /// Uses the luminance values of the background with the hue and saturation values of the source image. /// </summary> /// <returns> /// The composite image /// </returns> [Filter] public CIImage ColorBlendMode () { var colorBlend = new CIColorBlendMode () { Image = heron, BackgroundImage = clouds }; return colorBlend.OutputImage; } /// <summary> /// Darkens the background image samples to reflect the source image samples. /// </summary> /// <returns> /// The composite image /// </returns> [Filter] public CIImage ColorBurnBlendMode() { var colorBurn = new CIColorBurnBlendMode() { Image = heron, BackgroundImage = clouds }; return colorBurn.OutputImage; } /// <summary> /// Brightens the background image samples to reflect the source image samples. /// </summary> /// <returns> /// The composite Image /// </returns> public CIImage ColorDodgeBlendMode () { var colorDodgeBlend = new CIColorDodgeBlendMode () { Image = heron, BackgroundImage = clouds, }; return colorDodgeBlend.OutputImage; } /// <summary> /// Creates composite image samples by choosing the darker samples (from either the source image or the background). /// </summary> /// <returns> /// The composite Image /// </returns> [Filter] public CIImage DarkenBlendMode() { var darkenBlend = new CIDarkenBlendMode() { Image = heron, BackgroundImage = clouds }; return darkenBlend.OutputImage; } /// <summary> /// Subtracts either the source image sample color from the background image sample color, or the reverse, depending on which sample has the greater brightness value. 
/// </summary> /// <returns> /// The composite image. /// </returns> [Filter] public CIImage DifferenceBlendMode () { var differenceBlend = new CIDifferenceBlendMode () { Image = heron, BackgroundImage = clouds }; return differenceBlend.OutputImage; } /// <summary> /// Produces an effect similar to that produced by the CIDifferenceBlendMode filter but with lower contrast. /// </summary> /// <returns> /// The composite Image /// </returns> [Filter] public CIImage ExclusionBlendMode () { var exclusionBlend = new CIExclusionBlendMode () { Image = heron, BackgroundImage = clouds }; return exclusionBlend.OutputImage; } /// <summary> /// Either multiplies or screens colors, depending on the source image sample color. /// </summary> /// <returns> /// The composite image /// </returns> [Filter] public CIImage HardLightBlendMode () { var hardLightBlend = new CIHardLightBlendMode () { Image = heron, BackgroundImage = clouds }; return hardLightBlend.OutputImage; } /// <summary> /// Uses the luminance and saturation values of the background with the hue of the source image. /// </summary> /// <returns> /// The composite Image /// </returns> [Filter] public CIImage HueBlendMode() { var hueBlend = new CIHueBlendMode() { Image = heron, BackgroundImage = clouds }; return hueBlend.OutputImage; } /// <summary> /// Creates composite image samples by choosing the lighter samples (either from the source image or the background). /// </summary> /// <returns> /// The composite Image /// </returns> [Filter] public CIImage LightenBlendMode() { var lightenBlend = new CILightenBlendMode() { Image = heron, BackgroundImage = clouds }; return lightenBlend.OutputImage; } /// <summary> /// Uses the hue and saturation of the background with the luminance of the source image. 
/// </summary> /// <returns> /// The composite image /// </returns> [Filter] public CIImage LuminosityBlendMode() { var luminosityBlend = new CILuminosityBlendMode() { Image = heron, BackgroundImage = clouds }; return luminosityBlend.OutputImage; } /// <summary> /// Computes the maximum value, by color component, of two input images and creates an output image using the maximum values. /// </summary> /// <returns> /// The composite image. /// </returns> [Filter] public CIImage MaximumCompositing() { var maxComposite = new CIMaximumCompositing() { Image = heron, BackgroundImage = clouds }; return maxComposite.OutputImage; } /// <summary> /// Computes the minimum value, by color component, of two input images and creates an output image using the minimum values. /// </summary> /// <returns> /// The composite image /// </returns> [Filter] public CIImage MinimumCompositing() { var minComposite = new CIMinimumCompositing() { Image = heron, BackgroundImage = clouds }; return minComposite.OutputImage; } /// <summary> /// Multiplies the source image samples with the background image samples. /// </summary> /// <returns> /// The composite image /// </returns> [Filter] public CIImage MultiplyBlendMode() { var multiBlend = new CIMultiplyBlendMode() { Image = heron, BackgroundImage = clouds }; return multiBlend.OutputImage; } /// <summary> /// Multiplies the color component of two input images and creates an output image using the multiplied values. /// </summary> /// <returns> /// The composite image /// </returns> [Filter] public CIImage MultiplyCompositing() { var multiComposite = new CIMultiplyCompositing() { Image = heron, BackgroundImage = clouds }; return multiComposite.OutputImage; } /// <summary> /// Overlaies the blend mode. /// </summary> /// <returns> /// The blend mode. 
/// </returns> [Filter] public CIImage OverlayBlendMode() { var overlayBlend = new CIOverlayBlendMode() { Image = heron, BackgroundImage = clouds }; return overlayBlend.OutputImage; } /// <summary> /// Saturations the blend mode. /// </summary> /// <returns> /// The composite image. /// </returns> [Filter] public CIImage SaturationBlendMode() { var saturationBlend = new CISaturationBlendMode() { Image = heron, BackgroundImage = clouds, }; return saturationBlend.OutputImage; } /// <summary> /// Multiplies the inverse of the source image samples with the inverse of the background image samples. /// </summary> /// <returns> /// The composite image /// </returns> [Filter] public CIImage ScreenBlendMode() { var screenBlend = new CIScreenBlendMode() { Image = heron, BackgroundImage = clouds }; return screenBlend.OutputImage; } /// <summary> /// Either darkens or lightens colors, depending on the source image sample color. /// </summary> /// <returns> /// The Composite Image. /// </returns> [Filter] public CIImage SoftLightBlendMode() { var softLightBlend = new CISoftLightBlendMode() { Image = heron, BackgroundImage = clouds }; return softLightBlend.OutputImage; } /// <summary> /// Places the source image over the background image, then uses the luminance of the background image to determine what to show. /// </summary> /// <returns> /// The Composite Image /// </returns> [Filter] public CIImage SourceAtopCompositing() { var sourceAtopComposite = new CISourceAtopCompositing() { Image = heron, BackgroundImage = clouds, }; return sourceAtopComposite.OutputImage; } /// <summary> /// Uses the second image to define what to leave in the source image, effectively cropping the image. 
/// </summary> /// <returns> /// The composite image /// </returns> [Filter] public CIImage SourceInCompositing() { var sourceComposite = new CISourceInCompositing() { Image = clouds, // This image will be Cropped BackgroundImage = heron }; return sourceComposite.OutputImage; } /// <summary> /// Uses the second image to define what to take out of the first image. /// </summary> /// <returns> /// The composite image /// </returns> [Filter] public CIImage SourceOutCompositing() { var sourceOutComposite = new CISourceOutCompositing() { Image = clouds, // This Image will be Cropped BackgroundImage = heron }; return sourceOutComposite.OutputImage; } /// <summary> /// Places the second image over the first. /// </summary> /// <returns> /// The composite Image /// </returns> [Filter] public CIImage SourceOverCompositing() { var sourceOverComposite = new CISourceOverCompositing() { Image = heron, BackgroundImage = clouds }; return sourceOverComposite.OutputImage; } #endregion #region CICategoryGenerator /// <summary> /// Generates a checkerboard pattern. /// </summary> /// <returns> /// An Image of a Checkboard pattern /// </returns> [Filter] public CIImage CheckerboardGenerator () { // Color 1 var c0 = CIColor.FromRgb (1, 0, 0); var c1 = CIColor.FromRgb (0, 1, 0); var checker = new CICheckerboardGenerator () { Color0 = c0, Color1 = c1, Center = new CIVector (new float[] { 10 , 10 }), // Default [80 80] Sharpness = 1F // Default 1 }; return Crop (checker); } /// <summary> /// Generates a solid color. /// </summary> /// <returns> /// A Solid Color Image /// </returns> [Filter] public CIImage ConstantColorGenerator () { var colorGen = new CIConstantColorGenerator () { Color = new CIColor (UIColor.Blue) }; return Crop (colorGen); } /// <summary> /// Generates a stripe pattern. /// </summary> /// <returns> /// The generated pattern. 
/// </returns> [Filter] public CIImage StripesGenerator() { var stripeGen = new CIStripesGenerator() { Center = new CIVector(150, 100), // Default [150 150] Color0 = new CIColor (UIColor.Blue), Color1 = new CIColor (UIColor.Red), Width = 10, }; return Crop (stripeGen); } #endregion #region CICategoryStylize /// <summary> /// Adjust the tonal mapping of an image while preserving spatial detail. /// </summary> /// <returns> /// The altered Image /// </returns> [Filter] public CIImage HighlightShadowAdjust () { var shadowAdjust = new CIHighlightShadowAdjust () { Image = flower, HighlightAmount = .75F, // Default is 1 ShadowAmount = 1.5F // Default is 0 }; return shadowAdjust.OutputImage; } #endregion #endregion } }
{ "content_hash": "65119accf151f7cff85586cce4c19174", "timestamp": "", "source": "github", "line_count": 1083, "max_line_length": 172, "avg_line_length": 25.542936288088644, "alnum_prop": 0.6312764342262227, "repo_name": "bratsche/monotouch-samples", "id": "63257775bed6685790a319fd3badbddb5b1760f0", "size": "27666", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "CoreImage/AppDelegate.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "1429454" }, { "name": "D", "bytes": "1978" }, { "name": "Objective-C", "bytes": "5970" }, { "name": "Shell", "bytes": "4933" } ], "symlink_target": "" }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.grinder.util; import java.util.Map; import java.util.Set; import net.grinder.plugin.http.tcpproxyfilter.options.GenerationOption; import net.grinder.plugin.http.xml.BaseURIType; import net.grinder.plugin.http.xml.BodyType; import net.grinder.plugin.http.xml.CommonHeadersType; import net.grinder.plugin.http.xml.HeadersType; import net.grinder.plugin.http.xml.RelativeURIType; import net.grinder.plugin.http.xml.TokenReferenceType; import net.grinder.plugin.http.xml.TokenType; /** * Abstract Helper class for Freemaker templates. The instance of the subclass will be passed to * Freemaker templates as a name "f" * * Freemaker templates can refer this like ${f.escapeQuote(hello)}. * * @author JunHo Yoon * @since 1.0 */ public abstract class Functions { /** * Generate token reference string. * * @param token * token * @return generated snippet */ public abstract String generateTokenReference(TokenReferenceType token); /** * Generate the header reference string. * * @param headers * headers * @return generated snippet */ public abstract String generateHeaderParameter(HeadersType headers); /** * Generate the POST method body string. * * @param tokens * token list which POST method will use * @param bodyType * bodyType * @return generated snippet */ public abstract String generatePostParameter(TokenReferenceType[] tokens, BodyType bodyType); /** * Generate URL construction string. * * @param relativeURIType * uri type. 
* @return generated snippet */ public abstract String generatePathString(RelativeURIType relativeURIType); private final Map<String, CommonHeadersType> headerMap; private final Map<String, TokenType> tokenMap; private final Set<GenerationOption> genetationOpts; private final Map<String, BaseURIType> urlMap; /** * Constructor. * * @param urlMap * map between url extends ids and real {@link BaseURIType}s * @param headerMap * map between header extends ids and real {@link CommonHeadersType}s * @param tokenMap * map between token extends ids and real {@link TokenType}s * @param genetationOpts * script generation option list */ public Functions(Map<String, BaseURIType> urlMap, Map<String, CommonHeadersType> headerMap, Map<String, TokenType> tokenMap, Set<GenerationOption> genetationOpts) { this.urlMap = urlMap; this.headerMap = headerMap; this.tokenMap = tokenMap; this.genetationOpts = genetationOpts; } /** * Quote the given value. * * @param value * value * @return Quote value */ public abstract String escapeQuote(String value); /** * Check if the option is applied. * * @param optionKey * option key * @return true if applied. * @see GenerationOption */ public boolean hasOption(String optionKey) { GenerationOption option = GenerationOption.valueOf(optionKey); return getGenetationOpts().contains(option); } /** * Generate URI string from the given {@link BaseURIType} parameter. * * @param uri * uri * @return URL string. */ public String generateURLString(BaseURIType uri) { String url = uri.getScheme() + "://" + uri.getHost(); if (!(uri.getScheme() == BaseURIType.Scheme.HTTP && uri.getPort() == 80) && !(uri.getScheme() == BaseURIType.Scheme.HTTPS && uri.getPort() == 443)) { url = url + ":" + uri.getPort(); } return url; } /** * Wrap the given snippet with comment. 
* * @param snippet * snippet * @return commented snippet */ public abstract String wrapWithComment(String snippet); public Map<String, CommonHeadersType> getHeaderMap() { return headerMap; } public Map<String, TokenType> getTokenMap() { return tokenMap; } public Set<GenerationOption> getGenetationOpts() { return genetationOpts; } public Map<String, BaseURIType> getUrlMap() { return urlMap; } }
{ "content_hash": "7fdd02b90c43889e8dbc82394a8b3519", "timestamp": "", "source": "github", "line_count": 165, "max_line_length": 96, "avg_line_length": 27.715151515151515, "alnum_prop": 0.6964793352285152, "repo_name": "naver/ngrinder-recorder", "id": "53ef97e5b2ce2ff2d2677f29c72063e3616d1dca", "size": "4573", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/main/java/net/grinder/util/Functions.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "391338" }, { "name": "Shell", "bytes": "4786" } ], "symlink_target": "" }
from __future__ import division from math import exp from math import log """ This module contains a collection of functions to calculate a mean tree height in meters using various height growth curves found in the literature. 5/04/2016 Ryan Michie Oregon DEQ # --------------- Citations Bruce D. 1981. Consistent height-growth and growth-rate estimates for remeasured plots. Forest Science 27(4):711-725. Curtis, R.O., Herman, F.R., DeMars, D.J. 1974. Height growth and site index for Douglas-fir in high-elevation forests of the Oregon-Washington Cascades. Forest Science 20(4):307-316. Farr, W.A. 1984. Site index and height growth curves for unmanaged even-aged stands of Western hemlock and Sitka spruce in southeast Alaska. Research Paper. PNW-326. U.S. Department of Agriculture, Forest Service, Pacific Northwest Forest and Range Experiment Station. Portland, OR. Keyser, C.E. 2008a (revised June 16, 2015). Pacific northwest coast (PN) variant overview forest vegetation simulator. Internal Report Fort Collins, CO: U.S. Department of Agriculture, Forest Service, Forest Management Service Center. Keyser, C.E. 2008b (revised June 16, 2015). Westside cascades (WC) variant overview forest vegetation simulator. Internal Report. Fort Collins, CO: U.S. Department of Agriculture, Forest Service, Forest Management Service Center. King, J.E. 1966. Site index curves for Douglas-fir in the pacific northwest. Weyerhaeuser Forestry Paper No. 8. Weyerhaeuser Forestry Research Center. Centralia, WA. Kurucz, J.F. 1985 Metric SI tables for red cedar stands. In: Mitchell K.J., Polsson, K.R. 1988. Site index curves and tables for British Columbia: Coastal Species. Report 37. Forest Resource Development Agreement, BC Ministry of Forests and Lands, Government of Canada Mitchell K.J., Polsson, K.R. 1988. Site index curves and tables for British Columbia: Coastal Species. Report 37. Forest Resource Development Agreement, BC Ministry of Forests and Lands, Government of Canada. 
Nussbaum, A.F., 1996. Site index curves and tables for British Columbia: coastal speceis. 2nd edition. B.C. Ministiry of Forests Research Program. Land management handbook. Field Guide Insert 3. Thrower J.S., Goudie, J.W. 1992. Development of height-age and site-index fuctions for even age interior douglas-fir in British Columbia. Research Note No. 109. BC Ministiry of Forests and Lands, Government of Canada Wiley, Kenneth N. 1978. Site index tables for western hemlock in the Pacific Northwest. For. Pap. No. 17. Centralia, WA: Weyerhaeuser Forestry Research Center. 28 p. Worthington N.P., Johnson F.A., Staebler G.R., Lloyd W.J. 1960, Normal yield tables for red alder. Research Paper 36. U.S. Department of Agricuture, Forest Service, Pacfifc Northwest Forest and Range Experiment Station, Portland, OR. """ def ht_western_redcedar_farr(si50_ft, tree_age, ytobh=4.7): """ Returns height in meters of western redcedar given the 50 year base site index height in feet, tree age, and years to breast height. Equation is from Farr 1984 using formulation presented in Keyser 2008a. Keyser 2008a uses this equation in the Forest Vegetation Simulator (FVS) Pacific Northwest Coast (PN) variant to calcuate height of Western redcedar and Sitka spruce. Default of 4.7 years required to reach breast height from Harrington and Gould 2010 "Growth of Western Redcedar and Yellow-Cedar" page 99 in PNW-GTR-828 (A Tale of Two Cedars...) 
""" b0 = -0.2050542 b1 = 1.449615 b2 = -0.01780992 b3 = 0.0000651975 b4 = -0.0000000000000000000000109559 b5 = -5.611879 b6 = 2.418604 b7 = -0.259311 b8 = 0.000135145 b9 = -0.00000000000170114 b10 = 0.0000000000000000000000000079642 b11 = -86.43 # calculate the breast height age age_bh = tree_age - ytobh ht_ft = (4.5 + exp(b0 + b1* log(age_bh) + b2*(log(age_bh))**3 + b3*(log(age_bh))**5 + b4*(log(age_bh))**30) + ((si50_ft - 4.5) + b11) * (exp (b5 + b6* log(age_bh) + b7 * (log(age_bh))**2 + b8*(log(age_bh))**5 + b9*(log(age_bh))**16 + b10* (log(age_bh))**36) ) ) ht_m = ht_ft * 0.3048 return ht_m def ht_western_redcedar_kurucz(si50_m, tree_age, ytobh=4.7): """ Returns height in meters of western redcedar given the 50 year base site index height in meters, tree age, and the years to breast height. Equation is from Mitchell and Polsson 1988 using formulation presented in Kurucz 1985. Defulat of 4.7 years required to reach breast height from Harrington and Gould 2010 "Growth of Western Redcedar and Yellow-Cedar" page 99 in PNW-GTR-828 (A Tale of Two Cedars...) """ age_bh = tree_age - ytohb # height equation from Mitchell and Polsson 1988 - pg 7 (appendix 2) # height is in meters b1 = -3.11785 + (0.05027 * 2500) / (si50_m - 1.3) b2 = -0.02465 + (0.01411 * 2500) / (si50_m - 1.3) b3 = 0.00174 + (0.000097667 * 2500) / (si50_m - 1.3) ht_m = 1.3 + (age_bh * age_bh) / (b1 + (b2 * age_bh) + (b3 * (age_bh * age_bh))) if age_bh > 50: ht_m = ht_m + 0.02379545 * ht_m - 0.000475909 * age_bh * ht_m return ht_m def ht_douglas_fir_king(si50_ft, tree_age, ytobh): """ Returns height in meters of Douglas fir given the 50 year base site index height in feet, tree age, and years to breast height. Equation is from King 1966. 
""" # calculate the breast height age age_bh = tree_age - ytobh b0 = -0.954038 b1 = 0.109757 b2 = 0.0558178 b3 = 0.00792236 b4 = -0.000733819 b5 = 0.000197693 Z = 2500 / (si50_ft - 4.5) ht_ft = (((age_bh**2) / (b0 + (b1 * Z) + ((b2 + (b3 * Z)) * age_bh) + ((b4 + (b5 * Z)) * age_bh**2))) + 4.5) ht_m = ht_ft * 0.3048 return ht_m def ht_douglas_fir_curtis(si100_ft, tree_age): """ Returns height in meters of Douglas fir given the 100 year base site index height in feet and tree age. Equation is from Curtis et al 1974. """ b0 = 0.6192 b1 = -5.3394 b2 = 240.29 b3 = 3368.9 ytobh = 0 # calculate the breast height age age_bh = tree_age - ytobh ht_ft = (((si100_ft - 4.5) / (b0 + (b1 / (si100_ft - 4.5))) + (b2 + (b3 / (si100_ft - 4.5))) * age_bh**-1.4) + 4.5) ht_m = ht_ft * 0.3048 return ht_m def ht_douglas_fir_bruce(si50_ft, tree_age, ytobh): """ Returns height in meters of Douglas fir given the 50 year base site index height in feet, tree age, and years to breast height. Equations are from Bruce (1981). """ b3 = -0.477762 - 0.894427 * (si50_ft / 100) + 0.793548 * ((si50_ft / 100)** 3) b2 = log(4.5/si50_ft) / ((ytobh ** b3) - (63.25 - si50_ft / 20) ** b3) ht_ft = (si50_ft * exp(b2*((tree_age) **b3 - (63.25 - si50_ft / 20) ** b3))) ht_m = ht_ft * 0.3048 return ht_m def ht_other(si100_ft, tree_age, ytobh, adj): """ Returns the height of a tree without a specfic site index curve. The equation is based on the 100 year base site index height of Douglas Fir in feet from Curtis et al 1974. Keyser 2008a and Keyser 2008b use this equation in the Forest Vegetation Simulator (FVS) to calcuate height of other species given an adjustment factor to site index. Adjustment factors for each species is found in table 3.4.2. 
This equation can be used for Alaska cedar / western larch, Douglas-fir, coast redwood, western redcedar, bigleaf maple, white alder / Pacific madrone, paper birch, giant chinquapin / tanoak, quaking aspen, black cottonwood, western juniper, whitebark pine, knobcone pine, Pacific yew, Pacific dogwood, hawthorn species, bitter cherry, willow species, and others """ b0 = 0.6192 b1 = -5.3394 b2 = 240.29 b3 = 3368.9 # calculate the breast height age age_bh = tree_age - ytobh # Apply adjustment to site index from Keyser 2008 si_adj = si100_ft * adj ht_ft = (((si_adj - 4.5) / (b0 + (b1 / (si_adj - 4.5))) + (b2 + (b3 / (si_adj - 4.5))) * age_bh**-1.4) + 4.5) ht_m = ht_ft * 0.3048 return ht_m def ht_red_alder_worthington(si50_ft, tree_age): """ Returns the height of red alder in meters given the 50 year base site index height in feet and tree age. Equations are from Worthington et al 1960. """ b0 = 0.60924 b1 = 19.538 ht_m = 1 / ((b0 + b1 / tree_age) / si50_ft) * 0.3048 return ht_m def ht_western_hemlock_wiley(si50_ft, tree_age, ytobh): """ Returns height in meters of western hemlock given the 50 year base site index height in feet, tree age, and years to breast height. Equation is from Wiley 1978 using formulation presented in Keyser 2008a. Keyser 2008a uses this equation in the Forest Vegetation Simulator (FVS) Pacific Northwest Coast (PN) variant. """ b0 = -1.7307 b1 = 0.1394 b2 = -0.0616 b3 = 0.0137 b4 = 0.00192 b5 = 0.00007 # calculate the breast height age age_bh = tree_age - ytobh Z = 2500 / (si50_ft - 4.5) ht_ft = (age_bh**2 / (b0 + (b1 * Z) + ((b2 + (b3 * Z)) * age_bh) + ((b4 + (b5 * Z)) * age_bh**2))) + 4.5 ht_m = ht_ft * 0.3048 return ht_m def ytobh_bruce(si50_ft): """ Returns the years required to reach breast height from equation 8 in Bruce (1981). Applies to Douglas Fir. 
""" ytobh = 13.25 - si50_ft / 20 return ytobh def ytobh_harrington(): """ 4.7 years required to reach breast height from Harrington and Gould 2010 "Growth of Western Redcedar and Yellow-Cedar" page 99 in PNW-GTR-828 (A Tale of Two Cedars...) """ return 4.7 def ytobh_thrower(si50_ft, yrst=4): """ Returns the years required to reach breast height from equation 10 in Thrower and Goudie (1992). yrst = year to stump height (0.3 m) Authors used average of 4 years for yrst. Can be different based on SI. """ ytobh = yrst + (98.97 * 1 / si50_ft) return ytobh def ytobh_mitchell(): """ Mitchell and Polsson 1988 calcualte a mean of 9.5 years to reach breast height for Western Redcedar. """ return 9.5 def ytobh_western_hemlock(): """ Hoyer and Swanzy 1986 report 4 and 5 years to breast height for western hemlock in coastal plots in the Pacfic Nortwest. Using the average of 4.5 years. """ return 4.5 def ytobh_nussbaum(si50_ft): """ Years to breast height for sitka spruce as reported in Nussbaum 1996. """ si50_m = si50_ft * 0.3048 return 11.7-(0.185 * si50_m)
{ "content_hash": "0189ad30371e7ca9c958e5482c9a0eb9", "timestamp": "", "source": "github", "line_count": 341, "max_line_length": 112, "avg_line_length": 32.34310850439883, "alnum_prop": 0.6315169099646387, "repo_name": "rmichie/PyScripts", "id": "ed9f4ef7837fe5d69a644a5fb3541aa466e723d0", "size": "11029", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tree_height_growth.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "267709" } ], "symlink_target": "" }
from .content_types import * from .csv import * from .dynamic import * from .expandable import * from .fields import *
{ "content_hash": "90d518339e78e0ad6be0951fe3876869", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 28, "avg_line_length": 23.8, "alnum_prop": 0.7394957983193278, "repo_name": "digitalocean/netbox", "id": "eacde0040a9eec185dbce537854cd22189ab3ba6", "size": "119", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "netbox/utilities/forms/fields/__init__.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "189339" }, { "name": "HTML", "bytes": "570800" }, { "name": "JavaScript", "bytes": "326125" }, { "name": "Python", "bytes": "1815170" }, { "name": "Shell", "bytes": "2786" } ], "symlink_target": "" }
/** * Listens for the app launching then creates the window. * * @see http://developer.chrome.com/apps/app.runtime.html * @see http://developer.chrome.com/apps/app.window.html */ chrome.app.runtime.onLaunched.addListener(function(launchData) { chrome.app.window.create( 'index.html', { frame: 'none', id: 'mainWindow', bounds: {width: 800, height: 600} } ); });
{ "content_hash": "9c0513b260e6b46c33c955de99dc9b69", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 64, "avg_line_length": 25.0625, "alnum_prop": 0.6458852867830424, "repo_name": "mtusk/super-soaper", "id": "376e177295c68d60434c57c95700c4fa1a0af20a", "size": "401", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "SuperSoaper.App/background.js", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "3718" }, { "name": "CSS", "bytes": "158" }, { "name": "HTML", "bytes": "8889" }, { "name": "JavaScript", "bytes": "3903" } ], "symlink_target": "" }
set +e _term() { echo "caught signal" kill -TERM "$virt_launcher_pid" 2>/dev/null } trap _term SIGTERM SIGINT SIGQUIT # HACK # Try to create /dev/kvm if not present if [ ! -e /dev/kvm ]; then mknod /dev/kvm c 10 $(grep '\<kvm\>' /proc/misc | cut -f 1 -d' ') fi chown :qemu /dev/kvm chmod 660 /dev/kvm # Cockpit/OCP hack to all shoing the vm terminal mv /usr/bin/sh /usr/bin/sh.orig mv /sh.sh /usr/bin/sh chmod +x /usr/bin/sh ./virt-launcher $@ & virt_launcher_pid=$! while true; do if ! [ -d /proc/$virt_launcher_pid ]; then break; fi sleep 1 done # call wait after we know the pid has exited in order # to get the return code. If we call wait before the pid # exits, wait will actually return early when we forward # the trapped signal in _trap(). We don't want that. wait -n $virt_launcher_pid rc=$? echo "virt-launcher exited with code $rc" # if the qemu pid outlives virt-launcher because virt-launcher # segfaulted/panicked/etc... then make sure we perform a sane # shutdown of the qemu process before exitting. qemu_pid=$(pgrep -u qemu) if [ -n "$qemu_pid" ]; then echo "qemu pid outlived virt-launcher process. Sending SIGTERM" kill -SIGTERM $qemu_pid # give the pid 10 seconds to exit. for x in $(seq 1 10); do if ! [ -d /proc/$qemu_pid ]; then echo "qemu pid [$qemu_pid] exited after after SIGTERM" exit $rc fi echo "waiting for qemu pid [$qemu_pid] to exit" sleep 1 done # if we got here, the pid never exitted gracefully. echo "timed out waiting for qemu pid [$qemu_pid] to exit" fi exit $rc
{ "content_hash": "5c05eb071e6208c8ea829b41fcdc7523", "timestamp": "", "source": "github", "line_count": 63, "max_line_length": 68, "avg_line_length": 24.603174603174605, "alnum_prop": 0.6825806451612904, "repo_name": "fabiand/kubevirt", "id": "fe59d89eeaa2ebdb0db4305b11e6f53ece98dbf2", "size": "1562", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "cmd/virt-launcher/entrypoint.sh", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "Go", "bytes": "1329814" }, { "name": "Makefile", "bytes": "2140" }, { "name": "Shell", "bytes": "74887" } ], "symlink_target": "" }
set(PACKAGE_VERSION "6.700.4") # Check whether the requested PACKAGE_FIND_VERSION is compatible if("${PACKAGE_VERSION}" VERSION_LESS "${PACKAGE_FIND_VERSION}") set(PACKAGE_VERSION_COMPATIBLE FALSE) else() set(PACKAGE_VERSION_COMPATIBLE TRUE) if ("${PACKAGE_VERSION}" VERSION_EQUAL "${PACKAGE_FIND_VERSION}") set(PACKAGE_VERSION_EXACT TRUE) endif() endif()
{ "content_hash": "d61cc64259af29e562d946e7d23ade30", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 67, "avg_line_length": 33.54545454545455, "alnum_prop": 0.7317073170731707, "repo_name": "KaimingOuyang/HPC-K-Means", "id": "19d634dd62ff2fbb2c52730167c44af90d118577", "size": "369", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "armadillo-6.700.4/build/InstallFiles/ArmadilloConfigVersion.cmake", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Assembly", "bytes": "609" }, { "name": "C", "bytes": "18803878" }, { "name": "C++", "bytes": "113300862" }, { "name": "CMake", "bytes": "433056" }, { "name": "Cuda", "bytes": "26751" }, { "name": "FORTRAN", "bytes": "394128" }, { "name": "Groff", "bytes": "677565" }, { "name": "HTML", "bytes": "1413540" }, { "name": "M4", "bytes": "1204" }, { "name": "Makefile", "bytes": "787790" }, { "name": "Matlab", "bytes": "17397" }, { "name": "Objective-C", "bytes": "847314" }, { "name": "Perl", "bytes": "11571" }, { "name": "Prolog", "bytes": "3949" }, { "name": "Python", "bytes": "26431" }, { "name": "Shell", "bytes": "79099" } ], "symlink_target": "" }
FROM balenalib/raspberrypi3-64-debian:bookworm-run # A few reasons for installing distribution-provided OpenJDK: # # 1. Oracle. Licensing prevents us from redistributing the official JDK. # # 2. Compiling OpenJDK also requires the JDK to be installed, and it gets # really hairy. # # For some sample build times, see Debian's buildd logs: # https://buildd.debian.org/status/logs.php?pkg=openjdk-8 RUN apt-get update && apt-get install -y --no-install-recommends \ bzip2 \ unzip \ xz-utils \ binutils \ fontconfig libfreetype6 \ ca-certificates p11-kit \ && rm -rf /var/lib/apt/lists/* ENV JAVA_HOME /usr/local/openjdk-17 ENV PATH $JAVA_HOME/bin:$PATH # Default to UTF-8 file.encoding ENV LANG C.UTF-8 RUN curl -SLO "https://download.java.net/java/GA/jdk17.0.1/2a2082e5a09d4267845be086888add4f/12/GPL/openjdk-17.0.1_linux-aarch64_bin.tar.gz" \ && echo "86653d48787e5a1c029df10da7808194fe8bd931ddd72ff3d42850bf1afb317e openjdk-17.0.1_linux-aarch64_bin.tar.gz" | sha256sum -c - \ && mkdir -p "$JAVA_HOME" \ && tar --extract \ --file openjdk-17.0.1_linux-aarch64_bin.tar.gz \ --directory "$JAVA_HOME" \ --strip-components 1 \ --no-same-owner \ && rm -f openjdk-17.0.1_linux-aarch64_bin.tar.gz \ && { \ echo '#!/usr/bin/env bash'; \ echo 'set -Eeuo pipefail'; \ echo 'trust extract --overwrite --format=java-cacerts --filter=ca-anchors --purpose=server-auth "$JAVA_HOME/lib/security/cacerts"'; \ } > /etc/ca-certificates/update.d/docker-openjdk \ && chmod +x /etc/ca-certificates/update.d/docker-openjdk \ && /etc/ca-certificates/update.d/docker-openjdk \ && find "$JAVA_HOME/lib" -name '*.so' -exec dirname '{}' ';' | sort -u > /etc/ld.so.conf.d/docker-openjdk.conf \ && ldconfig \ && java -Xshare:dump \ && fileEncoding="$(echo 'System.out.println(System.getProperty("file.encoding"))' | jshell -s -)"; [ "$fileEncoding" = 'UTF-8' ]; rm -rf ~/.java \ && javac --version \ && java --version CMD ["echo","'No CMD command was set in Dockerfile! 
Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"] RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v8 \nOS: Debian Bookworm \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nOpenJDK v17-jdk \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \ && chmod +x /bin/sh-shim \ && cp /bin/sh /bin/sh.real \ && mv /bin/sh-shim /bin/sh
{ "content_hash": "cbad7465d78ac3ebe99cdb541b4144ca", "timestamp": "", "source": "github", "line_count": 58, "max_line_length": 679, "avg_line_length": 51.810344827586206, "alnum_prop": 0.7008319467554076, "repo_name": "resin-io-library/base-images", "id": "656727830c0a12258f928ddfbbfba34edf72751f", "size": "3026", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "balena-base-images/openjdk/raspberrypi3-64/debian/bookworm/17-jdk/run/Dockerfile", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "71234697" }, { "name": "JavaScript", "bytes": "13096" }, { "name": "Shell", "bytes": "12051936" }, { "name": "Smarty", "bytes": "59789" } ], "symlink_target": "" }
// Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chromeos/dbus/smb_provider_client.h" #include <memory> #include "base/bind.h" #include "base/bind_helpers.h" #include "base/files/file_util.h" #include "base/memory/weak_ptr.h" #include "dbus/bus.h" #include "dbus/message.h" #include "dbus/object_proxy.h" namespace chromeos { namespace { smbprovider::ErrorType GetErrorFromReader(dbus::MessageReader* reader) { int32_t int_error; if (!reader->PopInt32(&int_error) || !smbprovider::ErrorType_IsValid(int_error)) { DLOG(ERROR) << "SmbProviderClient: Failed to get an error from the response"; return smbprovider::ERROR_DBUS_PARSE_FAILED; } return static_cast<smbprovider::ErrorType>(int_error); } smbprovider::ErrorType GetErrorAndProto( dbus::Response* response, google::protobuf::MessageLite* protobuf_out) { if (!response) { DLOG(ERROR) << "Failed to call smbprovider"; return smbprovider::ERROR_DBUS_PARSE_FAILED; } dbus::MessageReader reader(response); smbprovider::ErrorType error = GetErrorFromReader(&reader); if (error != smbprovider::ERROR_OK) { return error; } if (!reader.PopArrayOfBytesAsProto(protobuf_out)) { DLOG(ERROR) << "Failed to parse protobuf."; return smbprovider::ERROR_DBUS_PARSE_FAILED; } return smbprovider::ERROR_OK; } bool ParseDeleteList(const base::ScopedFD& fd, int32_t bytes_written, smbprovider::DeleteListProto* delete_list) { DCHECK(delete_list); std::vector<uint8_t> buffer(bytes_written); return base::ReadFromFD(fd.get(), reinterpret_cast<char*>(buffer.data()), buffer.size()) && delete_list->ParseFromArray(buffer.data(), buffer.size()); } std::unique_ptr<smbprovider::MountConfigProto> CreateMountConfigProto( bool enable_ntlm) { auto mount_config = std::make_unique<smbprovider::MountConfigProto>(); mount_config->set_enable_ntlm(enable_ntlm); return mount_config; } class SmbProviderClientImpl : public SmbProviderClient { public: 
SmbProviderClientImpl() = default; ~SmbProviderClientImpl() override {} void Mount(const base::FilePath& share_path, const MountOptions& options, base::ScopedFD password_fd, MountCallback callback) override { smbprovider::MountOptionsProto options_proto; options_proto.set_path(share_path.value()); options_proto.set_original_path(options.original_path); options_proto.set_workgroup(options.workgroup); options_proto.set_username(options.username); options_proto.set_skip_connect(options.skip_connect); options_proto.set_account_hash(options.account_hash); options_proto.set_save_password(options.save_password); options_proto.set_restore_password(options.restore_password); std::unique_ptr<smbprovider::MountConfigProto> config = CreateMountConfigProto(options.ntlm_enabled); options_proto.set_allocated_mount_config(config.release()); dbus::MethodCall method_call(smbprovider::kSmbProviderInterface, smbprovider::kMountMethod); dbus::MessageWriter writer(&method_call); writer.AppendProtoAsArrayOfBytes(options_proto); writer.AppendFileDescriptor(password_fd.get()); CallMethod(&method_call, &SmbProviderClientImpl::HandleMountCallback, &callback); } void Unmount(int32_t mount_id, bool remove_password, StatusCallback callback) override { smbprovider::UnmountOptionsProto options; options.set_mount_id(mount_id); options.set_remove_password(remove_password); CallDefaultMethod(smbprovider::kUnmountMethod, options, &callback); } void ReadDirectory(int32_t mount_id, const base::FilePath& directory_path, ReadDirectoryCallback callback) override { smbprovider::ReadDirectoryOptionsProto options; options.set_mount_id(mount_id); options.set_directory_path(directory_path.value()); CallMethod(smbprovider::kReadDirectoryMethod, options, &SmbProviderClientImpl::HandleProtoCallback< smbprovider::DirectoryEntryListProto>, &callback); } void GetMetadataEntry(int32_t mount_id, const base::FilePath& entry_path, GetMetdataEntryCallback callback) override { smbprovider::GetMetadataEntryOptionsProto 
options; options.set_mount_id(mount_id); options.set_entry_path(entry_path.value()); CallMethod(smbprovider::kGetMetadataEntryMethod, options, &SmbProviderClientImpl::HandleProtoCallback< smbprovider::DirectoryEntryProto>, &callback); } void OpenFile(int32_t mount_id, const base::FilePath& file_path, bool writeable, OpenFileCallback callback) override { smbprovider::OpenFileOptionsProto options; options.set_mount_id(mount_id); options.set_file_path(file_path.value()); options.set_writeable(writeable); CallMethod(smbprovider::kOpenFileMethod, options, &SmbProviderClientImpl::HandleOpenFileCallback, &callback); } void CloseFile(int32_t mount_id, int32_t file_id, StatusCallback callback) override { smbprovider::CloseFileOptionsProto options; options.set_mount_id(mount_id); options.set_file_id(file_id); CallDefaultMethod(smbprovider::kCloseFileMethod, options, &callback); } void ReadFile(int32_t mount_id, int32_t file_id, int64_t offset, int32_t length, ReadFileCallback callback) override { smbprovider::ReadFileOptionsProto options; options.set_mount_id(mount_id); options.set_file_id(file_id); options.set_offset(offset); options.set_length(length); CallMethod(smbprovider::kReadFileMethod, options, &SmbProviderClientImpl::HandleReadFileCallback, &callback); } void DeleteEntry(int32_t mount_id, const base::FilePath& entry_path, bool recursive, StatusCallback callback) override { smbprovider::DeleteEntryOptionsProto options; options.set_mount_id(mount_id); options.set_entry_path(entry_path.value()); options.set_recursive(recursive); CallDefaultMethod(smbprovider::kDeleteEntryMethod, options, &callback); } void CreateFile(int32_t mount_id, const base::FilePath& file_path, StatusCallback callback) override { smbprovider::CreateFileOptionsProto options; options.set_mount_id(mount_id); options.set_file_path(file_path.value()); CallDefaultMethod(smbprovider::kCreateFileMethod, options, &callback); } void Truncate(int32_t mount_id, const base::FilePath& file_path, int64_t 
length, StatusCallback callback) override { smbprovider::TruncateOptionsProto options; options.set_mount_id(mount_id); options.set_file_path(file_path.value()); options.set_length(length); CallDefaultMethod(smbprovider::kTruncateMethod, options, &callback); } void WriteFile(int32_t mount_id, int32_t file_id, int64_t offset, int32_t length, base::ScopedFD temp_fd, StatusCallback callback) override { smbprovider::WriteFileOptionsProto options; options.set_mount_id(mount_id); options.set_file_id(file_id); options.set_offset(offset); options.set_length(length); dbus::MethodCall method_call(smbprovider::kSmbProviderInterface, smbprovider::kWriteFileMethod); dbus::MessageWriter writer(&method_call); writer.AppendProtoAsArrayOfBytes(options); writer.AppendFileDescriptor(temp_fd.get()); CallDefaultMethod(&method_call, &callback); } void CreateDirectory(int32_t mount_id, const base::FilePath& directory_path, bool recursive, StatusCallback callback) override { smbprovider::CreateDirectoryOptionsProto options; options.set_mount_id(mount_id); options.set_directory_path(directory_path.value()); options.set_recursive(recursive); CallDefaultMethod(smbprovider::kCreateDirectoryMethod, options, &callback); } void MoveEntry(int32_t mount_id, const base::FilePath& source_path, const base::FilePath& target_path, StatusCallback callback) override { smbprovider::MoveEntryOptionsProto options; options.set_mount_id(mount_id); options.set_source_path(source_path.value()); options.set_target_path(target_path.value()); CallDefaultMethod(smbprovider::kMoveEntryMethod, options, &callback); } void CopyEntry(int32_t mount_id, const base::FilePath& source_path, const base::FilePath& target_path, StatusCallback callback) override { smbprovider::CopyEntryOptionsProto options; options.set_mount_id(mount_id); options.set_source_path(source_path.value()); options.set_target_path(target_path.value()); CallCopyEntryMethod(options, std::move(callback)); } void GetDeleteList(int32_t mount_id, const 
base::FilePath& entry_path, GetDeleteListCallback callback) override { smbprovider::GetDeleteListOptionsProto options; options.set_mount_id(mount_id); options.set_entry_path(entry_path.value()); CallMethod(smbprovider::kGetDeleteListMethod, options, &SmbProviderClientImpl::HandleGetDeleteListCallback, &callback); } void GetShares(const base::FilePath& server_url, ReadDirectoryCallback callback) override { smbprovider::GetSharesOptionsProto options; options.set_server_url(server_url.value()); CallMethod(smbprovider::kGetSharesMethod, options, &SmbProviderClientImpl::HandleProtoCallback< smbprovider::DirectoryEntryListProto>, &callback); } void SetupKerberos(const std::string& account_id, SetupKerberosCallback callback) override { dbus::MethodCall method_call(smbprovider::kSmbProviderInterface, smbprovider::kSetupKerberosMethod); dbus::MessageWriter writer(&method_call); writer.AppendString(account_id); CallMethod(&method_call, &SmbProviderClientImpl::HandleSetupKerberosCallback, &callback); } void ParseNetBiosPacket(const std::vector<uint8_t>& packet, uint16_t transaction_id, ParseNetBiosPacketCallback callback) override { dbus::MethodCall method_call(smbprovider::kSmbProviderInterface, smbprovider::kParseNetBiosPacketMethod); dbus::MessageWriter writer(&method_call); writer.AppendArrayOfBytes(packet.data(), packet.size()); writer.AppendUint16(transaction_id); CallMethod(&method_call, &SmbProviderClientImpl::HandleParseNetBiosPacketCallback, &callback); } void StartCopy(int32_t mount_id, const base::FilePath& source_path, const base::FilePath& target_path, StartCopyCallback callback) override { smbprovider::CopyEntryOptionsProto options; options.set_mount_id(mount_id); options.set_source_path(source_path.value()); options.set_target_path(target_path.value()); CallMethod(smbprovider::kStartCopyMethod, options, &SmbProviderClientImpl::HandleStartCopyCallback, &callback); } void ContinueCopy(int32_t mount_id, int32_t copy_token, StatusCallback callback) override { 
dbus::MethodCall method_call(smbprovider::kSmbProviderInterface, smbprovider::kContinueCopyMethod); dbus::MessageWriter writer(&method_call); writer.AppendInt32(mount_id); writer.AppendInt32(copy_token); CallDefaultMethod(&method_call, &callback); } void StartReadDirectory(int32_t mount_id, const base::FilePath& directory_path, StartReadDirectoryCallback callback) override { smbprovider::ReadDirectoryOptionsProto options; options.set_mount_id(mount_id); options.set_directory_path(directory_path.value()); CallMethod(smbprovider::kStartReadDirectoryMethod, options, &SmbProviderClientImpl::HandleStartReadDirectoryCallback, &callback); } void ContinueReadDirectory(int32_t mount_id, int32_t read_dir_token, ReadDirectoryCallback callback) override { dbus::MethodCall method_call(smbprovider::kSmbProviderInterface, smbprovider::kContinueReadDirectoryMethod); dbus::MessageWriter writer(&method_call); writer.AppendInt32(mount_id); writer.AppendInt32(read_dir_token); CallMethod(&method_call, &SmbProviderClientImpl::HandleContinueReadDirectoryCallback, &callback); } void UpdateMountCredentials(int32_t mount_id, std::string workgroup, std::string username, base::ScopedFD password_fd, StatusCallback callback) override { smbprovider::UpdateMountCredentialsOptionsProto options; options.set_mount_id(mount_id); options.set_workgroup(workgroup); options.set_username(username); dbus::MethodCall method_call(smbprovider::kSmbProviderInterface, smbprovider::kUpdateMountCredentialsMethod); dbus::MessageWriter writer(&method_call); writer.AppendProtoAsArrayOfBytes(options); writer.AppendFileDescriptor(password_fd.get()); CallDefaultMethod(&method_call, &callback); } void UpdateSharePath(int32_t mount_id, const std::string& share_path, StatusCallback callback) override { smbprovider::UpdateSharePathOptionsProto options; options.set_mount_id(mount_id); options.set_path(share_path); dbus::MethodCall method_call(smbprovider::kSmbProviderInterface, smbprovider::kUpdateSharePathMethod); 
dbus::MessageWriter writer(&method_call); writer.AppendProtoAsArrayOfBytes(options); CallDefaultMethod(&method_call, &callback); } protected: // DBusClient override. void Init(dbus::Bus* bus) override { proxy_ = bus->GetObjectProxy( smbprovider::kSmbProviderServiceName, dbus::ObjectPath(smbprovider::kSmbProviderServicePath)); DCHECK(proxy_); } private: // Calls the DBUS method |name|, passing the |protobuf| as an argument. // |handler| is the member function in this class that receives // the response and then passes the processed response to |callback|. template <typename CallbackHandler, typename Callback> void CallMethod(const char* name, const google::protobuf::MessageLite& protobuf, CallbackHandler handler, Callback callback) { dbus::MethodCall method_call(smbprovider::kSmbProviderInterface, name); dbus::MessageWriter writer(&method_call); writer.AppendProtoAsArrayOfBytes(protobuf); CallMethod(&method_call, handler, callback); } // Calls the method specified in |method_call|. |handler| is the member // function in this class that receives the response and then passes the // processed response to |callback|. template <typename CallbackHandler, typename Callback> void CallMethod(dbus::MethodCall* method_call, CallbackHandler handler, Callback callback) { proxy_->CallMethod( method_call, dbus::ObjectProxy::TIMEOUT_USE_DEFAULT, base::BindOnce(handler, GetWeakPtr(), std::move(*callback))); } // Calls the D-Bus method |name|, passing the |protobuf| as an argument. // Uses the default callback handler to process |callback|. template <typename Callback> void CallDefaultMethod(const char* name, const google::protobuf::MessageLite& protobuf, Callback callback) { dbus::MethodCall method_call(smbprovider::kSmbProviderInterface, name); dbus::MessageWriter writer(&method_call); writer.AppendProtoAsArrayOfBytes(protobuf); CallDefaultMethod(&method_call, callback); } // Calls the method specified in |method_call|. Uses the default callback // handler to process |callback|. 
template <typename Callback> void CallDefaultMethod(dbus::MethodCall* method_call, Callback callback) { proxy_->CallMethod( method_call, dbus::ObjectProxy::TIMEOUT_USE_DEFAULT, base::BindOnce(&SmbProviderClientImpl::HandleDefaultCallback, GetWeakPtr(), method_call->GetMember(), std::move(*callback))); } // Calls the CopyEntry D-Bus method with no timeout, passing the |protobuf| as // an argument. Uses the default callback handler to process |callback|. void CallCopyEntryMethod(const google::protobuf::MessageLite& protobuf, StatusCallback callback) { dbus::MethodCall method_call(smbprovider::kSmbProviderInterface, smbprovider::kCopyEntryMethod); dbus::MessageWriter writer(&method_call); writer.AppendProtoAsArrayOfBytes(protobuf); proxy_->CallMethod( &method_call, dbus::ObjectProxy::TIMEOUT_INFINITE, base::BindOnce(&SmbProviderClientImpl::HandleDefaultCallback, GetWeakPtr(), method_call.GetMember(), std::move(callback))); } // Handles D-Bus callback for mount. void HandleMountCallback(MountCallback callback, dbus::Response* response) { if (!response) { LOG(ERROR) << "Mount: failed to call smbprovider"; std::move(callback).Run(smbprovider::ERROR_DBUS_PARSE_FAILED, -1); return; } dbus::MessageReader reader(response); smbprovider::ErrorType error = GetErrorFromReader(&reader); if (error != smbprovider::ERROR_OK) { std::move(callback).Run(error, -1); return; } int32_t mount_id = -1; if (!reader.PopInt32(&mount_id) || mount_id < 0) { LOG(ERROR) << "Mount: failed to parse mount id"; std::move(callback).Run(smbprovider::ERROR_DBUS_PARSE_FAILED, -1); return; } std::move(callback).Run(smbprovider::ERROR_OK, mount_id); } // Handles D-Bus callback for OpenFile. 
void HandleOpenFileCallback(OpenFileCallback callback, dbus::Response* response) { if (!response) { LOG(ERROR) << "OpenFile: failed to call smbprovider"; std::move(callback).Run(smbprovider::ERROR_DBUS_PARSE_FAILED, -1); return; } dbus::MessageReader reader(response); smbprovider::ErrorType error = GetErrorFromReader(&reader); if (error != smbprovider::ERROR_OK) { std::move(callback).Run(error, -1); return; } int32_t file_id = -1; if (!reader.PopInt32(&file_id) || file_id < 0) { LOG(ERROR) << "OpenFile: failed to parse mount id"; std::move(callback).Run(smbprovider::ERROR_DBUS_PARSE_FAILED, -1); return; } std::move(callback).Run(smbprovider::ERROR_OK, file_id); } // Handles D-Bus callback for ReadFile. void HandleReadFileCallback(ReadFileCallback callback, dbus::Response* response) { base::ScopedFD fd; if (!response) { LOG(ERROR) << "ReadFile: failed to call smbprovider"; std::move(callback).Run(smbprovider::ERROR_DBUS_PARSE_FAILED, fd); return; } dbus::MessageReader reader(response); smbprovider::ErrorType error = GetErrorFromReader(&reader); if (error != smbprovider::ERROR_OK) { std::move(callback).Run(error, fd); return; } if (!reader.PopFileDescriptor(&fd)) { LOG(ERROR) << "ReadFile: failed to parse file descriptor"; std::move(callback).Run(smbprovider::ERROR_DBUS_PARSE_FAILED, fd); return; } std::move(callback).Run(smbprovider::ERROR_OK, fd); } // Handles D-Bus callback for GetDeleteList. 
void HandleGetDeleteListCallback(GetDeleteListCallback callback, dbus::Response* response) { base::ScopedFD fd; smbprovider::DeleteListProto delete_list; if (!response) { LOG(ERROR) << "GetDeleteList: failed to call smbprovider"; std::move(callback).Run(smbprovider::ERROR_DBUS_PARSE_FAILED, delete_list); return; } dbus::MessageReader reader(response); smbprovider::ErrorType error = GetErrorFromReader(&reader); if (error != smbprovider::ERROR_OK) { std::move(callback).Run(error, delete_list); return; } int32_t bytes_written; bool success = reader.PopFileDescriptor(&fd) && reader.PopInt32(&bytes_written) && ParseDeleteList(fd, bytes_written, &delete_list); if (!success) { LOG(ERROR) << "GetDeleteList: parse failure."; std::move(callback).Run(smbprovider::ERROR_DBUS_PARSE_FAILED, delete_list); return; } std::move(callback).Run(smbprovider::ERROR_OK, delete_list); } // Handles D-Bus callback for SetupKerberos. void HandleSetupKerberosCallback(SetupKerberosCallback callback, dbus::Response* response) { if (!response) { LOG(ERROR) << "SetupKerberos: failed to call smbprovider"; std::move(callback).Run(false /* success */); return; } dbus::MessageReader reader(response); bool result; if (!reader.PopBool(&result)) { LOG(ERROR) << "SetupKerberos: parse failure."; std::move(callback).Run(false /* success */); return; } std::move(callback).Run(result); } void HandleParseNetBiosPacketCallback(ParseNetBiosPacketCallback callback, dbus::Response* response) { if (!response) { LOG(ERROR) << "ParseNetBiosPacket: failed to call smbprovider"; std::move(callback).Run(std::vector<std::string>()); return; } dbus::MessageReader reader(response); smbprovider::HostnamesProto proto; if (!reader.PopArrayOfBytesAsProto(&proto)) { LOG(ERROR) << "ParseNetBiosPacket: Failed to parse protobuf."; std::move(callback).Run(std::vector<std::string>()); return; } std::vector<std::string> hostnames(proto.hostnames().begin(), proto.hostnames().end()); std::move(callback).Run(hostnames); } void 
HandleStartCopyCallback(StartCopyCallback callback, dbus::Response* response) { if (!response) { LOG(ERROR) << "StartCopy: failed to call smbprovider"; std::move(callback).Run(smbprovider::ERROR_DBUS_PARSE_FAILED, -1 /* copy_token */); return; } dbus::MessageReader reader(response); smbprovider::ErrorType error = GetErrorFromReader(&reader); int32_t copy_token; if (!reader.PopInt32(&copy_token)) { LOG(ERROR) << "StartCopy: parse failure."; std::move(callback).Run(smbprovider::ERROR_DBUS_PARSE_FAILED, -1 /* copy_token*/); } if (error != smbprovider::ERROR_COPY_PENDING) { std::move(callback).Run(error, -1 /* copy_token */); return; } std::move(callback).Run(smbprovider::ERROR_COPY_PENDING, copy_token); } void HandleStartReadDirectoryCallback(StartReadDirectoryCallback callback, dbus::Response* response) { if (!response) { LOG(ERROR) << "StartReadDirectory: failed to call smbprovider"; std::move(callback).Run(smbprovider::ERROR_DBUS_PARSE_FAILED, -1 /* read_dir_token */, smbprovider::DirectoryEntryListProto()); return; } dbus::MessageReader reader(response); smbprovider::ErrorType error = GetErrorFromReader(&reader); smbprovider::DirectoryEntryListProto entries; int32_t read_dir_token; if (!reader.PopArrayOfBytesAsProto(&entries) || !reader.PopInt32(&read_dir_token)) { LOG(ERROR) << "StartReadDirectory: Failed to parse protobuf."; std::move(callback).Run(smbprovider::ERROR_DBUS_PARSE_FAILED, -1 /* read_dir_token */, smbprovider::DirectoryEntryListProto()); return; } std::move(callback).Run(error, read_dir_token, entries); } void HandleContinueReadDirectoryCallback(ReadDirectoryCallback callback, dbus::Response* response) { if (!response) { LOG(ERROR) << "ContinueReadDirectory: failed to call smbprovider"; std::move(callback).Run(smbprovider::ERROR_DBUS_PARSE_FAILED, smbprovider::DirectoryEntryListProto()); return; } dbus::MessageReader reader(response); smbprovider::ErrorType error = GetErrorFromReader(&reader); smbprovider::DirectoryEntryListProto entries; if 
(!reader.PopArrayOfBytesAsProto(&entries)) { LOG(ERROR) << "ContinueReadDirectory: Failed to parse protobuf."; std::move(callback).Run(smbprovider::ERROR_DBUS_PARSE_FAILED, smbprovider::DirectoryEntryListProto()); return; } std::move(callback).Run(error, entries); } // Default callback handler for D-Bus calls. void HandleDefaultCallback(const std::string& method_name, StatusCallback callback, dbus::Response* response) { if (!response) { LOG(ERROR) << method_name << ": failed to call smbprovider"; std::move(callback).Run(smbprovider::ERROR_DBUS_PARSE_FAILED); return; } dbus::MessageReader reader(response); std::move(callback).Run(GetErrorFromReader(&reader)); } // Handles D-Bus responses for methods that return an error and a protobuf // object. template <class T> void HandleProtoCallback(base::OnceCallback<void(smbprovider::ErrorType error, const T& response)> callback, dbus::Response* response) { T proto; smbprovider::ErrorType error(GetErrorAndProto(response, &proto)); std::move(callback).Run(error, proto); } base::WeakPtr<SmbProviderClientImpl> GetWeakPtr() { return base::AsWeakPtr(this); } dbus::ObjectProxy* proxy_ = nullptr; DISALLOW_COPY_AND_ASSIGN(SmbProviderClientImpl); }; } // namespace SmbProviderClient::MountOptions::MountOptions() = default; SmbProviderClient::MountOptions::~MountOptions() = default; SmbProviderClient::SmbProviderClient() = default; SmbProviderClient::~SmbProviderClient() = default; // static std::unique_ptr<SmbProviderClient> SmbProviderClient::Create() { return std::make_unique<SmbProviderClientImpl>(); } } // namespace chromeos
{ "content_hash": "69ced9f5d1fe72f017f468b6c48e6e8f", "timestamp": "", "source": "github", "line_count": 724, "max_line_length": 80, "avg_line_length": 37.908839779005525, "alnum_prop": 0.6489834584274575, "repo_name": "endlessm/chromium-browser", "id": "f54eb833621c57ddda2122d5319adcf0008f1db9", "size": "27446", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "chromeos/dbus/smb_provider_client.cc", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }