text
stringlengths
2
1.04M
meta
dict
package org.springframework.boot.configurationprocessor; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.TypeElement; import javax.lang.model.element.VariableElement; import javax.lang.model.type.TypeMirror; /** * A {@link PropertyDescriptor} for a standard JavaBean property. * * @author Stephane Nicoll */ class JavaBeanPropertyDescriptor extends PropertyDescriptor<ExecutableElement> { JavaBeanPropertyDescriptor(TypeElement ownerElement, ExecutableElement factoryMethod, ExecutableElement getter, String name, TypeMirror type, VariableElement field, ExecutableElement setter) { super(ownerElement, factoryMethod, getter, name, type, field, getter, setter); } @Override protected boolean isProperty(MetadataGenerationEnvironment env) { boolean isCollection = env.getTypeUtils().isCollectionOrMap(getType()); return !env.isExcluded(getType()) && getGetter() != null && (getSetter() != null || isCollection); } @Override protected Object resolveDefaultValue(MetadataGenerationEnvironment environment) { return environment.getFieldDefaultValue(getOwnerElement(), getName()); } }
{ "content_hash": "93c8b726044f140775c1176a6edfa371", "timestamp": "", "source": "github", "line_count": 35, "max_line_length": 86, "avg_line_length": 32.91428571428571, "alnum_prop": 0.7916666666666666, "repo_name": "lburgazzoli/spring-boot", "id": "80d21421f3a09db3d67f82e862dd21fadeaf52d0", "size": "1773", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "spring-boot-project/spring-boot-tools/spring-boot-configuration-processor/src/main/java/org/springframework/boot/configurationprocessor/JavaBeanPropertyDescriptor.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "6954" }, { "name": "CSS", "bytes": "5769" }, { "name": "FreeMarker", "bytes": "2134" }, { "name": "Groovy", "bytes": "49512" }, { "name": "HTML", "bytes": "69689" }, { "name": "Java", "bytes": "11602150" }, { "name": "JavaScript", "bytes": "37789" }, { "name": "Ruby", "bytes": "1307" }, { "name": "Shell", "bytes": "27916" }, { "name": "Smarty", "bytes": "3276" }, { "name": "XSLT", "bytes": "34105" } ], "symlink_target": "" }
FOUNDATION_EXPORT double Pods_DESCrypt_TestsVersionNumber; FOUNDATION_EXPORT const unsigned char Pods_DESCrypt_TestsVersionString[];
{ "content_hash": "2e3d8c13d466c9e8260d50c380df8785", "timestamp": "", "source": "github", "line_count": 3, "max_line_length": 73, "avg_line_length": 44.666666666666664, "alnum_prop": 0.8582089552238806, "repo_name": "MaximKeegan/DESCrypt", "id": "f2f3070cd42f4623d147c2cd8d281a6e4c71be9b", "size": "160", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Example/Pods/Target Support Files/Pods-DESCrypt_Tests/Pods-DESCrypt_Tests-umbrella.h", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "5158" }, { "name": "Objective-C", "bytes": "115905" }, { "name": "Objective-C++", "bytes": "1353" }, { "name": "Ruby", "bytes": "1646" }, { "name": "Shell", "bytes": "14624" } ], "symlink_target": "" }
// // inAppPurchaseView.m // BlackBook // // Created by zaahtechnologiesindiapvt on 11/25/11. // Copyright 2011 __MyCompanyName__. All rights reserved. // #import "inAppPurchaseView.h" #import "epubstore_svcAppDelegate.h" @implementation inAppPurchaseView @synthesize _delegate; - (id)initWithFrame:(CGRect)frame { NSLog(@"iniT"); self = [super initWithFrame:CGRectMake(0, 0, frame.size.width, frame.size.height)]; if (self) { // Initialization code. self.backgroundColor=[UIColor blackColor]; self.alpha = 0.6; UIActivityIndicatorView *loader=[[UIActivityIndicatorView alloc]initWithFrame:CGRectMake(0, 0, 80, 80)]; loader.center=self.center; loader.activityIndicatorViewStyle=UIActivityIndicatorViewStyleWhiteLarge; [self addSubview:loader]; [loader startAnimating]; //[[SKPaymentQueue defaultQueue] addTransactionObserver:self]; self.exclusiveTouch=TRUE; } return self; } -(void)purchaseSelectedItem:(NSString *)productId { NSLog(@"purchSelITEM"); kInAppPurchaseProUpgradeProductId=@"FlickFootBall01"; // [self requestProUpgradeProductData]; [self loadStore]; /// [self.superview setUserInteractionEnabled:FALSE]; } //.................In-App purchase Code........Starts here................................ - (void)requestProUpgradeProductData { NSLog(@"Requesting %@ Purchase",kInAppPurchaseProUpgradeProductId); NSSet *productIdentifiers = [NSSet setWithObject:kInAppPurchaseProUpgradeProductId ]; productsRequest = [[SKProductsRequest alloc] initWithProductIdentifiers:productIdentifiers]; productsRequest.delegate = self; [productsRequest start]; // we will release the request object in the delegate callback } #pragma mark - #pragma mark SKProductsRequestDelegate methods - (void)productsRequest:(SKProductsRequest *)request didReceiveResponse:(SKProductsResponse *)response { NSLog(@"proReqqq"); NSArray *products = response.products; proUpgradeProduct = [products count] == 1 ? 
[[products firstObject] retain] : nil; if (proUpgradeProduct) { NSLog(@"Product title: %@" , proUpgradeProduct.localizedTitle); NSLog(@"Product description: %@" , proUpgradeProduct.localizedDescription); NSLog(@"Product price: %@" , proUpgradeProduct.price); NSLog(@"Product id: %@" , proUpgradeProduct.productIdentifier); //UIAlertView *alert = [[UIAlertView alloc] initWithTitle:nil message:[NSString stringWithFormat:@"title:%@, desc:%@, price:%@, id:%@",proUpgradeProduct.localizedTitle,proUpgradeProduct.localizedDescription,proUpgradeProduct.price,proUpgradeProduct.productIdentifier] delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil]; //UIAlertView *alert = [[UIAlertView alloc] initWithTitle:nil message:@"ERROR" delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil]; //[alert show]; //[alert release]; if([self canMakePurchases]) [self purchaseProUpgrade]; } for (NSString *invalidProductId in response.invalidProductIdentifiers) { NSLog(@"Invalid product id: %@" , invalidProductId); //UIAlertView *alert = [[UIAlertView alloc] initWithTitle:nil message:[NSString stringWithFormat:@"invalid product id ! 
: %@",invalidProductId] delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil]; UIAlertView *alert = [[UIAlertView alloc] initWithTitle:nil message:@"ERROR" delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil]; [alert show]; [alert release]; /// [self.superview setUserInteractionEnabled:TRUE]; // [_delegate deactivatePurchaseMode]; [[SKPaymentQueue defaultQueue] removeTransactionObserver:self]; [self removeFromSuperview]; } // finally release the reqest we alloc/init’ed in requestProUpgradeProductData //// [productsRequest release]; //[[NSNotificationCenter defaultCenter] postNotificationName:kInAppPurchaseManagerProductsFetchedNotification object:self userInfo:nil]; } - (void)request:(SKRequest *)request didFailWithError:(NSError *)error { NSLog(@"reqFailed"); UIAlertView *alert = [[UIAlertView alloc] initWithTitle:nil message:[NSString stringWithFormat:@"ERROR :1 %@" , [error description] ] delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil]; [alert show]; [alert release]; //// [self.superview setUserInteractionEnabled:TRUE]; // [_delegate deactivatePurchaseMode]; [[SKPaymentQueue defaultQueue] removeTransactionObserver:self]; [self removeFromSuperview]; } - (void)requestDidFinish:(SKRequest *)request { } #pragma - #pragma Public methods // // call this method once on startup // - (void)loadStore { NSLog(@"loadSt"); // restarts any purchases if they were interrupted last time the app was open // get the product description (defined in early sections) [self requestProUpgradeProductData]; } // // call this before making a purchase // - (BOOL)canMakePurchases { NSLog(@"CanMake"); return [SKPaymentQueue canMakePayments]; } // // kick off the upgrade transaction // - (void)purchaseProUpgrade { NSLog(@"purchasePro"); NSLog(@"productId===%@",kInAppPurchaseProUpgradeProductId); [[SKPaymentQueue defaultQueue] addTransactionObserver:self]; SKPayment *payment = [SKPayment paymentWithProductIdentifier:kInAppPurchaseProUpgradeProductId]; 
[[SKPaymentQueue defaultQueue] addPayment:payment]; } #pragma - #pragma Purchase helpers // // saves a record of the transaction by storing the receipt to disk // - (void)recordTransaction:(SKPaymentTransaction *)transaction { NSLog(@"recordT"); if ([transaction.payment.productIdentifier isEqualToString:kInAppPurchaseProUpgradeProductId]) { // save the transaction receipt to disk [[NSUserDefaults standardUserDefaults] setValue:transaction.transactionReceipt forKey:@"proUpgradeTransactionReceipt" ]; [[NSUserDefaults standardUserDefaults] synchronize]; } } // // enable pro features // - (void)provideContent:(NSString *)productId { NSLog(@"ProvidCont"); // UIAlertView *alert = [[UIAlertView alloc] initWithTitle:nil message:[NSString stringWithFormat:@"transaction success : %@",productId] delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil]; // //[alert show]; // [alert release]; epubstore_svcAppDelegate *delg = (epubstore_svcAppDelegate*)[UIApplication sharedApplication].delegate; if(delg.gameController.isProVersion == TRUE)return; NSLog(@"Product purchased..."); if ([productId isEqualToString:kInAppPurchaseProUpgradeProductId]) { // enable the pro features UIAlertView *alert = [[UIAlertView alloc] initWithTitle:nil message:[NSString stringWithFormat:@"Successfully Purchased. 
Thank you!"] delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil]; [alert show]; [alert release]; [[NSUserDefaults standardUserDefaults] setBool:YES forKey:@"isProUpgradePurchased" ]; [[NSUserDefaults standardUserDefaults] synchronize]; //TBONEAppDelegate *delg = (TBONEAppDelegate*)[UIApplication sharedApplication].delegate; delg.gameController.isProVersion = TRUE; purchasedprd=productId; //// [self.superview setUserInteractionEnabled:TRUE]; [_delegate updatePurchasedProduct:kInAppPurchaseProUpgradeProductId]; [delg.gameController hideButton]; } [[SKPaymentQueue defaultQueue] removeTransactionObserver:self]; [self removeFromSuperview]; } -(void)alertView:(UIAlertView *)alertView clickedButtonAtIndex:(NSInteger)buttonIndex{ } - (void)finishTransaction:(SKPaymentTransaction *)transaction wasSuccessful:(BOOL)wasSuccessful { NSLog(@"finishT"); // remove the transaction from the payment queue. [[SKPaymentQueue defaultQueue] finishTransaction:transaction]; // NSDictionary *userInfo = [NSDictionary dictionaryWithObjectsAndKeys:transaction, @"transaction" , nil]; if (wasSuccessful) { // send out a notification that we’ve finished the transaction //[[NSNotificationCenter defaultCenter] postNotificationName:kInAppPurchaseManagerTransactionSucceededNotification object:self userInfo:userInfo]; // UIAlertView *alert = [[UIAlertView alloc] initWithTitle:nil message:[NSString stringWithFormat:@"transaction success : %@",transaction.payment.productIdentifier ] delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil]; // [alert show]; //[alert release]; } else { // send out a notification for the failed transaction // [[NSNotificationCenter defaultCenter] postNotificationName:kInAppPurchaseManagerTransactionFailedNotification object:self userInfo:userInfo]; UIAlertView *alert = [[UIAlertView alloc] initWithTitle:nil message:[NSString stringWithFormat:@"transaction failed : %@",transaction.payment.productIdentifier ] delegate:self cancelButtonTitle:@"OK" 
otherButtonTitles:nil]; [alert show]; [alert release]; // [self removeFromSuperview]; } //// [self.superview setUserInteractionEnabled:TRUE]; /// [_delegate deactivatePurchaseMode]; // [self removeFromSuperview]; } // // called when the transaction was successful // - (void)completeTransaction:(SKPaymentTransaction *)transaction { NSLog(@"compleT"); [self recordTransaction:transaction]; [self provideContent:transaction.payment.productIdentifier]; // [self finishTransaction:transaction wasSuccessful:YES]; } // // called when a transaction has been restored and and successfully completed // - (void)restoreTransaction:(SKPaymentTransaction *)transaction { NSLog(@"restT"); [self recordTransaction:transaction.originalTransaction]; [self provideContent:transaction.originalTransaction.payment.productIdentifier]; [self finishTransaction:transaction wasSuccessful:YES]; } // // called when a transaction has failed // - (void)failedTransaction:(SKPaymentTransaction *)transaction { NSLog(@"failedT"); if (transaction.error.code != SKErrorPaymentCancelled) { UIAlertView *alert = [[UIAlertView alloc] initWithTitle:nil message:[NSString stringWithFormat:@"%@",[transaction.error description] ] delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil]; [alert show]; [alert release]; [self finishTransaction:transaction wasSuccessful:NO]; } else { // this is fine, the user just cancelled, so don’t notify [[SKPaymentQueue defaultQueue] finishTransaction:transaction]; NSLog(@"user Cancelled request...."); } //// [self.superview setUserInteractionEnabled:TRUE]; // [_delegate deactivatePurchaseMode]; [[SKPaymentQueue defaultQueue] removeTransactionObserver:self]; [self removeFromSuperview]; } #pragma mark - #pragma mark SKPaymentTransactionObserver methods // // called when the transaction status is updated // - (void)paymentQueue:(SKPaymentQueue *)queue updatedTransactions:(NSArray *)transactions { NSLog(@"pq-updT"); for (SKPaymentTransaction *transaction in transactions) { switch 
(transaction.transactionState) { case SKPaymentTransactionStatePurchased: [self completeTransaction:transaction]; break; case SKPaymentTransactionStateFailed: [self failedTransaction:transaction]; break; case SKPaymentTransactionStateRestored: [self restoreTransaction:transaction]; break; default: break; } } } // Sent when transactions are removed from the queue (via finishTransaction:). - (void)paymentQueue:(SKPaymentQueue *)queue removedTransactions:(NSArray *)transactions { NSLog(@"pq-rT"); } // Sent when an error is encountered while adding transactions from the user's purchase history back to the queue. - (void)paymentQueue:(SKPaymentQueue *)queue restoreCompletedTransactionsFailedWithError:(NSError *)error { NSLog(@"pq-fT"); } // Sent when all transactions from the user's purchase history have successfully been added back to the queue. - (void)paymentQueueRestoreCompletedTransactionsFinished:(SKPaymentQueue *)queue { NSLog(@"pq-cT"); } // // removes the transaction from the queue and posts a notification with the transaction result // /* // Only override drawRect: if you perform custom drawing. // An empty implementation adversely affects performance during animation. - (void)drawRect:(CGRect)rect { // Drawing code. } */ - (void)dealloc { [super dealloc]; [[SKPaymentQueue defaultQueue] removeTransactionObserver:self]; // [productsRequest release]; // _delegate=nil; } @end
{ "content_hash": "7c2c3087fded70e871bcf0364c9f0c19", "timestamp": "", "source": "github", "line_count": 378, "max_line_length": 332, "avg_line_length": 33.21957671957672, "alnum_prop": 0.7330572588994186, "repo_name": "KurianSaji/DASHBOARD---GSC", "id": "3a8d3fbe816b080f50630ea6fcc97eead2876e0c", "size": "12563", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Classes/inAppPurchaseView.m", "mode": "33261", "license": "mit", "language": [ { "name": "C", "bytes": "167662" }, { "name": "C++", "bytes": "735" }, { "name": "Objective-C", "bytes": "2505278" }, { "name": "Perl", "bytes": "54" } ], "symlink_target": "" }
package iavanish.collegepal.CommonClasses; import iavanish.collegepal.Courses.Feedback; import iavanish.collegepal.Deadlines.Deadline; import iavanish.collegepal.DiscussionForum.Comment; import iavanish.collegepal.DiscussionForum.Discussion; import iavanish.collegepal.Resource.Resource; /** * * @author himanshu */ /** * * All the database write operations occur here */ public class DataBaseWrite extends DataBaseRead { public DataBaseWrite() { super(); } public boolean enterDeadline(Deadline deadline) { return false; } public boolean updateDeadline(Deadline deadline) { return false; } public boolean updateCourse(Course course) { return false; } public boolean deleteCourse(Course course) { return false; } public boolean registerFeedback(Feedback feedback) { return false; } public boolean selectCourse(Student student, Course course) { return false; } public boolean leaveCourse(Student student, Course course) { return false; } public boolean newDiscussion(Discussion discussion) { return false; } public boolean newComment(Comment comment) { return false; } public boolean voteOnDiscussion(Discussion discussion, Student student, int vote) { return false; } public boolean voteOnComment(Comment comment, Student student, int vote) { return false; } public boolean deleteDiscussion(Discussion discussion, Student student) { return false; } public boolean deleteComment(Comment comment, Student student) { return false; } public boolean newNotes(Resource resource) { return false; } public boolean newSnapshot(Resource resource) { return false; } public boolean newUpload(Resource resource) { return false; } public boolean updateResource(Resource resource, Student student) { return false; } }
{ "content_hash": "6da14d0bd9408927973b09fb2fff5951", "timestamp": "", "source": "github", "line_count": 94, "max_line_length": 87, "avg_line_length": 22.319148936170212, "alnum_prop": 0.6515729265967588, "repo_name": "iavanish/CollegePal", "id": "c684891a4ae77b85086a75c31d246d8519538939", "size": "2098", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "CollegePal/app/src/main/java/iavanish/collegepal/CommonClasses/DataBaseWrite.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "252990" } ], "symlink_target": "" }
using std::string; class SpreadsheetApplication; class Spreadsheet { public: class SpreadsheetCell { public: SpreadsheetCell(); SpreadsheetCell(double initialValue); SpreadsheetCell(const string& initialValue); SpreadsheetCell(const SpreadsheetCell& src); SpreadsheetCell& operator=(const SpreadsheetCell& rhs); void set(double inValue); void set(const string& inString); double getValue() const { mNumAccesses++; return (mValue); } string getString() const { mNumAccesses++; return (mString); } static string doubleToString(double inValue); static double stringToDouble(const string& inString); protected: double mValue; string mString; mutable int mNumAccesses; }; Spreadsheet(const SpreadsheetApplication& theApp, int inWidth = kMaxWidth, int inHeight = kMaxHeight); Spreadsheet(const Spreadsheet& src); ~Spreadsheet(); Spreadsheet& operator=(const Spreadsheet& ths); void setCellAt(int x, int y, const SpreadsheetCell& cell); SpreadsheetCell getCellAt(int x, int y); int getId() const; static const int kMaxHeight = 100; static const int kMaxWidth = 100; protected: bool inRange(int val, int upper); void copyFrom(const Spreadsheet& src); int mWidth, mHeight; int mId; SpreadsheetCell** mCells; const SpreadsheetApplication& mTheApp; static int sCounter; }; typedef Spreadsheet::SpreadsheetCell SCell;
{ "content_hash": "0c26789c75e7d72b9d77906c34a92a44", "timestamp": "", "source": "github", "line_count": 60, "max_line_length": 64, "avg_line_length": 22.983333333333334, "alnum_prop": 0.751269035532995, "repo_name": "zzragida/CppExamples", "id": "5b01e7d12c858fedc1541382ee5823a561c1ec94", "size": "1398", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ProfessionalC++/NestedClasses/Spreadsheet.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "256" }, { "name": "C++", "bytes": "232768" }, { "name": "Makefile", "bytes": "134" } ], "symlink_target": "" }
if(typeof dojo == 'undefined') throw 'Unable to load Shadowbox adapter, Dojo not found'; if(typeof Shadowbox == 'undefined') throw 'Unable to load Shadowbox adapter, Shadowbox not found'; (function(S){ /** * Holds all registered event handlers. * * @property Array events * @private */ var events = []; S.lib = { /** * Gets the value of the style on the given element. * * @param HTMLElement el The DOM element * @param String style The script name of the style * (e.g. marginTop, not margin-top) * @return mixed The value of the given style * @public */ getStyle: function(el, style){ return dojo.style(el, style); }, /** * Removes an element from the DOM. * * @param HTMLElement el The element to remove * @return void * @public */ remove: function(el){ dojo._destroyElement(el); }, /** * Gets the target of the given event. The event object passed will be * the same object that is passed to listeners registered with * addEvent(). * * @param mixed e The event object * @return HTMLElement The event's target element * @public */ getTarget: function(e){ return e.target; }, /** * Gets the page X/Y coordinates of the mouse event in an [x, y] array. * The page coordinates should be relative to the document, and not the * viewport. The event object provided here will be the same object that * is passed to listeners registered with addEvent(). * * @param mixed e The event object * @return Array The page X/Y coordinates * @public */ getPageXY: function(e){ return [e.pageX, e.pageY]; }, /** * Prevents the event's default behavior. The event object passed will * be the same object that is passed to listeners registered with * addEvent(). * * @param mixed e The event object * @return void * @public */ preventDefault: function(e){ e.preventDefault(); }, /** * Gets the key code of the given event object (keydown). The event * object here will be the same object that is passed to listeners * registered with addEvent(). 
* * @param mixed e The event object * @return Number The key code of the event * @public */ keyCode: function(e){ return e.keyCode; }, /** * Adds an event listener to the given element. It is expected that this * function will be passed the event as its first argument. * * @param HTMLElement el The DOM element to listen to * @param String name The name of the event to register * (i.e. 'click', 'scroll', etc.) * @param Function handler The event handler function * @return void * @public */ addEvent: function(el, name, handler){ var t = dojo.connect(el, name, handler); // we need to store a handle to later disconnect events.push({ el: el, name: name, handle: t }); }, /** * Removes an event listener from the given element. * * @param HTMLElement el The DOM element to stop listening to * @param String name The name of the event to stop * listening for (i.e. 'click') * @param Function handler The event handler function * @return void * @public */ removeEvent: function(el, name, handler){ // probably a quicker way to match this dojo.forEach(events, function(ev, idx){ if(ev && ev.el == el && ev.name == name){ dojo.disconnect(ev.handle); events[idx] = null; } }); }, /** * Appends an HTML fragment to the given element. * * @param HTMLElement el The element to append to * @param String html The HTML fragment to use * @return void * @public */ append: function(el, html){ if(el.insertAdjacentHTML){ el.insertAdjacentHTML('BeforeEnd', html); }else if(el.lastChild){ var range = el.ownerDocument.createRange(); range.setStartAfter(el.lastChild); var frag = range.createContextualFragment(html); el.appendChild(frag); }else{ el.innerHTML = html; } } }; })(Shadowbox);
{ "content_hash": "e216dc8cc1d2dd5a9d6754b876e64b47", "timestamp": "", "source": "github", "line_count": 162, "max_line_length": 84, "avg_line_length": 33.53703703703704, "alnum_prop": 0.47505981962083565, "repo_name": "felladrin/joinuo", "id": "aedef68dc58d192e5fd67ba7d078239c7d967047", "size": "6296", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "protected/extensions/shadowbox/source/adapters/shadowbox-dojo.js", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "423" }, { "name": "Batchfile", "bytes": "957" }, { "name": "CSS", "bytes": "271128" }, { "name": "JavaScript", "bytes": "534396" }, { "name": "PHP", "bytes": "21755439" } ], "symlink_target": "" }
using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. [assembly: AssemblyTitle("MMDB.Permissions")] [assembly: AssemblyDescription("")] [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("MMDB.Permissions")] [assembly: AssemblyCopyright("Copyright © 2013")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] // Setting ComVisible to false makes the types in this assembly not visible // to COM components. If you need to access a type in this assembly from // COM, set the ComVisible attribute to true on that type. [assembly: ComVisible(false)] // The following GUID is for the ID of the typelib if this project is exposed to COM [assembly: Guid("d5c83240-32c9-496a-9236-8524dde13ad5")] // Version information for an assembly consists of the following four values: // // Major Version // Minor Version // Build Number // Revision // // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' as shown below: // [assembly: AssemblyVersion("1.0.*")] [assembly: AssemblyVersion("1.0.0.0")] [assembly: AssemblyFileVersion("1.0.0.0")]
{ "content_hash": "c65c4b4f52fb104802f4a2fbca0cbd4b", "timestamp": "", "source": "github", "line_count": 36, "max_line_length": 84, "avg_line_length": 40.02777777777778, "alnum_prop": 0.7265787647467037, "repo_name": "mmooney/Sriracha.Deploy", "id": "d457385917e1fba2b6ff6aa0da5c602bee16fb06", "size": "1444", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "MMDB.Permissions/Properties/AssemblyInfo.cs", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "110" }, { "name": "C#", "bytes": "2451931" }, { "name": "CSS", "bytes": "1949" }, { "name": "HTML", "bytes": "119595" }, { "name": "JavaScript", "bytes": "179550" }, { "name": "Shell", "bytes": "1447" }, { "name": "XSLT", "bytes": "351" } ], "symlink_target": "" }
// Template Source: BaseEntityCollectionPage.java.tt // ------------------------------------------------------------------------------ // Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information. // ------------------------------------------------------------------------------ package com.microsoft.graph.requests; import com.microsoft.graph.models.ThumbnailSet; import com.microsoft.graph.requests.ThumbnailSetCollectionRequestBuilder; import javax.annotation.Nullable; import javax.annotation.Nonnull; import com.microsoft.graph.requests.ThumbnailSetCollectionResponse; import com.microsoft.graph.http.BaseCollectionPage; // **NOTE** This file was generated by a tool and any changes will be overwritten. /** * The class for the Thumbnail Set Collection Page. */ public class ThumbnailSetCollectionPage extends BaseCollectionPage<ThumbnailSet, ThumbnailSetCollectionRequestBuilder> { /** * A collection page for ThumbnailSet * * @param response the serialized ThumbnailSetCollectionResponse from the service * @param builder the request builder for the next collection page */ public ThumbnailSetCollectionPage(@Nonnull final ThumbnailSetCollectionResponse response, @Nonnull final ThumbnailSetCollectionRequestBuilder builder) { super(response, builder); } /** * Creates the collection page for ThumbnailSet * * @param pageContents the contents of this page * @param nextRequestBuilder the request builder for the next page */ public ThumbnailSetCollectionPage(@Nonnull final java.util.List<ThumbnailSet> pageContents, @Nullable final ThumbnailSetCollectionRequestBuilder nextRequestBuilder) { super(pageContents, nextRequestBuilder); } }
{ "content_hash": "206f452b99b8e9b225e6bb05cbc59e70", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 170, "avg_line_length": 45.975, "alnum_prop": 0.7074497009244154, "repo_name": "microsoftgraph/msgraph-sdk-java", "id": "0027a2e60cf0d75cea4476b01599a8d1843703c8", "size": "1839", "binary": false, "copies": "1", "ref": "refs/heads/dev", "path": "src/main/java/com/microsoft/graph/requests/ThumbnailSetCollectionPage.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "27286837" }, { "name": "PowerShell", "bytes": "5635" } ], "symlink_target": "" }
package cg.m.nodetika; import java.util.Map; import java.util.HashMap; import java.util.Set; import java.io.IOException; import java.io.FileNotFoundException; import java.io.File; import java.io.InputStream; import java.io.FileInputStream; import java.io.ByteArrayOutputStream; import java.io.OutputStreamWriter; import java.net.URL; import java.net.URLConnection; import java.net.MalformedURLException; import java.lang.Exception; import org.apache.tika.io.TikaInputStream; import org.apache.tika.metadata.Metadata; import org.apache.tika.metadata.HttpHeaders; import org.apache.tika.metadata.TikaMetadataKeys; import org.apache.tika.parser.Parser; import org.apache.tika.parser.ParseContext; import org.apache.tika.parser.AutoDetectParser; import org.apache.tika.parser.PasswordProvider; import org.apache.tika.parser.html.HtmlParser; import org.apache.tika.parser.ocr.TesseractOCRConfig; import org.apache.tika.parser.pdf.PDFParserConfig; import org.apache.tika.mime.MediaType; import org.apache.tika.detect.Detector; import org.apache.tika.detect.AutoDetectReader; import org.apache.tika.language.LanguageIdentifier; import org.apache.tika.sax.BodyContentHandler; import org.apache.tika.sax.ExpandedTitleContentHandler; import org.apache.tika.config.TikaConfig; import org.apache.tika.exception.TikaException; import org.apache.tika.exception.EncryptedDocumentException; import org.xml.sax.helpers.DefaultHandler; import org.xml.sax.ContentHandler; import org.xml.sax.SAXException; import javax.xml.transform.sax.SAXTransformerFactory; import javax.xml.transform.sax.TransformerHandler; import javax.xml.transform.OutputKeys; import javax.xml.transform.stream.StreamResult; import com.google.gson.Gson; public class NodeTika { private static final TikaConfig config = TikaConfig.getDefaultConfig(); private static TikaInputStream createInputStream(String uri) throws FileNotFoundException, MalformedURLException, IOException { return createInputStream(uri, null); } private static 
TikaInputStream createInputStream(String uri, Metadata metadata) throws FileNotFoundException, MalformedURLException, IOException { InputStream inputStream; if (uri.startsWith("http://") || uri.startsWith("https://") || uri.startsWith("ftp://")) { final URLConnection urlConnection = new URL(uri).openConnection(); // If a metadata object was passed, fill it with the content-type returned from the server. if (metadata != null) { fillMetadata(metadata, urlConnection.getContentType()); } inputStream = urlConnection.getInputStream(); } else { inputStream = new FileInputStream(uri); } return TikaInputStream.get(inputStream); } private static AutoDetectParser createParser() { final AutoDetectParser parser = new AutoDetectParser(config); Map<MediaType, Parser> parsers = parser.getParsers(); parsers.put(MediaType.APPLICATION_XML, new HtmlParser()); parser.setParsers(parsers); parser.setFallback(new Parser() { public Set<MediaType> getSupportedTypes(ParseContext parseContext) { return parser.getSupportedTypes(parseContext); } public void parse(InputStream inputStream, ContentHandler contentHandler, Metadata metadata, ParseContext parseContext) throws TikaException { throw new TikaException("Unsupported Media Type: " + metadata.get(HttpHeaders.CONTENT_TYPE)); } }); return parser; } private static void fillMetadata(AutoDetectParser parser, Metadata metadata, String contentType, String uri) { fillMetadata(metadata, contentType, uri); final Detector detector = parser.getDetector(); parser.setDetector(new Detector() { public MediaType detect(InputStream inputStream, Metadata metadata) throws IOException { String contentType = metadata.get(HttpHeaders.CONTENT_TYPE); if (contentType != null) { return MediaType.parse(contentType); } else { return detector.detect(inputStream, metadata); } } }); } private static void fillMetadata(Metadata metadata, String contentType) { fillMetadata(metadata, contentType, null); } private static void fillMetadata(Metadata metadata, String contentType, 
String uri) { // Set the file name. if (uri != null) { metadata.set(TikaMetadataKeys.RESOURCE_NAME_KEY, new File(uri).getName()); } // Normalise the content-type. contentType = normalizeContentType(contentType); // Set the content-type. if (contentType != null) { metadata.add(HttpHeaders.CONTENT_TYPE, contentType); } } private static String normalizeContentType(String contentType) { if (contentType == null) { return null; } // URLConnection returns content/unknown as the default content-type. if (contentType.equals("content/unknown")) { return null; } if (contentType.equals(MediaType.OCTET_STREAM)) { return null; } if ("xml".equals(MediaType.parse(contentType).getSubtype())) { return null; } return contentType; } private static void fillParseContext(ParseContext parseContext, Map<String, Object> options) { final TesseractOCRConfig ocrConfig = new TesseractOCRConfig(); if (options == null) { // Disable OCR and return if no options are specified. disableOcr(ocrConfig); parseContext.set(TesseractOCRConfig.class, ocrConfig); return; } fillOcrOptions(ocrConfig, options); parseContext.set(TesseractOCRConfig.class, ocrConfig); final PDFParserConfig pdfParserConfig = new PDFParserConfig(); fillPdfOptions(pdfParserConfig, options); parseContext.set(PDFParserConfig.class, pdfParserConfig); // Allow a password to be specified for encrypted files. 
fillPassword(parseContext, options); } private static void fillPassword(ParseContext parseContext, Map<String, Object> options) { final Object password = options.get("password"); if (password == null) { return; } parseContext.set(PasswordProvider.class, new PasswordProvider() { @Override public String getPassword(Metadata metadata) { return password.toString(); } }); } private static void fillPdfOptions(PDFParserConfig pdfParserConfig, Map<String, Object> options) { final Object averageCharTolerance = options.get("pdfAverageCharTolerance"); final Object enableAutoSpace = options.get("pdfEnableAutoSpace"); final Object extractAcroFormContent = options.get("pdfExtractAcroFormContent"); final Object extractAnnotationText = options.get("pdfExtractAnnotationText"); final Object extractInlineImages = options.get("pdfExtractInlineImages"); final Object extractUniqueInlineImagesOnly = options.get("pdfExtractUniqueInlineImagesOnly"); final Object sortByPosition = options.get("pdfSortByPosition"); final Object spacingTolerance = options.get("pdfSpacingTolerance"); final Object suppressDuplicateOverlappingText = options.get("pdfSuppressDuplicateOverlappingText"); final Object useNonSequentialParser = options.get("pdfUseNonSequentialParser"); if (averageCharTolerance != null) { pdfParserConfig.setAverageCharTolerance(Float.parseFloat(averageCharTolerance.toString())); } if (enableAutoSpace != null) { pdfParserConfig.setEnableAutoSpace((Boolean) enableAutoSpace); } if (extractAcroFormContent != null) { pdfParserConfig.setExtractAcroFormContent((Boolean) extractAcroFormContent); } if (extractAnnotationText != null) { pdfParserConfig.setExtractAnnotationText((Boolean) extractAnnotationText); } if (extractInlineImages != null) { pdfParserConfig.setExtractInlineImages((Boolean) extractInlineImages); } if (extractUniqueInlineImagesOnly != null) { pdfParserConfig.setExtractUniqueInlineImagesOnly((Boolean) extractUniqueInlineImagesOnly); } if (sortByPosition != null) { 
pdfParserConfig.setSortByPosition((Boolean) sortByPosition); } if (spacingTolerance != null) { pdfParserConfig.setSpacingTolerance(Float.parseFloat(spacingTolerance.toString())); } if (suppressDuplicateOverlappingText != null) { pdfParserConfig.setSuppressDuplicateOverlappingText((Boolean) suppressDuplicateOverlappingText); } if (useNonSequentialParser != null) { pdfParserConfig.setUseNonSequentialParser((Boolean) useNonSequentialParser); } } private static void fillOcrOptions(TesseractOCRConfig ocrConfig, Map<String, Object> options) { // Only set the OCR config object on the context if the language is specified. // OCR is disabled by default as it can give unexpected results. final Object ocrLanguage = options.get("ocrLanguage"); if (ocrLanguage == null) { disableOcr(ocrConfig); return; } ocrConfig.setLanguage(ocrLanguage.toString()); final Object ocrPath = options.get("ocrPath"); final Object ocrMaxFileSize = options.get("ocrMaxFileSize"); final Object ocrMinFileSize = options.get("ocrMinFileSize"); final Object ocrPageSegmentationMode = options.get("ocrPageSegmentationMode"); final Object ocrTimeout = options.get("ocrTimeout"); if (ocrPath != null) { ocrConfig.setTesseractPath(ocrPath.toString()); } if (ocrMaxFileSize != null) { ocrConfig.setMaxFileSizeToOcr(Integer.parseInt(ocrMaxFileSize.toString())); } if (ocrMinFileSize != null) { ocrConfig.setMinFileSizeToOcr(Integer.parseInt(ocrMinFileSize.toString())); } if (ocrPageSegmentationMode != null) { ocrConfig.setPageSegMode(ocrPageSegmentationMode.toString()); } if (ocrTimeout != null) { ocrConfig.setTimeout(Integer.parseInt(ocrTimeout.toString())); } } private static void disableOcr(TesseractOCRConfig ocrConfig) { // This is necessary until Tika introduces a way to blacklist parsers. 
// See https://issues.apache.org/jira/browse/TIKA-1557 if (System.getProperty("os.name").startsWith("Windows")) { ocrConfig.setTesseractPath("\\Device\\Null\\"); } else { ocrConfig.setTesseractPath("/dev/null/"); } } public static String extractText(String uri, String optionsJson) throws Exception { Map<String, Object> options = null; if (optionsJson != null) { options = new Gson().fromJson(optionsJson, HashMap.class); } return extractText(uri, options); } public static String extractText(String uri, Map<String, Object> options) throws Exception { final AutoDetectParser parser = createParser(); final Metadata metadata = new Metadata(); final ParseContext context = new ParseContext(); String outputEncoding = null; String contentType = null; if (options != null) { Object option; option = options.get("outputEncoding"); if (option != null) { outputEncoding = option.toString(); } option = options.get("contentType"); if (option != null) { contentType = option.toString(); } } if (outputEncoding == null) { outputEncoding = "UTF-8"; } fillMetadata(parser, metadata, contentType, uri); fillParseContext(context, options); final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); final OutputStreamWriter writer = new OutputStreamWriter(outputStream, outputEncoding); final BodyContentHandler body = new BodyContentHandler(new RichTextContentHandler(writer)); final TikaInputStream inputStream = createInputStream(uri, metadata); // Set up recursive parsing of archives. 
// See: http://wiki.apache.org/tika/RecursiveMetadata context.set(Parser.class, parser); try { parser.parse(inputStream, body, metadata, context); } catch (SAXException e) { throw e; } catch (EncryptedDocumentException e) { throw e; } catch (TikaException e) { throw e; } finally { inputStream.close(); } return outputStream.toString(outputEncoding); } public static String extractXml(String uri, String outputFormat, String optionsJson) throws Exception { Map<String, Object> options = null; if (optionsJson != null) { options = new Gson().fromJson(optionsJson, HashMap.class); } return extractXml(uri, outputFormat, options); } public static String extractXml(String uri, String outputFormat, Map<String, Object> options) throws Exception { final AutoDetectParser parser = createParser(); final Metadata metadata = new Metadata(); final ParseContext context = new ParseContext(); String outputEncoding = null; String contentType = null; if (options != null) { Object option; option = options.get("outputEncoding"); if (option != null) { outputEncoding = option.toString(); } option = options.get("contentType"); if (option != null) { contentType = option.toString(); } } if (outputEncoding == null) { outputEncoding = "UTF-8"; } fillMetadata(parser, metadata, contentType, uri); fillParseContext(context, options); final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); final OutputStreamWriter writer = new OutputStreamWriter(outputStream, outputEncoding); ContentHandler content; SAXTransformerFactory factory = (SAXTransformerFactory)SAXTransformerFactory.newInstance(); TransformerHandler handler = factory.newTransformerHandler(); handler.getTransformer().setOutputProperty(OutputKeys.METHOD, outputFormat); handler.getTransformer().setOutputProperty(OutputKeys.INDENT, "no"); handler.getTransformer().setOutputProperty(OutputKeys.ENCODING, outputEncoding); handler.setResult(new StreamResult(writer)); content = new ExpandedTitleContentHandler(handler); final 
TikaInputStream inputStream = createInputStream(uri, metadata); // Set up recursive parsing of archives. // See: http://wiki.apache.org/tika/RecursiveMetadata context.set(Parser.class, parser); try { parser.parse(inputStream, content, metadata, context); } catch (SAXException e) { throw e; } catch (EncryptedDocumentException e) { throw e; } catch (TikaException e) { throw e; } finally { inputStream.close(); } return outputStream.toString(outputEncoding); } public static String extractMeta(String uri) throws Exception { return extractMeta(uri, null); } public static String extractMeta(String uri, String contentType) throws Exception { final AutoDetectParser parser = createParser(); final Metadata metadata = new Metadata(); fillMetadata(parser, metadata, contentType, uri); final TikaInputStream inputStream = createInputStream(uri, metadata); parser.parse(inputStream, new DefaultHandler(), metadata); Map meta = new HashMap(); for (String name : metadata.names()) { String[] values = metadata.getValues(name); meta.put(name, values); } inputStream.close(); return new Gson().toJson(meta); } public static String detectCharset(String uri) throws FileNotFoundException, IOException, TikaException { return detectCharset(uri, null); } public static String detectCharset(String uri, String contentType) throws FileNotFoundException, IOException, TikaException { final Metadata metadata = new Metadata(); // Use metadata to provide type-hinting to the AutoDetectReader. fillMetadata(metadata, contentType, uri); final TikaInputStream inputStream = createInputStream(uri, metadata); // Detect the character set. 
final AutoDetectReader reader = new AutoDetectReader(inputStream, metadata); String charset = reader.getCharset().toString(); inputStream.close(); return charset; } public static String detectContentType(String uri) throws FileNotFoundException, IOException, TikaException { final Detector detector = config.getDetector(); final TikaInputStream inputStream = createInputStream(uri); final Metadata metadata = new Metadata(); // Set the file name. This provides some level of type-hinting. metadata.add(TikaMetadataKeys.RESOURCE_NAME_KEY, new File(uri).getName()); // Detect the content type. String contentType = detector.detect(inputStream, metadata).toString(); inputStream.close(); // Return the default content-type if undetermined. if (contentType == null || contentType.isEmpty()) { return MediaType.OCTET_STREAM.toString(); } return contentType; } public static String detectContentTypeAndCharset(String uri) throws FileNotFoundException, IOException, TikaException { final Detector detector = config.getDetector(); final TikaInputStream inputStream = createInputStream(uri); final Metadata metadata = new Metadata(); // Set the file name. This provides some level of type-hinting. metadata.add(TikaMetadataKeys.RESOURCE_NAME_KEY, new File(uri).getName()); // Detect the content type. String contentType = detector.detect(inputStream, metadata).toString(); // Use metadata to provide type-hinting to the AutoDetectReader. fillMetadata(metadata, contentType, uri); // Detect the character set. final AutoDetectReader reader = new AutoDetectReader(inputStream, metadata); String charset = reader.getCharset().toString(); inputStream.close(); // Return the default content-type if undetermined. if (contentType == null || contentType.isEmpty()) { return MediaType.OCTET_STREAM.toString(); } // Append the charset if the content-type was determined. 
if (charset != null && !charset.isEmpty()) { return contentType + "; charset=" + charset; } return contentType; } public static String detectLanguage(String text) { LanguageIdentifier identifier = new LanguageIdentifier(text); Map language = new HashMap(); language.put("language", identifier.getLanguage()); language.put("reasonablyCertain", identifier.isReasonablyCertain()); return new Gson().toJson(language); } }
{ "content_hash": "c22fa45a366fc994f2122e7f63f6c4d1", "timestamp": "", "source": "github", "line_count": 550, "max_line_length": 147, "avg_line_length": 31.625454545454545, "alnum_prop": 0.7542255950327699, "repo_name": "Foldesk/node-tika", "id": "c3d74a923b1be932523710d2865faa729ce4658e", "size": "17470", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/main/java/cg/m/nodetika/NodeTika.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "19127" }, { "name": "JavaScript", "bytes": "16463" }, { "name": "Makefile", "bytes": "1142" } ], "symlink_target": "" }
<!doctype html public "-//W3C//DTD HTML 4.0 Transitional//EN" "http://www.w3.org/TR/REC-html40/loose.dtd"> <html> <head> <title>PHPXRef 0.7.1 : Unnamed Project : Function Reference: getoutputaslist()</title> <link rel="stylesheet" href="../sample.css" type="text/css"> <link rel="stylesheet" href="../sample-print.css" type="text/css" media="print"> <style id="hilight" type="text/css"></style> <meta http-equiv="content-type" content="text/html;charset=iso-8859-1"> </head> <body bgcolor="#ffffff" text="#000000" link="#801800" vlink="#300540" alink="#ffffff"> <table class="pagetitle" width="100%"> <tr> <td valign="top" class="pagetitle"> [ <a href="../index.html">Index</a> ] </td> <td align="right" class="pagetitle"> <h2 style="margin-bottom: 0px">PHP Cross Reference of Unnamed Project</h2> </td> </tr> </table> <!-- Generated by PHPXref 0.7.1 at Thu Oct 23 19:15:14 2014 --> <!-- PHPXref (c) 2000-2010 Gareth Watts - gareth@omnipotent.net --> <!-- http://phpxref.sourceforge.net/ --> <script src="../phpxref.js" type="text/javascript"></script> <script language="JavaScript" type="text/javascript"> <!-- ext='.html'; relbase='../'; subdir='_functions'; filename='index.html'; cookiekey='phpxref'; handleNavFrame(relbase, subdir, filename); logFunction('getoutputaslist'); // --> </script> <script language="JavaScript" type="text/javascript"> if (gwGetCookie('xrefnav')=='off') document.write('<p class="navlinks">[ <a href="javascript:navOn()">Show Explorer<\/a> ]<\/p>'); else document.write('<p class="navlinks">[ <a href="javascript:navOff()">Hide Explorer<\/a> ]<\/p>'); </script> <noscript> <p class="navlinks"> [ <a href="../nav.html" target="_top">Show Explorer</a> ] [ <a href="index.html" target="_top">Hide Navbar</a> ] </p> </noscript> [<a href="../index.html">Top level directory</a>]<br> <script language="JavaScript" type="text/javascript"> <!-- document.writeln('<table align="right" class="searchbox-link"><tr><td><a class="searchbox-link" href="javascript:void(0)" 
onMouseOver="showSearchBox()">Search</a><br>'); document.writeln('<table border="0" cellspacing="0" cellpadding="0" class="searchbox" id="searchbox">'); document.writeln('<tr><td class="searchbox-title">'); document.writeln('<a class="searchbox-title" href="javascript:showSearchPopup()">Search History +</a>'); document.writeln('<\/td><\/tr>'); document.writeln('<tr><td class="searchbox-body" id="searchbox-body">'); document.writeln('<form name="search" style="margin:0px; padding:0px" onSubmit=\'return jump()\'>'); document.writeln('<a class="searchbox-body" href="../_classes/index.html">Class<\/a>: '); document.writeln('<input type="text" size=10 value="" name="classname"><br>'); document.writeln('<a id="funcsearchlink" class="searchbox-body" href="../_functions/index.html">Function<\/a>: '); document.writeln('<input type="text" size=10 value="" name="funcname"><br>'); document.writeln('<a class="searchbox-body" href="../_variables/index.html">Variable<\/a>: '); document.writeln('<input type="text" size=10 value="" name="varname"><br>'); document.writeln('<a class="searchbox-body" href="../_constants/index.html">Constant<\/a>: '); document.writeln('<input type="text" size=10 value="" name="constname"><br>'); document.writeln('<a class="searchbox-body" href="../_tables/index.html">Table<\/a>: '); document.writeln('<input type="text" size=10 value="" name="tablename"><br>'); document.writeln('<input type="submit" class="searchbox-button" value="Search">'); document.writeln('<\/form>'); document.writeln('<\/td><\/tr><\/table>'); document.writeln('<\/td><\/tr><\/table>'); // --> </script> <div id="search-popup" class="searchpopup"><p id="searchpopup-title" class="searchpopup-title">title</p><div id="searchpopup-body" class="searchpopup-body">Body</div><p class="searchpopup-close"><a href="javascript:gwCloseActive()">[close]</a></p></div> <h3>Function and Method Cross Reference</h3> <h2><a href="index.html#getoutputaslist">getoutputaslist()</a></h2> <b>Defined 
at:</b><ul> <li><a href="../tests/simpletest/shell_tester.php.html#getoutputaslist">/tests/simpletest/shell_tester.php</a> -> <a onClick="logFunction('getoutputaslist', '/tests/simpletest/shell_tester.php.source.html#l54')" href="../tests/simpletest/shell_tester.php.source.html#l54"> line 54</a></li> <li><a href="../tests/simpletest/shell_tester.php.html#getoutputaslist">/tests/simpletest/shell_tester.php</a> -> <a onClick="logFunction('getoutputaslist', '/tests/simpletest/shell_tester.php.source.html#l121')" href="../tests/simpletest/shell_tester.php.source.html#l121"> line 121</a></li> </ul> <b>Referenced 2 times:</b><ul> <li><a href="../tests/simpletest/shell_tester.php.html">/tests/simpletest/shell_tester.php</a> -> <a href="../tests/simpletest/shell_tester.php.source.html#l128"> line 128</a></li> <li><a href="../tests/simpletest/test/visual_test.php.html">/tests/simpletest/test/visual_test.php</a> -> <a href="../tests/simpletest/test/visual_test.php.source.html#l399"> line 399</a></li> </ul> <!-- A link to the phpxref site in your customized footer file is appreciated ;-) --> <br><hr> <table width="100%"> <tr><td>Generated: Thu Oct 23 19:15:14 2014</td> <td align="right"><i>Cross-referenced by <a href="http://phpxref.sourceforge.net/">PHPXref 0.7.1</a></i></td> </tr> </table> </body></html>
{ "content_hash": "1579304819182f9b94f774aa6c3571c6", "timestamp": "", "source": "github", "line_count": 98, "max_line_length": 293, "avg_line_length": 54.96938775510204, "alnum_prop": 0.677742713940969, "repo_name": "inputx/code-ref-doc", "id": "d069b56445cc80af61b6912d4fde77774e44fc52", "size": "5387", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "rebbit/_functions/getoutputaslist.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "17952" }, { "name": "JavaScript", "bytes": "255489" } ], "symlink_target": "" }
@protocol OrgHamcrestDescription; @protocol OrgHamcrestMatcher; #include "J2ObjC_header.h" #include "org/hamcrest/BaseMatcher.h" @interface OrgHamcrestCoreIsSame : OrgHamcrestBaseMatcher { } - (instancetype)initWithId:(id)object; - (jboolean)matchesWithId:(id)arg; - (void)describeToWithOrgHamcrestDescription:(id<OrgHamcrestDescription>)description_; + (id<OrgHamcrestMatcher>)sameInstanceWithId:(id)object; @end J2OBJC_EMPTY_STATIC_INIT(OrgHamcrestCoreIsSame) CF_EXTERN_C_BEGIN FOUNDATION_EXPORT id<OrgHamcrestMatcher> OrgHamcrestCoreIsSame_sameInstanceWithId_(id object); CF_EXTERN_C_END J2OBJC_TYPE_LITERAL_HEADER(OrgHamcrestCoreIsSame) #endif // _OrgHamcrestCoreIsSame_H_
{ "content_hash": "9f538d500747cda77ecc5a026f48dceb", "timestamp": "", "source": "github", "line_count": 29, "max_line_length": 94, "avg_line_length": 23.75862068965517, "alnum_prop": 0.8127721335268505, "repo_name": "hambroperks/pollexor", "id": "24d68790bc118dcf67e1a51c3ba67027ca2af062", "size": "909", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Pods/J2ObjC/dist/include/org/hamcrest/core/IsSame.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "1040" }, { "name": "HTML", "bytes": "4169" }, { "name": "Java", "bytes": "59016" }, { "name": "Objective-C", "bytes": "87524" }, { "name": "Ruby", "bytes": "1420" }, { "name": "Shell", "bytes": "2482" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?> <!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <parent> <groupId>org.apache.maven</groupId> <artifactId>maven</artifactId> <version>3.5.1-SNAPSHOT</version> </parent> <artifactId>maven-model</artifactId> <name>Maven Model</name> <description>Model for Maven POM (Project Object Model)</description> <properties> <checkstyle.violation.ignore>FileLength</checkstyle.violation.ignore> </properties> <dependencies> <dependency> <groupId>org.codehaus.plexus</groupId> <artifactId>plexus-utils</artifactId> </dependency> <dependency> <groupId>org.apache.commons</groupId> <artifactId>commons-lang3</artifactId> </dependency> </dependencies> <build> <plugins> <plugin> <groupId>org.codehaus.modello</groupId> <artifactId>modello-maven-plugin</artifactId> <configuration> <version>4.0.0</version> <models> <model>src/main/mdo/maven.mdo</model> </models> </configuration> <executions> <execution> <id>modello</id> <goals> <goal>java</goal> <goal>xpp3-reader</goal> <goal>xpp3-extended-reader</goal> 
<goal>xpp3-writer</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-site-plugin</artifactId> <configuration> <!-- Exclude the navigation file for Maven 1 sites as it interferes with the site generation. --> <moduleExcludes> <xdoc>navigation.xml</xdoc> </moduleExcludes> </configuration> </plugin> </plugins> </build> <profiles> <profile> <id>all-models</id> <build> <plugins> <plugin> <groupId>org.codehaus.modello</groupId> <artifactId>modello-maven-plugin</artifactId> <executions> <execution> <id>v3</id> <goals> <goal>java</goal> <goal>xpp3-writer</goal> <goal>xpp3-reader</goal> <goal>xsd</goal> </goals> <configuration> <version>3.0.0</version> <packageWithVersion>true</packageWithVersion> </configuration> </execution> </executions> </plugin> <plugin> <artifactId>maven-jar-plugin</artifactId> <executions> <execution> <phase>package</phase> <goals> <goal>jar</goal> </goals> <configuration> <classifier>all</classifier> </configuration> </execution> </executions> </plugin> </plugins> </build> </profile> </profiles> </project>
{ "content_hash": "f7043c0f1c7d602806c1ea94b972cee1", "timestamp": "", "source": "github", "line_count": 130, "max_line_length": 204, "avg_line_length": 31.084615384615386, "alnum_prop": 0.5803019054689433, "repo_name": "vedmishr/demo1", "id": "581e927a4a91a453290fe422aa0dd6ce3247e0f5", "size": "4041", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "maven-model/pom.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "7778" }, { "name": "Groovy", "bytes": "2034" }, { "name": "HTML", "bytes": "2422" }, { "name": "Java", "bytes": "4535780" }, { "name": "Shell", "bytes": "9618" } ], "symlink_target": "" }
package org.jetbrains.plugins.ideavim.option;

import com.maddyhome.idea.vim.VimPlugin;
import com.maddyhome.idea.vim.api.VimInjectorKt;
import com.maddyhome.idea.vim.helper.CharacterHelper;
import com.maddyhome.idea.vim.options.OptionConstants;
import com.maddyhome.idea.vim.options.OptionScope;
import com.maddyhome.idea.vim.options.helpers.KeywordOptionHelper;
import com.maddyhome.idea.vim.vimscript.model.datatypes.VimString;
import org.jetbrains.plugins.ideavim.VimTestCase;

import java.util.ArrayList;
import java.util.List;

/**
 * Tests for parsing and applying the Vim {@code 'iskeyword'} option.
 *
 * <p>The option value is a comma-separated list of entries; each entry is a single
 * character, a unicode code point, a range ({@code a-c}), {@code @} for "all letters",
 * or a {@code ^}-prefixed form that removes characters from the keyword set.</p>
 */
public class KeywordOptionTest extends VimTestCase {

  // Parses the current global 'iskeyword' value into its individual entries.
  private List<String> getValues() {
    return KeywordOptionHelper.INSTANCE.parseValues(getOptionValue());
  }

  // Reads the raw global 'iskeyword' option value as a string.
  private String getOptionValue() {
    return ((VimString)VimPlugin.getOptionService()
      .getOptionValue(OptionScope.GLOBAL.INSTANCE, OptionConstants.iskeywordName, OptionConstants.iskeywordName)).getValue();
  }

  // Replaces the global 'iskeyword' value wholesale; "testToken" identifies this
  // test as the requester and also appears in error messages (asserted below).
  private void setKeyword(String val) {
    VimPlugin.getOptionService()
      .setOptionValue(OptionScope.GLOBAL.INSTANCE, OptionConstants.iskeywordName, new VimString(val), "testToken");
  }

  // Asserts that the character is classified as a KEYWORD under the current option.
  private void assertIsKeyword(char c) {
    CharacterHelper.CharacterType charType = CharacterHelper.charType(c, false);
    assertSame(CharacterHelper.CharacterType.KEYWORD, charType);
  }

  // Asserts that the character is NOT a keyword (falls back to PUNCTUATION).
  private void assertIsNotKeyword(char c) {
    CharacterHelper.CharacterType charType = CharacterHelper.charType(c, false);
    assertSame(CharacterHelper.CharacterType.PUNCTUATION, charType);
  }

  // A lone comma is a value, not a separator.
  public void testSingleCommaIsAValue() {
    setKeyword(",");
    assertEquals(",", getValues().get(0));
  }

  // Same as above, but appended via the Vimscript `set iskeyword^=,` command.
  public void testSingleCommaIsAValueAsAppend() {
    VimInjectorKt.getInjector().getVimscriptExecutor().execute("set iskeyword^=,", false);
    assertTrue(getValues().contains(","));
  }

  // "^," means "remove comma from the keyword set" and is kept as one entry.
  public void testSingleNegatedCommaIsAValue() {
    setKeyword("^,");
    assertEquals("^,", getValues().get(0));
  }

  // A comma as the upper bound of a range ("+-,") belongs to that range entry.
  public void testCommaInARangeIsAValue() {
    setKeyword("+-,");
    assertEquals("+-,", getValues().get(0));
  }

  // In ",,a" the first comma is a value and the second separates it from "a".
  public void testSecondCommaIsASeparator() {
    setKeyword(",,a");
    assertEquals(",", getValues().get(0));
    assertEquals("a", getValues().get(1));
  }

  // A lone hyphen is a value rather than a range marker.
  public void testSingleHyphenIsAValue() {
    setKeyword("-");
    assertEquals("-", getValues().get(0));
  }

  // A hyphen between two characters forms a range entry.
  public void testHyphenBetweenCharNumsIsARange() {
    setKeyword("a-b");
    assertEquals("a-b", getValues().get(0));
  }

  // Ranges must be ascending; "b-a" is rejected with Vim's E474 error
  // (message includes the "testToken" requester passed to setKeyword).
  public void testRangeInWhichLeftValueIsHigherThanRightValueIsInvalid() {
    try {
      setKeyword("b-a");
      fail("exception missing");
    }
    catch (Exception e) {
      assertEquals("E474: Invalid argument: testToken", e.getMessage());
    }
    assertDoesntContain(getValues(), new ArrayList<>() {{
      add("b-a");
    }});
  }

  // Two adjacent letters without a separator are not a valid entry.
  public void testTwoAdjacentLettersAreInvalid() {
    try {
      setKeyword("ab");
      fail("exception missing");
    }
    catch (Exception e) {
      assertEquals("E474: Invalid argument: testToken", e.getMessage());
    }
    assertDoesntContain(getValues(), new ArrayList<>() {{
      add("ab");
    }});
  }

  // A literal character entry adds that character to the keyword set.
  public void testAddsACharByChar() {
    setKeyword("-");
    assertIsKeyword('-');
  }

  // A numeric entry is treated as a unicode code point ('-' == 45).
  public void testAddsACharByUnicodeCodePoint() {
    setKeyword("" + (int)'-');
    assertIsKeyword('-');
  }

  // A range entry adds every character in the inclusive range.
  public void testAddsARange() {
    setKeyword("a-c");
    assertIsKeyword('a');
    assertIsKeyword('b');
    assertIsKeyword('c');
  }

  // "@" stands for all letters, including non-ASCII ones like 'Ā'.
  public void testAtSignRepresentsAllLetters() {
    setKeyword("@");
    assertIsKeyword('A');
    assertIsKeyword('Ā');
  }

  // "@-@" is the escape form for the literal '@' character.
  public void testRangeOfAtSignToAtSignRepresentsAtSign() {
    setKeyword("@-@");
    assertIsKeyword('@');
  }

  // Appending "^a" removes 'a' from the keyword set.
  public void testCaretRemovesAChar() {
    setKeyword("a");
    VimInjectorKt.getInjector().getVimscriptExecutor().execute("set iskeyword+=^a", true);
    assertIsNotKeyword('a');
  }

  // "^b-c" removes the whole range b..c while leaving 'a' a keyword.
  public void testCaretRemovesARange() {
    setKeyword("a-c");
    VimInjectorKt.getInjector().getVimscriptExecutor().execute("set iskeyword+=^b-c,d", true);
    assertIsKeyword('a');
    assertIsNotKeyword('b');
    assertIsNotKeyword('c');
  }

  // A caret with nothing after it means the literal '^' character.
  public void testCaretAloneRepresentsACaret() {
    setKeyword("^");
    assertIsKeyword('^');
  }

  // Multibyte letters are keywords under the default option value.
  public void testMultibyteCharactersAreKeywords() {
    assertIsKeyword('Ź');
  }

  // Each option entry converts to its own regex fragment: a literal or a class.
  public void testToRegex() {
    setKeyword("-,a-c");
    final List<String> res = KeywordOptionHelper.INSTANCE.toRegex();
    assertEquals(2, res.size());
    assertTrue(res.contains("-"));
    assertTrue(res.contains("[a-c]"));
  }

  // "@" (all letters) converts to the unicode letter property class.
  public void testAllLettersToRegex() {
    setKeyword("@");
    final List<String> res = KeywordOptionHelper.INSTANCE.toRegex();
    assertEquals(res.get(0), "\\p{L}");
  }
}
{ "content_hash": "1acb5b74013c8ed488c796f33edd4651", "timestamp": "", "source": "github", "line_count": 167, "max_line_length": 125, "avg_line_length": 28.191616766467067, "alnum_prop": 0.6901019541206457, "repo_name": "JetBrains/ideavim", "id": "d7307db008da4937333ea16d34a99286b957d8a2", "size": "4916", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/test/java/org/jetbrains/plugins/ideavim/option/KeywordOptionTest.java", "mode": "33188", "license": "mit", "language": [ { "name": "ANTLR", "bytes": "36945" }, { "name": "Java", "bytes": "625468" }, { "name": "Kotlin", "bytes": "4156281" } ], "symlink_target": "" }
<?xml version="1.0" encoding="utf-8"?> <manifest xmlns:android="http://schemas.android.com/apk/res/android" package="studios.codelight.smartlogin"> <uses-permission android:name="android.permission.INTERNET" /> <application android:allowBackup="true" android:icon="@mipmap/ic_launcher" android:label="@string/app_name" android:theme="@style/AppTheme"> <meta-data android:name="com.facebook.sdk.ApplicationId" android:value="@string/facebook_app_id"/> <activity android:name=".MainActivity" android:label="@string/app_name"> <intent-filter> <action android:name="android.intent.action.MAIN" /> <category android:name="android.intent.category.LAUNCHER" /> </intent-filter> </activity> </application> </manifest>
{ "content_hash": "be0224cf027b5b644cd18f27758a765c", "timestamp": "", "source": "github", "line_count": 24, "max_line_length": 106, "avg_line_length": 36.041666666666664, "alnum_prop": 0.6277456647398844, "repo_name": "weiwenqiang/GitHub", "id": "183b2edc27272736c4baff2487085f0d0952d431", "size": "865", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Login/Android-Smart-Login-master/app/src/main/AndroidManifest.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "87" }, { "name": "C", "bytes": "42062" }, { "name": "C++", "bytes": "12137" }, { "name": "CMake", "bytes": "202" }, { "name": "CSS", "bytes": "75087" }, { "name": "Clojure", "bytes": "12036" }, { "name": "FreeMarker", "bytes": "21704" }, { "name": "Groovy", "bytes": "55083" }, { "name": "HTML", "bytes": "61549" }, { "name": "Java", "bytes": "42222825" }, { "name": "JavaScript", "bytes": "216823" }, { "name": "Kotlin", "bytes": "24319" }, { "name": "Makefile", "bytes": "19490" }, { "name": "Perl", "bytes": "280" }, { "name": "Prolog", "bytes": "1030" }, { "name": "Python", "bytes": "13032" }, { "name": "Scala", "bytes": "310450" }, { "name": "Shell", "bytes": "27802" } ], "symlink_target": "" }
<!-- @license Apache-2.0 Copyright (c) 2018 The Stdlib Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> <!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta http-equiv="x-ua-compatible" content="ie=edge"> <meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no"> <title></title> <style> /* http://meyerweb.com/eric/tools/css/reset/ v2.0 | 20110126 License: none (public domain) */ html, body, div, span, applet, object, iframe, h1, h2, h3, h4, h5, h6, p, blockquote, pre, a, abbr, acronym, address, big, cite, code, del, dfn, em, img, ins, kbd, q, s, samp, small, strike, strong, sub, sup, tt, var, b, u, i, center, dl, dt, dd, ol, ul, li, fieldset, form, label, legend, table, caption, tbody, tfoot, thead, tr, th, td, article, aside, canvas, details, embed, figure, figcaption, footer, header, hgroup, menu, nav, output, ruby, section, summary, time, mark, audio, video { margin: 0; padding: 0; border: 0; font-size: 100%; font: inherit; vertical-align: baseline; } /* HTML5 display-role reset for older browsers */ article, aside, details, figcaption, figure, footer, header, hgroup, menu, nav, section { display: block; } body { line-height: 1; } ol, ul { list-style: none; } blockquote, q { quotes: none; } blockquote:before, blockquote:after, q:before, q:after { content: ''; content: none; } table { border-collapse: collapse; border-spacing: 0; } </style> <style> body { box-sizing: border-box; width: 100%; padding: 40px; } #console { width: 100%; } </style> 
</head>

<body>
    <p id="status">Running...</p>
    <br>
    <div id="console"></div>
    <script type="text/javascript">
    (function() {
        'use strict';

        // VARIABLES //

        // Console methods to redirect into the on-page #console element:
        var methods = [
            'log',
            'error',
            'warn',
            'dir',
            'debug',
            'info',
            'trace'
        ];


        // MAIN //

        /**
        * Main. Replaces the console methods listed above with a writer that
        * appends output to the `#console` DOM element, so benchmark output is
        * visible on the page itself.
        *
        * @private
        */
        function main() {
            var console;
            var str;
            var el;
            var i;

            // FIXME: IE9 has a non-standard `console.log` object (http://stackoverflow.com/questions/5538972/console-log-apply-not-working-in-ie9)
            console = window.console || {};
            for ( i = 0; i < methods.length; i++ ) {
                console[ methods[ i ] ] = write;
            }
            el = document.querySelector( '#console' );
            str = el.innerHTML;

            /**
            * Writes content to a DOM element. Accepts any number of arguments
            * (console methods are variadic) and joins them with a space.
            * Note: substitution strings (`%s`, `%d`, ...) are NOT interpolated,
            * and the message is inserted as raw HTML (benchmark output is
            * expected to be plain text).
            *
            * @private
            */
            function write() {
                // Generalization: the original shim assumed a single argument and
                // silently dropped the rest; collect and join all arguments instead.
                var parts = Array.prototype.slice.call( arguments );
                str += '<p>'+parts.join( ' ' )+'</p>';
                el.innerHTML = str;
            }
        }

        main();
    })();
    </script>
    <script type="text/javascript" src="/docs/api/latest/@stdlib/math/iter/special/trunc2/benchmark_bundle.js"></script>
</body>
</html>
{ "content_hash": "0ed56d74cb34b4e003663388c02350f1", "timestamp": "", "source": "github", "line_count": 150, "max_line_length": 139, "avg_line_length": 22.32, "alnum_prop": 0.6230585424133811, "repo_name": "stdlib-js/www", "id": "e99b984abd43557fdb1e044e31925adb7f675d95", "size": "3348", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "public/docs/api/latest/@stdlib/math/iter/special/trunc2/benchmark.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "190538" }, { "name": "HTML", "bytes": "158086013" }, { "name": "Io", "bytes": "14873" }, { "name": "JavaScript", "bytes": "5395746994" }, { "name": "Makefile", "bytes": "40479" }, { "name": "Shell", "bytes": "9744" } ], "symlink_target": "" }
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE78_OS_Command_Injection__char_environment_w32_spawnv_16.c
Label Definition File: CWE78_OS_Command_Injection.strings.label.xml
Template File: sources-sink-16.tmpl.c
*/
/*
 * @description
 * CWE: 78 OS Command Injection
 * BadSource: environment Read input from an environment variable
 * GoodSource: Fixed string
 * Sink: w32_spawnv
 *    BadSink : execute command with spawnv
 * Flow Variant: 16 Control flow: while(1)
 *
 * NOTE (review): this is a Juliet/SARD benchmark test case. The command
 * injection in the "bad" path is INTENTIONAL — it exists so that static
 * analysis tools can be scored on detecting it. Do not "fix" the flaw.
 */

#include "std_testcase.h"

#include <wchar.h>

#ifdef _WIN32
#define COMMAND_INT_PATH "%WINDIR%\\system32\\cmd.exe"
#define COMMAND_INT "cmd.exe"
#define COMMAND_ARG1 "/c"
#define COMMAND_ARG2 "dir "
#define COMMAND_ARG3 data
#else /* NOT _WIN32 */
#include <unistd.h>
#define COMMAND_INT_PATH "/bin/sh"
#define COMMAND_INT "sh"
#define COMMAND_ARG1 "-c"
#define COMMAND_ARG2 "ls "
#define COMMAND_ARG3 data
#endif

/* Environment variable that supplies the (attacker-controlled) input. */
#define ENV_VARIABLE "ADD"

/* Both branches resolve to getenv; the indirection is a template artifact
 * kept so Windows builds could substitute a different accessor. */
#ifdef _WIN32
#define GETENV getenv
#else
#define GETENV getenv
#endif

#include <process.h>

#ifndef OMITBAD

void CWE78_OS_Command_Injection__char_environment_w32_spawnv_16_bad()
{
    char * data;
    char dataBuffer[100] = COMMAND_ARG2;
    data = dataBuffer;
    /* Flow variant 16: the "loop" always runs exactly once (break at the end);
     * it only exists to exercise tools' control-flow handling. */
    while(1)
    {
        {
            /* Append input from an environment variable to data */
            size_t dataLen = strlen(data);
            char * environment = GETENV(ENV_VARIABLE);
            /* If there is data in the environment variable */
            if (environment != NULL)
            {
                /* POTENTIAL FLAW: Read data from an environment variable */
                /* Tainted data is concatenated into the command string with no
                 * validation — this is the injection source under test. */
                strncat(data+dataLen, environment, 100-dataLen-1);
            }
        }
        break;
    }
    {
        char *args[] = {COMMAND_INT_PATH, COMMAND_ARG1, COMMAND_ARG3, NULL};
        /* spawnv - specify the path where the command is located */
        /* POTENTIAL FLAW: Execute command without validating input possibly leading to command injection */
        _spawnv(_P_WAIT, COMMAND_INT_PATH, args);
    }
}

#endif /* OMITBAD */

#ifndef OMITGOOD

/* goodG2B() - use goodsource and badsink by changing the conditions on the while statements */
static void goodG2B()
{
    char * data;
    char dataBuffer[100] = COMMAND_ARG2;
    data = dataBuffer;
    while(1)
    {
        /* FIX: Append a fixed string to data (not user / external input) */
        strcat(data, "*.*");
        break;
    }
    {
        char *args[] = {COMMAND_INT_PATH, COMMAND_ARG1, COMMAND_ARG3, NULL};
        /* spawnv - specify the path where the command is located */
        /* POTENTIAL FLAW: Execute command without validating input possibly leading to command injection */
        _spawnv(_P_WAIT, COMMAND_INT_PATH, args);
    }
}

void CWE78_OS_Command_Injection__char_environment_w32_spawnv_16_good()
{
    goodG2B();
}

#endif /* OMITGOOD */

/* Below is the main(). It is only used when building this testcase on
 * its own for testing or for building a binary to use in testing binary
 * analysis tools. It is not used when compiling all the testcases as one
 * application, which is how source code analysis tools are tested. */

#ifdef INCLUDEMAIN

int main(int argc, char * argv[])
{
    /* seed randomness */
    srand( (unsigned)time(NULL) );
#ifndef OMITGOOD
    printLine("Calling good()...");
    CWE78_OS_Command_Injection__char_environment_w32_spawnv_16_good();
    printLine("Finished good()");
#endif /* OMITGOOD */
#ifndef OMITBAD
    printLine("Calling bad()...");
    CWE78_OS_Command_Injection__char_environment_w32_spawnv_16_bad();
    printLine("Finished bad()");
#endif /* OMITBAD */
    return 0;
}

#endif
{ "content_hash": "b6cb8ee81f2b669155e1fe998fcba909", "timestamp": "", "source": "github", "line_count": 132, "max_line_length": 108, "avg_line_length": 28.363636363636363, "alnum_prop": 0.6316773504273504, "repo_name": "JianpingZeng/xcc", "id": "fbd0326ea026f9c3d59c2f27c2bd0b9ab93a07f1", "size": "3744", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "xcc/test/juliet/testcases/CWE78_OS_Command_Injection/s03/CWE78_OS_Command_Injection__char_environment_w32_spawnv_16.c", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
<?php

namespace Go\Aop;

/**
 * Interface-enumeration of framework features to use in checking and configuration.
 *
 * Constant values are distinct bit flags, so multiple features can be combined
 * with the bitwise OR operator.
 */
interface Features
{
    /**
     * Enables interception of system function.
     * By default this feature is disabled, because this option is very expensive.
     */
    const INTERCEPT_FUNCTIONS = 1;

    /**
     * Enables interception of "new" operator in the source code
     * By default this feature is disabled, because it's very tricky
     */
    const INTERCEPT_INITIALIZATIONS = 2;

    /**
     * Enables interception of "include"/"require" operations in legacy code
     * By default this feature is disabled, because only composer should be used
     */
    const INTERCEPT_INCLUDES = 4;

    // NOTE(review): bit values 8 and 16 are skipped here — presumably reserved
    // by flags removed in earlier versions; confirm before reusing them.

    /**
     * Enables usage of splat '...' operator, available since PHP5.6
     */
    const USE_SPLAT_OPERATOR = 32;

    /**
     * Do not check the cache presence and assume that cache is already prepared
     *
     * This flag is usable for read-only file systems (GAE, phar, etc)
     */
    const PREBUILT_CACHE = 64;
}
{ "content_hash": "1d09539cb206aafdcfa9d14aec10b447", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 83, "avg_line_length": 26.325, "alnum_prop": 0.6562203228869895, "repo_name": "ayqy/aop", "id": "a04e6e4760230aea2372c9d33da95d4797be399b", "size": "1271", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "vendor/goaop/framework/src/Aop/Features.php", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "9682" } ], "symlink_target": "" }
const assert = require('assert');
const {spawn} = require('child_process');
const {request} = require('gaxios');
const uuid = require('uuid');
const waitPort = require('wait-port');
// [END functions_pubsub_integration_test]

describe('functions_helloworld_pubsub integration test', () => {
  // [START functions_pubsub_integration_test]
  it('helloPubSub: should print a name', async () => {
    const name = uuid.v4();
    const PORT = 8088; // Each running framework instance needs a unique port

    const encodedName = Buffer.from(name).toString('base64');
    const pubsubMessage = {data: {data: encodedName}};
    const ffProc = spawn('npx', [
      'functions-framework',
      '--target',
      'helloPubSub',
      '--signature-type',
      'event',
      '--port',
      PORT,
    ]);

    try {
      const ffProcHandler = new Promise((resolve, reject) => {
        let stdout = '';
        let stderr = '';
        ffProc.stdout.on('data', data => (stdout += data));
        ffProc.stderr.on('data', data => (stderr += data));
        // Consistency fix: surface spawn failures (e.g. `npx` not found),
        // matching the second test below.
        ffProc.on('error', reject);
        ffProc.on('exit', code => {
          if (code === 0 || code === null) {
            // code === null corresponds to a signal-kill
            // (which doesn't necessarily indicate a test failure)
            resolve(stdout);
          } else {
            stderr = `Error code: ${code}\n${stderr}`;
            reject(new Error(stderr));
          }
        });
      });
      await waitPort({host: 'localhost', port: PORT});

      // Send HTTP request simulating Pub/Sub message
      // (GCF translates Pub/Sub messages to HTTP requests internally)
      const response = await request({
        url: `http://localhost:${PORT}/`,
        method: 'POST',
        data: pubsubMessage,
      });
      ffProc.kill();
      assert.strictEqual(response.status, 204);

      // Wait for the functions framework to stop
      const stdout = await ffProcHandler;
      assert.match(stdout, new RegExp(`Hello, ${name}!`));
    } catch (err) {
      // BUG FIX: the original bare `catch {}` swallowed every error —
      // including assertion failures — so this test could never fail.
      // Clean up the child process, then rethrow so mocha reports it.
      if (ffProc) {
        // Make sure the functions framework is stopped
        ffProc.kill();
      }
      throw err;
    }
  });
  // [END functions_pubsub_integration_test]

  it('helloPubSub: should print hello world', async () => {
    const pubsubMessage = {data: {}};
    const PORT = 8089; // Each running framework instance needs a unique port
    const ffProc = spawn('npx', [
      'functions-framework',
      '--target',
      'helloPubSub',
      '--signature-type',
      'event',
      '--port',
      PORT,
    ]);

    try {
      const ffProcHandler = new Promise((resolve, reject) => {
        let stdout = '';
        let stderr = '';
        ffProc.stdout.on('data', data => (stdout += data));
        ffProc.stderr.on('data', data => (stderr += data));
        ffProc.on('error', reject);
        ffProc.on('exit', code => {
          if (code === 0 || code === null) {
            // code === null corresponds to a signal-kill
            // (which doesn't necessarily indicate a test failure)
            resolve(stdout);
          } else {
            stderr = `Error code: ${code}\n${stderr}`;
            reject(new Error(stderr));
          }
        });
      });
      await waitPort({host: 'localhost', port: PORT});

      // Send HTTP request simulating Pub/Sub message
      // (GCF translates Pub/Sub messages to HTTP requests internally)
      const response = await request({
        url: `http://localhost:${PORT}/`,
        method: 'POST',
        data: pubsubMessage,
      });
      ffProc.kill();
      assert.strictEqual(response.status, 204);

      // Wait for functions-framework process to exit
      const stdout = await ffProcHandler;
      assert.match(stdout, /Hello, World!/);
    } catch (err) {
      // BUG FIX: rethrow instead of silently swallowing failures (see above).
      if (ffProc) {
        // Make sure the functions framework is stopped
        ffProc.kill();
      }
      throw err;
    }
  });
  // [START functions_pubsub_integration_test]
});
// [END functions_pubsub_integration_test]
{ "content_hash": "ac9c7dc1b207cc962dee9271e8440c26", "timestamp": "", "source": "github", "line_count": 125, "max_line_length": 77, "avg_line_length": 31.44, "alnum_prop": 0.5615776081424937, "repo_name": "GoogleCloudPlatform/nodejs-docs-samples", "id": "cc51ece71365642f6da3ec85ddc4a554901cbb52", "size": "4564", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "functions/helloworld/helloPubSub/test/sample.integration.pubsub.test.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "2935" }, { "name": "Dockerfile", "bytes": "20956" }, { "name": "HTML", "bytes": "13643" }, { "name": "JavaScript", "bytes": "3173432" }, { "name": "PowerShell", "bytes": "2187" }, { "name": "Procfile", "bytes": "153" }, { "name": "Pug", "bytes": "11328" }, { "name": "Shell", "bytes": "50762" }, { "name": "TypeScript", "bytes": "884" } ], "symlink_target": "" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (version 1.7.0_91) on Tue Dec 29 12:44:17 AEDT 2015 --> <title>org.bouncycastle.asn1.isismtt.x509 (Bouncy Castle Library 1.54 API Specification)</title> <meta name="date" content="2015-12-29"> <link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style"> </head> <body> <script type="text/javascript"><!-- if (location.href.indexOf('is-external=true') == -1) { parent.document.title="org.bouncycastle.asn1.isismtt.x509 (Bouncy Castle Library 1.54 API Specification)"; } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar_top"> <!-- --> </a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li class="navBarCell1Rev">Package</li> <li>Class</li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> <div class="aboutLanguage"><em><b>Bouncy Castle Cryptography Library 1.54</b></em></div> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../../org/bouncycastle/asn1/isismtt/ocsp/package-summary.html">Prev Package</a></li> <li><a href="../../../../../org/bouncycastle/asn1/iso/package-summary.html">Next Package</a></li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?org/bouncycastle/asn1/isismtt/x509/package-summary.html" target="_top">Frames</a></li> <li><a href="package-summary.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a 
href="../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h1 title="Package" class="title">Package&nbsp;org.bouncycastle.asn1.isismtt.x509</h1> <div class="docSummary"> <div class="block">Support classes for the ISIS-MTT X.509 Certificate Extensions.</div> </div> <p>See:&nbsp;<a href="#package_description">Description</a></p> </div> <div class="contentContainer"> <ul class="blockList"> <li class="blockList"> <table class="packageSummary" border="0" cellpadding="3" cellspacing="0" summary="Class Summary table, listing classes, and an explanation"> <caption><span>Class Summary</span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Class</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><a href="../../../../../org/bouncycastle/asn1/isismtt/x509/AdditionalInformationSyntax.html" title="class in org.bouncycastle.asn1.isismtt.x509">AdditionalInformationSyntax</a></td> <td class="colLast"> <div class="block">Some other information of non-restrictive nature regarding the usage of this certificate.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../../org/bouncycastle/asn1/isismtt/x509/Admissions.html" title="class in org.bouncycastle.asn1.isismtt.x509">Admissions</a></td> <td class="colLast"> <div class="block">An Admissions structure.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><a href="../../../../../org/bouncycastle/asn1/isismtt/x509/AdmissionSyntax.html" title="class in org.bouncycastle.asn1.isismtt.x509">AdmissionSyntax</a></td> <td class="colLast"> <div 
class="block">Attribute to indicate admissions to certain professions.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../../org/bouncycastle/asn1/isismtt/x509/DeclarationOfMajority.html" title="class in org.bouncycastle.asn1.isismtt.x509">DeclarationOfMajority</a></td> <td class="colLast"> <div class="block">A declaration of majority.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><a href="../../../../../org/bouncycastle/asn1/isismtt/x509/MonetaryLimit.html" title="class in org.bouncycastle.asn1.isismtt.x509">MonetaryLimit</a></td> <td class="colLast"> <div class="block">Monetary limit for transactions.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../../org/bouncycastle/asn1/isismtt/x509/NamingAuthority.html" title="class in org.bouncycastle.asn1.isismtt.x509">NamingAuthority</a></td> <td class="colLast"> <div class="block">Names of authorities which are responsible for the administration of title registers.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><a href="../../../../../org/bouncycastle/asn1/isismtt/x509/ProcurationSyntax.html" title="class in org.bouncycastle.asn1.isismtt.x509">ProcurationSyntax</a></td> <td class="colLast"> <div class="block">Attribute to indicate that the certificate holder may sign in the name of a third person.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../../org/bouncycastle/asn1/isismtt/x509/ProfessionInfo.html" title="class in org.bouncycastle.asn1.isismtt.x509">ProfessionInfo</a></td> <td class="colLast"> <div class="block">Professions, specializations, disciplines, fields of activity, etc.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><a href="../../../../../org/bouncycastle/asn1/isismtt/x509/Restriction.html" title="class in org.bouncycastle.asn1.isismtt.x509">Restriction</a></td> <td class="colLast"> <div class="block">Some other restriction regarding the usage of this 
certificate.</div> </td> </tr> </tbody> </table> </li> </ul> <a name="package_description"> <!-- --> </a> <h2 title="Package org.bouncycastle.asn1.isismtt.x509 Description">Package org.bouncycastle.asn1.isismtt.x509 Description</h2> <div class="block">Support classes for the ISIS-MTT X.509 Certificate Extensions.</div> </div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar_bottom"> <!-- --> </a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li class="navBarCell1Rev">Package</li> <li>Class</li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> <div class="aboutLanguage"><em><b>Bouncy Castle Cryptography Library 1.54</b></em></div> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../../org/bouncycastle/asn1/isismtt/ocsp/package-summary.html">Prev Package</a></li> <li><a href="../../../../../org/bouncycastle/asn1/iso/package-summary.html">Next Package</a></li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?org/bouncycastle/asn1/isismtt/x509/package-summary.html" target="_top">Frames</a></li> <li><a href="package-summary.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> </body> 
</html>
{ "content_hash": "19e9f9970ce9f2f582b1e1de955760c0", "timestamp": "", "source": "github", "line_count": 195, "max_line_length": 202, "avg_line_length": 41.261538461538464, "alnum_prop": 0.6624409644543873, "repo_name": "GaloisInc/hacrypto", "id": "fdfbdd8585e297bd6e88b3e90bf8143a6d5937aa", "size": "8046", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Java/BouncyCastle/BouncyCastle-1.54/lcrypto-jdk15on-154/javadoc/org/bouncycastle/asn1/isismtt/x509/package-summary.html", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "AGS Script", "bytes": "62991" }, { "name": "Ada", "bytes": "443" }, { "name": "AppleScript", "bytes": "4518" }, { "name": "Assembly", "bytes": "25398957" }, { "name": "Awk", "bytes": "36188" }, { "name": "Batchfile", "bytes": "530568" }, { "name": "C", "bytes": "344517599" }, { "name": "C#", "bytes": "7553169" }, { "name": "C++", "bytes": "36635617" }, { "name": "CMake", "bytes": "213895" }, { "name": "CSS", "bytes": "139462" }, { "name": "Coq", "bytes": "320964" }, { "name": "Cuda", "bytes": "103316" }, { "name": "DIGITAL Command Language", "bytes": "1545539" }, { "name": "DTrace", "bytes": "33228" }, { "name": "Emacs Lisp", "bytes": "22827" }, { "name": "GDB", "bytes": "93449" }, { "name": "Gnuplot", "bytes": "7195" }, { "name": "Go", "bytes": "393057" }, { "name": "HTML", "bytes": "41466430" }, { "name": "Hack", "bytes": "22842" }, { "name": "Haskell", "bytes": "64053" }, { "name": "IDL", "bytes": "3205" }, { "name": "Java", "bytes": "49060925" }, { "name": "JavaScript", "bytes": "3476841" }, { "name": "Jolie", "bytes": "412" }, { "name": "Lex", "bytes": "26290" }, { "name": "Logos", "bytes": "108920" }, { "name": "Lua", "bytes": "427" }, { "name": "M4", "bytes": "2508986" }, { "name": "Makefile", "bytes": "29393197" }, { "name": "Mathematica", "bytes": "48978" }, { "name": "Mercury", "bytes": "2053" }, { "name": "Module Management System", "bytes": "1313" }, { "name": "NSIS", "bytes": "19051" }, { 
"name": "OCaml", "bytes": "981255" }, { "name": "Objective-C", "bytes": "4099236" }, { "name": "Objective-C++", "bytes": "243505" }, { "name": "PHP", "bytes": "22677635" }, { "name": "Pascal", "bytes": "99565" }, { "name": "Perl", "bytes": "35079773" }, { "name": "Prolog", "bytes": "350124" }, { "name": "Python", "bytes": "1242241" }, { "name": "Rebol", "bytes": "106436" }, { "name": "Roff", "bytes": "16457446" }, { "name": "Ruby", "bytes": "49694" }, { "name": "Scheme", "bytes": "138999" }, { "name": "Shell", "bytes": "10192290" }, { "name": "Smalltalk", "bytes": "22630" }, { "name": "Smarty", "bytes": "51246" }, { "name": "SourcePawn", "bytes": "542790" }, { "name": "SystemVerilog", "bytes": "95379" }, { "name": "Tcl", "bytes": "35696" }, { "name": "TeX", "bytes": "2351627" }, { "name": "Verilog", "bytes": "91541" }, { "name": "Visual Basic", "bytes": "88541" }, { "name": "XS", "bytes": "38300" }, { "name": "Yacc", "bytes": "132970" }, { "name": "eC", "bytes": "33673" }, { "name": "q", "bytes": "145272" }, { "name": "sed", "bytes": "1196" } ], "symlink_target": "" }
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #ifndef __MASTER_HPP__ #define __MASTER_HPP__ #include <stdint.h> #include <list> #include <memory> #include <set> #include <string> #include <vector> #include <mesos/mesos.hpp> #include <mesos/resources.hpp> #include <mesos/type_utils.hpp> #include <mesos/maintenance/maintenance.hpp> #include <mesos/allocator/allocator.hpp> #include <mesos/master/contender.hpp> #include <mesos/master/detector.hpp> #include <mesos/master/master.hpp> #include <mesos/module/authenticator.hpp> #include <mesos/quota/quota.hpp> #include <mesos/scheduler/scheduler.hpp> #include <process/collect.hpp> #include <process/future.hpp> #include <process/limiter.hpp> #include <process/http.hpp> #include <process/owned.hpp> #include <process/process.hpp> #include <process/protobuf.hpp> #include <process/timer.hpp> #include <process/metrics/counter.hpp> #include <stout/boundedhashmap.hpp> #include <stout/cache.hpp> #include <stout/circular_buffer.hpp> #include <stout/foreach.hpp> #include <stout/hashmap.hpp> #include <stout/hashset.hpp> #include <stout/linkedhashmap.hpp> #include <stout/multihashmap.hpp> #include <stout/nothing.hpp> #include <stout/option.hpp> #include <stout/recordio.hpp> #include <stout/try.hpp> 
#include <stout/uuid.hpp> #include "common/heartbeater.hpp" #include "common/http.hpp" #include "common/resources_utils.hpp" #include "files/files.hpp" #include "internal/devolve.hpp" #include "internal/evolve.hpp" #include "master/constants.hpp" #include "master/flags.hpp" #include "master/machine.hpp" #include "master/metrics.hpp" #include "master/validation.hpp" #include "messages/messages.hpp" namespace process { class RateLimiter; // Forward declaration. } namespace mesos { // Forward declarations. class Authorizer; class ObjectApprovers; namespace internal { // Forward declarations. namespace registry { class Slaves; } class Registry; class WhitelistWatcher; namespace master { class Master; class Registrar; class SlaveObserver; struct BoundedRateLimiter; struct Framework; struct Role; struct Slave { Slave(Master* const _master, SlaveInfo _info, const process::UPID& _pid, const MachineID& _machineId, const std::string& _version, std::vector<SlaveInfo::Capability> _capabilites, const process::Time& _registeredTime, std::vector<Resource> _checkpointedResources, const Option<UUID>& _resourceVersion, std::vector<ExecutorInfo> executorInfos = std::vector<ExecutorInfo>(), std::vector<Task> tasks = std::vector<Task>()); ~Slave(); Task* getTask( const FrameworkID& frameworkId, const TaskID& taskId) const; void addTask(Task* task); // Update slave to recover the resources that were previously // being used by `task`. // // TODO(bmahler): This is a hack for performance. We need to // maintain resource counters because computing task resources // functionally for all tasks is expensive, for now. void recoverResources(Task* task); void removeTask(Task* task); void addOperation(Operation* operation); void recoverResources(Operation* operation); void removeOperation(Operation* operation); // Marks a non-speculative operation as an orphan when the originating // framework is torn down by the master, or when an agent reregisters // with operations from unknown frameworks. 
// If the operation is
  // non-terminal, this has the side effect of modifying the agent's
  // total resources, and should therefore be followed by
  // `allocator->updateSlave()`.
  void markOperationAsOrphan(Operation* operation);

  // Returns the tracked operation with the given UUID, if any.
  Operation* getOperation(const UUID& uuid) const;

  // Bookkeeping for offers currently outstanding for this agent's
  // resources.
  void addOffer(Offer* offer);
  void removeOffer(Offer* offer);

  // Bookkeeping for inverse offers (maintenance primitives) currently
  // outstanding for this agent.
  void addInverseOffer(InverseOffer* inverseOffer);
  void removeInverseOffer(InverseOffer* inverseOffer);

  // Whether the given framework has the given executor registered on
  // this agent.
  bool hasExecutor(
      const FrameworkID& frameworkId,
      const ExecutorID& executorId) const;

  void addExecutor(
      const FrameworkID& frameworkId,
      const ExecutorInfo& executorInfo);

  void removeExecutor(
      const FrameworkID& frameworkId,
      const ExecutorID& executorId);

  // Applies the given resource conversions to this agent's resource
  // bookkeeping (e.g., as the result of applied operations; see the
  // comment on `totalResources` below).
  void apply(const std::vector<ResourceConversion>& conversions);

  // Refreshes this struct's info, version, capabilities and
  // checkpointed resources, e.g., when the agent reregisters.
  // NOTE(review): `_capabilites` is misspelled (`_capabilities`);
  // harmless in a declaration, but worth fixing alongside the definition.
  Try<Nothing> update(
      const SlaveInfo& info,
      const std::string& _version,
      const std::vector<SlaveInfo::Capability>& _capabilites,
      const Resources& _checkpointedResources,
      const Option<UUID>& resourceVersion);

  Master* const master;
  const SlaveID id;
  SlaveInfo info;

  const MachineID machineId;

  process::UPID pid;

  // TODO(bmahler): Use stout's Version when it can parse labels, etc.
  std::string version;

  // Agent capabilities.
  protobuf::slave::Capabilities capabilities;

  process::Time registeredTime;
  Option<process::Time> reregisteredTime;

  // Slave becomes disconnected when the socket closes.
  bool connected;

  // Slave becomes deactivated when it gets disconnected. In the
  // future this might also happen via HTTP endpoint.
  // No offers will be made for a deactivated slave.
  bool active;

  // Timer for marking slaves unreachable that become disconnected and
  // don't reregister. This timeout is larger than the slave
  // observer's timeout, so typically the slave observer will be the
  // one to mark such slaves unreachable; this timer is a backup for
  // when a slave responds to pings but does not reregister (e.g.,
  // because agent recovery has hung).
  Option<process::Timer> reregistrationTimer;

  // Executors running on this slave.
// // TODO(bmahler): Make this private to enforce that `addExecutor()` // and `removeExecutor()` are used, and provide a const view into // the executors. hashmap<FrameworkID, hashmap<ExecutorID, ExecutorInfo>> executors; // Tasks that have not yet been launched because they are currently // being authorized. This is similar to Framework's pendingTasks but we // track pendingTasks per agent separately to determine if any offer // operation for this agent would change resources requested by these tasks. hashmap<FrameworkID, hashmap<TaskID, TaskInfo>> pendingTasks; // Tasks present on this slave. // // TODO(bmahler): Make this private to enforce that `addTask()` and // `removeTask()` are used, and provide a const view into the tasks. // // TODO(bmahler): The task pointer ownership complexity arises from the fact // that we own the pointer here, but it's shared with the Framework struct. // We should find a way to eliminate this. hashmap<FrameworkID, hashmap<TaskID, Task*>> tasks; // Tasks that were asked to kill by frameworks. // This is used for reconciliation when the slave reregisters. multihashmap<FrameworkID, TaskID> killedTasks; // Pending operations or terminal operations that have // unacknowledged status updates on this agent. hashmap<UUID, Operation*> operations; // Pending operations whose originating framework is unknown. // These operations could be pending, or terminal with unacknowledged // status updates. // // This list can be populated whenever a framework is torn down in the // lifetime of the master, or when an agent reregisters with an operation. // // If the originating framework is completed, the master will // acknowledge any status updates instead of the framework. // If an orphan does not belong to a completed framework, the master // will only acknowledge status updates after a fixed delay. hashset<UUID> orphanedOperations; // Active offers on this slave. hashset<Offer*> offers; // Active inverse offers on this slave. 
hashset<InverseOffer*> inverseOffers;

  // Resources for active task / executors / operations.
  // Note that we maintain multiple copies of each shared resource in
  // `usedResources` as they are used by multiple tasks.
  hashmap<FrameworkID, Resources> usedResources;

  Resources offeredResources; // Offers.

  // Resources that should be checkpointed by the slave (e.g.,
  // persistent volumes, dynamic reservations, etc). These are either
  // in use by a task/executor, or are available for use and will be
  // re-offered to the framework.
  // TODO(jieyu): `checkpointedResources` is only for agent default
  // resources. Resources from resource providers are not included in
  // this field. Consider removing this field.
  Resources checkpointedResources;

  // The current total resources of the slave. Note that this is
  // different from 'info.resources()' because this also considers
  // operations (e.g., CREATE, RESERVE) that have been applied and
  // includes revocable resources and resources from resource
  // providers as well.
  Resources totalResources;

  // Used to establish the relationship between the operation and the
  // resources that the operation is operating on. Each resource
  // provider will keep a resource version UUID, and change it when it
  // believes that the resources from this resource provider are out
  // of sync from the master's view. The master will keep track of
  // the last known resource version UUID for each resource provider,
  // and attach the resource version UUID in each operation it sends
  // out. The resource provider should reject operations that have a
  // different resource version UUID than that it maintains, because
  // this means the operation is operating on resources that might
  // have already been invalidated.
  Option<UUID> resourceVersion;

  // Health-check observer for this agent (pings the agent and marks it
  // unreachable on timeout; see the comment on `reregistrationTimer`).
  SlaveObserver* observer;

  // Per-resource-provider bookkeeping mirrored from the agent's
  // `UpdateSlaveMessage`s.
  struct ResourceProvider {
    ResourceProviderInfo info;
    Resources totalResources;

    // Used to establish the relationship between the operation and the
    // resources that the operation is operating on.
Each resource // provider will keep a resource version UUID, and change it when it // believes that the resources from this resource provider are out // of sync from the master's view. The master will keep track of // the last known resource version UUID for each resource provider, // and attach the resource version UUID in each operation it sends // out. The resource provider should reject operations that have a // different resource version UUID than that it maintains, because // this means the operation is operating on resources that might // have already been invalidated. UUID resourceVersion; // Pending operations or terminal operations that have // unacknowledged status updates. hashmap<UUID, Operation*> operations; }; hashmap<ResourceProviderID, ResourceProvider> resourceProviders; private: Slave(const Slave&); // No copying. Slave& operator=(const Slave&); // No assigning. }; inline std::ostream& operator<<(std::ostream& stream, const Slave& slave) { return stream << slave.id << " at " << slave.pid << " (" << slave.info.hostname() << ")"; } class Master : public ProtobufProcess<Master> { public: Master(mesos::allocator::Allocator* allocator, Registrar* registrar, Files* files, mesos::master::contender::MasterContender* contender, mesos::master::detector::MasterDetector* detector, const Option<Authorizer*>& authorizer, const Option<std::shared_ptr<process::RateLimiter>>& slaveRemovalLimiter, const Flags& flags = Flags()); ~Master() override; // Compare this master's capabilities with registry's minimum capability. // Return the set of capabilities missing from this master. static hashset<std::string> misingMinimumCapabilities( const MasterInfo& masterInfo, const Registry& registry); // Message handlers. 
void submitScheduler( const std::string& name); void registerFramework( const process::UPID& from, RegisterFrameworkMessage&& registerFrameworkMessage); void reregisterFramework( const process::UPID& from, ReregisterFrameworkMessage&& reregisterFrameworkMessage); void unregisterFramework( const process::UPID& from, const FrameworkID& frameworkId); void deactivateFramework( const process::UPID& from, const FrameworkID& frameworkId); // TODO(vinod): Remove this once the old driver is removed. void resourceRequest( const process::UPID& from, const FrameworkID& frameworkId, const std::vector<Request>& requests); void launchTasks( const process::UPID& from, LaunchTasksMessage&& launchTasksMessage); void reviveOffers( const process::UPID& from, const FrameworkID& frameworkId, const std::vector<std::string>& role); void killTask( const process::UPID& from, const FrameworkID& frameworkId, const TaskID& taskId); void statusUpdateAcknowledgement( const process::UPID& from, StatusUpdateAcknowledgementMessage&& statusUpdateAcknowledgementMessage); void schedulerMessage( const process::UPID& from, FrameworkToExecutorMessage&& frameworkToExecutorMessage); void executorMessage( const process::UPID& from, ExecutorToFrameworkMessage&& executorToFrameworkMessage); void registerSlave( const process::UPID& from, RegisterSlaveMessage&& registerSlaveMessage); void reregisterSlave( const process::UPID& from, ReregisterSlaveMessage&& incomingMessage); void unregisterSlave( const process::UPID& from, const SlaveID& slaveId); void statusUpdate( StatusUpdateMessage&& statusUpdateMessage); void reconcileTasks( const process::UPID& from, ReconcileTasksMessage&& reconcileTasksMessage); void updateOperationStatus( UpdateOperationStatusMessage&& update); void exitedExecutor( const process::UPID& from, const SlaveID& slaveId, const FrameworkID& frameworkId, const ExecutorID& executorId, int32_t status); void updateSlave(UpdateSlaveMessage&& message); void updateUnavailability( const MachineID& 
machineId, const Option<Unavailability>& unavailability); // Marks the agent unreachable and returns whether the agent was // marked unreachable. Returns false if the agent is already // in a transitioning state or has transitioned into another // state (this includes already being marked unreachable). // The `duringMasterFailover` parameter specifies whether this // agent is transitioning from a recovered state (true) or a // registered state (false). // // Discarding currently not supported. // // Will not return a failure (this will crash the master // internally in the case of a registry failure). process::Future<bool> markUnreachable( const SlaveInfo& slave, bool duringMasterFailover, const std::string& message); void markGone(const SlaveID& slaveId, const TimeInfo& goneTime); void authenticate( const process::UPID& from, const process::UPID& pid); // TODO(bmahler): It would be preferred to use a unique libprocess // Process identifier (PID is not sufficient) for identifying the // framework instance, rather than relying on re-registration time. void frameworkFailoverTimeout( const FrameworkID& frameworkId, const process::Time& reregisteredTime); void offer( const FrameworkID& frameworkId, const hashmap<std::string, hashmap<SlaveID, Resources>>& resources); void inverseOffer( const FrameworkID& frameworkId, const hashmap<SlaveID, UnavailableResources>& resources); // Invoked when there is a newly elected leading master. // Made public for testing purposes. void detected(const process::Future<Option<MasterInfo>>& _leader); // Invoked when the contender has lost the candidacy. // Made public for testing purposes. void lostCandidacy(const process::Future<Nothing>& lost); // Continuation of recover(). // Made public for testing purposes. 
process::Future<Nothing> _recover(const Registry& registry); MasterInfo info() const { return info_; } protected: void initialize() override; void finalize() override; void consume(process::MessageEvent&& event) override; void consume(process::ExitedEvent&& event) override; void exited(const process::UPID& pid) override; void exited( const FrameworkID& frameworkId, const StreamingHttpConnection<v1::scheduler::Event>& http); void _exited(Framework* framework); // Invoked upon noticing a subscriber disconnection. void exited(const id::UUID& id); void agentReregisterTimeout(const SlaveID& slaveId); Nothing _agentReregisterTimeout(const SlaveID& slaveId); // Invoked when the message is ready to be executed after // being throttled. // 'principal' being None indicates it is throttled by // 'defaultLimiter'. void throttled( process::MessageEvent&& event, const Option<std::string>& principal); // Continuations of consume(). void _consume(process::MessageEvent&& event); void _consume(process::ExitedEvent&& event); // Helper method invoked when the capacity for a framework // principal is exceeded. void exceededCapacity( const process::MessageEvent& event, const Option<std::string>& principal, uint64_t capacity); // Recovers state from the registrar. 
process::Future<Nothing> recover(); void recoveredSlavesTimeout(const Registry& registry); void _registerSlave( const process::UPID& pid, RegisterSlaveMessage&& registerSlaveMessage, const Option<process::http::authentication::Principal>& principal, const process::Future<bool>& authorized); void __registerSlave( const process::UPID& pid, RegisterSlaveMessage&& registerSlaveMessage, const process::Future<bool>& admit); void _reregisterSlave( const process::UPID& pid, ReregisterSlaveMessage&& incomingMessage, const Option<process::http::authentication::Principal>& principal, const process::Future<bool>& authorized); void __reregisterSlave( const process::UPID& pid, ReregisterSlaveMessage&& incomingMessage, const process::Future<bool>& readmit); void ___reregisterSlave( const process::UPID& pid, ReregisterSlaveMessage&& incomingMessage, const process::Future<bool>& updated); void updateSlaveFrameworks( Slave* slave, const std::vector<FrameworkInfo>& frameworks); // 'future' is the future returned by the authenticator. void _authenticate( const process::UPID& pid, const process::Future<Option<std::string>>& future); void authenticationTimeout(process::Future<Option<std::string>> future); void fileAttached(const process::Future<Nothing>& result, const std::string& path); // Invoked when the contender has entered the contest. void contended(const process::Future<process::Future<Nothing>>& candidacy); // When a slave that was previously registered with this master // reregisters, we need to reconcile the master's view of the // slave's tasks and executors. This function also sends the // `SlaveReregisteredMessage`. void reconcileKnownSlave( Slave* slave, const std::vector<ExecutorInfo>& executors, const std::vector<Task>& tasks); // Add a framework. void addFramework( Framework* framework, const std::set<std::string>& suppressedRoles); // Recover a framework from its `FrameworkInfo`. 
This happens after // master failover, when an agent running one of the framework's // tasks reregisters or when the framework itself reregisters, // whichever happens first. The result of this function is a // registered, inactive framework with state `RECOVERED`. void recoverFramework( const FrameworkInfo& info, const std::set<std::string>& suppressedRoles); // Transition a framework from `RECOVERED` to `CONNECTED` state and // activate it. This happens at most once after master failover, the // first time that the framework reregisters with the new master. // Exactly one of `newPid` or `http` must be provided. Try<Nothing> activateRecoveredFramework( Framework* framework, const FrameworkInfo& frameworkInfo, const Option<process::UPID>& pid, const Option<StreamingHttpConnection<v1::scheduler::Event>>& http, const std::set<std::string>& suppressedRoles); // Replace the scheduler for a framework with a new process ID, in // the event of a scheduler failover. void failoverFramework(Framework* framework, const process::UPID& newPid); // Replace the scheduler for a framework with a new HTTP connection, // in the event of a scheduler failover. void failoverFramework( Framework* framework, const StreamingHttpConnection<v1::scheduler::Event>& http); void _failoverFramework(Framework* framework); // Kill all of a framework's tasks, delete the framework object, and // reschedule offers that were assigned to this framework. void removeFramework(Framework* framework); // Remove a framework from the slave, i.e., remove its tasks and // executors and recover the resources. void removeFramework(Slave* slave, Framework* framework); void updateFramework( Framework* framework, const FrameworkInfo& frameworkInfo, const std::set<std::string>& suppressedRoles); void disconnect(Framework* framework); void deactivate(Framework* framework, bool rescind); void disconnect(Slave* slave); void deactivate(Slave* slave); // Add a slave. 
void addSlave( Slave* slave, std::vector<Archive::Framework>&& completedFrameworks); void _markUnreachable( const SlaveInfo& slave, const TimeInfo& unreachableTime, bool duringMasterFailover, const std::string& message, bool registrarResult); void sendSlaveLost(const SlaveInfo& slaveInfo); // Remove the slave from the registrar and from the master's state. // // TODO(bmahler): 'reason' is optional until MESOS-2317 is resolved. void removeSlave( Slave* slave, const std::string& message, Option<process::metrics::Counter> reason = None()); // Removes an agent from the master's state in the following cases: // * When maintenance is started on an agent // * When an agent registers with a new ID from a previously-known IP + port // * When an agent unregisters itself with an `UnregisterSlaveMessage` void _removeSlave( Slave* slave, const process::Future<bool>& registrarResult, const std::string& removalCause, Option<process::metrics::Counter> reason = None()); // Removes an agent from the master's state in the following cases: // * When marking an agent unreachable // * When marking an agent gone // // NOTE that in spite of the name `__removeSlave()`, this function is NOT a // continuation of `_removeSlave()`. Rather, these two functions perform // similar logic for slightly different cases. // // TODO(greggomann): refactor `_removeSlave` and `__removeSlave` into a single // common helper function. (See MESOS-9550) void __removeSlave( Slave* slave, const std::string& message, const Option<TimeInfo>& unreachableTime); // Validates that the framework is authenticated, if required. Option<Error> validateFrameworkAuthentication( const FrameworkInfo& frameworkInfo, const process::UPID& from); // Returns whether the framework is authorized. // Returns failure for transient authorization failures. 
process::Future<bool> authorizeFramework( const FrameworkInfo& frameworkInfo); // Returns whether the principal is authorized to (re-)register an agent // and whether the `SlaveInfo` is authorized. process::Future<bool> authorizeSlave( const SlaveInfo& slaveInfo, const Option<process::http::authentication::Principal>& principal); // Returns whether the task is authorized. // Returns failure for transient authorization failures. process::Future<bool> authorizeTask( const TaskInfo& task, Framework* framework); /** * Authorizes a `RESERVE` operation. * * Returns whether the Reserve operation is authorized with the * provided principal. This function is used for authorization of * operations originating from both frameworks and operators. Note * that operations may be validated AFTER authorization, so it's * possible that `reserve` could be malformed. * * @param reserve The `RESERVE` operation to be performed. * @param principal An `Option` containing the principal attempting * this operation. * * @return A `Future` containing a boolean value representing the * success or failure of this authorization. A failed `Future` * implies that validation of the operation did not succeed. */ process::Future<bool> authorizeReserveResources( const Offer::Operation::Reserve& reserve, const Option<process::http::authentication::Principal>& principal); // Authorizes whether the provided `principal` is allowed to reserve // the specified `resources`. process::Future<bool> authorizeReserveResources( const Resources& resources, const Option<process::http::authentication::Principal>& principal); /** * Authorizes an `UNRESERVE` operation. * * Returns whether the Unreserve operation is authorized with the * provided principal. This function is used for authorization of * operations originating both from frameworks and operators. Note * that operations may be validated AFTER authorization, so it's * possible that `unreserve` could be malformed. 
* * @param unreserve The `UNRESERVE` operation to be performed. * @param principal An `Option` containing the principal attempting * this operation. * * @return A `Future` containing a boolean value representing the * success or failure of this authorization. A failed `Future` * implies that validation of the operation did not succeed. */ process::Future<bool> authorizeUnreserveResources( const Offer::Operation::Unreserve& unreserve, const Option<process::http::authentication::Principal>& principal); /** * Authorizes a `CREATE` operation. * * Returns whether the Create operation is authorized with the provided * principal. This function is used for authorization of operations * originating both from frameworks and operators. Note that operations may be * validated AFTER authorization, so it's possible that `create` could be * malformed. * * @param create The `CREATE` operation to be performed. * @param principal An `Option` containing the principal attempting this * operation. * * @return A `Future` containing a boolean value representing the success or * failure of this authorization. A failed `Future` implies that * validation of the operation did not succeed. */ process::Future<bool> authorizeCreateVolume( const Offer::Operation::Create& create, const Option<process::http::authentication::Principal>& principal); /** * Authorizes a `DESTROY` operation. * * Returns whether the Destroy operation is authorized with the provided * principal. This function is used for authorization of operations * originating both from frameworks and operators. Note that operations may be * validated AFTER authorization, so it's possible that `destroy` could be * malformed. * * @param destroy The `DESTROY` operation to be performed. * @param principal An `Option` containing the principal attempting this * operation. * * @return A `Future` containing a boolean value representing the success or * failure of this authorization. 
A failed `Future` implies that * validation of the operation did not succeed. */ process::Future<bool> authorizeDestroyVolume( const Offer::Operation::Destroy& destroy, const Option<process::http::authentication::Principal>& principal); /** * Authorizes resize of a volume triggered by either `GROW_VOLUME` or * `SHRINK_VOLUME` operations. * * Returns whether the triggering operation is authorized with the provided * principal. This function is used for authorization of operations * originating both from frameworks and operators. Note that operations may be * validated AFTER authorization, so it's possible that the operation could be * malformed. * * @param volume The volume being resized. * @param principal An `Option` containing the principal attempting this * operation. * * @return A `Future` containing a boolean value representing the success or * failure of this authorization. A failed `Future` implies that * validation of the operation did not succeed. */ process::Future<bool> authorizeResizeVolume( const Resource& volume, const Option<process::http::authentication::Principal>& principal); /** * Authorizes a `CREATE_DISK` operation. * * Returns whether the `CREATE_DISK` operation is authorized with the * provided principal. This function is used for authorization of operations * originating from frameworks. Note that operations may be validated AFTER * authorization, so it's possible that the operation could be malformed. * * @param createDisk The `CREATE_DISK` operation to be performed. * @param principal An `Option` containing the principal attempting this * operation. * * @return A `Future` containing a boolean value representing the success or * failure of this authorization. A failed `Future` implies that * validation of the operation did not succeed. */ process::Future<bool> authorizeCreateDisk( const Offer::Operation::CreateDisk& createDisk, const Option<process::http::authentication::Principal>& principal); /** * Authorizes a `DESTROY_DISK` operation. 
* * Returns whether the `DESTROY_DISK` operation is authorized with the * provided principal. This function is used for authorization of operations * originating from frameworks. Note that operations may be validated AFTER * authorization, so it's possible that the operation could be malformed. * * @param destroyDisk The `DESTROY_DISK` operation to be performed. * @param principal An `Option` containing the principal attempting this * operation. * * @return A `Future` containing a boolean value representing the success or * failure of this authorization. A failed `Future` implies that * validation of the operation did not succeed. */ process::Future<bool> authorizeDestroyDisk( const Offer::Operation::DestroyDisk& destroyDisk, const Option<process::http::authentication::Principal>& principal); // Determine if a new executor needs to be launched. bool isLaunchExecutor ( const ExecutorID& executorId, Framework* framework, Slave* slave) const; // Add executor to the framework and slave. void addExecutor( const ExecutorInfo& executorInfo, Framework* framework, Slave* slave); // Add task to the framework and slave. void addTask(const TaskInfo& task, Framework* framework, Slave* slave); // Transitions the task, and recovers resources if the task becomes // terminal. void updateTask(Task* task, const StatusUpdate& update); // Removes the task. `unreachable` indicates whether the task is removed due // to being unreachable. Note that we cannot rely on the task state because // it may not reflect unreachability due to being set to TASK_LOST for // backwards compatibility. void removeTask(Task* task, bool unreachable = false); // Remove an executor and recover its resources. void removeExecutor( Slave* slave, const FrameworkID& frameworkId, const ExecutorID& executorId); // Adds the given operation to the framework and the agent. 
void addOperation( Framework* framework, Slave* slave, Operation* operation); // Transitions the operation, and updates and recovers resources if // the operation becomes terminal. If `convertResources` is `false` // only the consumed resources of terminal operations are recovered, // but no resources are converted. void updateOperation( Operation* operation, const UpdateOperationStatusMessage& update, bool convertResources = true); // Remove the operation. void removeOperation(Operation* operation); // Send operation update for all operations on the agent. void sendBulkOperationFeedback( Slave* slave, OperationState operationState, const std::string& message); // Attempts to update the allocator by applying the given operation. // If successful, updates the slave's resources, sends a // 'CheckpointResourcesMessage' to the slave with the updated // checkpointed resources, and returns a 'Future' with 'Nothing'. // Otherwise, no action is taken and returns a failed 'Future'. process::Future<Nothing> apply( Slave* slave, const Offer::Operation& operation); // Forwards the update to the framework. void forward( const StatusUpdate& update, const process::UPID& acknowledgee, Framework* framework); // Remove an offer after specified timeout void offerTimeout(const OfferID& offerId); // Remove an offer and optionally rescind the offer as well. void removeOffer(Offer* offer, bool rescind = false); // Remove an inverse offer after specified timeout void inverseOfferTimeout(const OfferID& inverseOfferId); // Remove an inverse offer and optionally rescind it as well. 
void removeInverseOffer(InverseOffer* inverseOffer, bool rescind = false); bool isCompletedFramework(const FrameworkID& frameworkId) const; Framework* getFramework(const FrameworkID& frameworkId) const; Offer* getOffer(const OfferID& offerId) const; InverseOffer* getInverseOffer(const OfferID& inverseOfferId) const; FrameworkID newFrameworkId(); OfferID newOfferId(); SlaveID newSlaveId(); private: // Updates the agent's resources by applying the given operation. // Sends either `ApplyOperationMessage` or // `CheckpointResourcesMessage` (with updated checkpointed // resources) to the agent depending on if the agent has // `RESOURCE_PROVIDER` capability. void _apply( Slave* slave, Framework* framework, const Offer::Operation& operationInfo); void drop( const process::UPID& from, const mesos::scheduler::Call& call, const std::string& message); void drop( Framework* framework, const Offer::Operation& operation, const std::string& message); void drop( Framework* framework, const mesos::scheduler::Call& call, const std::string& message); void drop( Framework* framework, const mesos::scheduler::Call::Suppress& suppress, const std::string& message); void drop( Framework* framework, const mesos::scheduler::Call::Revive& revive, const std::string& message); // Call handlers. 
void receive( const process::UPID& from, mesos::scheduler::Call&& call); void subscribe( StreamingHttpConnection<v1::scheduler::Event> http, const mesos::scheduler::Call::Subscribe& subscribe); void _subscribe( StreamingHttpConnection<v1::scheduler::Event> http, const FrameworkInfo& frameworkInfo, bool force, const std::set<std::string>& suppressedRoles, const process::Future<bool>& authorized); void subscribe( const process::UPID& from, const mesos::scheduler::Call::Subscribe& subscribe); void _subscribe( const process::UPID& from, const FrameworkInfo& frameworkInfo, bool force, const std::set<std::string>& suppressedRoles, const process::Future<bool>& authorized); // Subscribes a client to the 'api/vX' endpoint. void subscribe( const StreamingHttpConnection<v1::master::Event>& http, const Option<process::http::authentication::Principal>& principal); void teardown(Framework* framework); void accept( Framework* framework, mesos::scheduler::Call::Accept&& accept); void _accept( const FrameworkID& frameworkId, const SlaveID& slaveId, const Resources& offeredResources, mesos::scheduler::Call::Accept&& accept, const process::Future< std::vector<process::Future<bool>>>& authorizations); void acceptInverseOffers( Framework* framework, const mesos::scheduler::Call::AcceptInverseOffers& accept); void decline( Framework* framework, mesos::scheduler::Call::Decline&& decline); void declineInverseOffers( Framework* framework, const mesos::scheduler::Call::DeclineInverseOffers& decline); void revive( Framework* framework, const mesos::scheduler::Call::Revive& revive); void kill( Framework* framework, const mesos::scheduler::Call::Kill& kill); void shutdown( Framework* framework, const mesos::scheduler::Call::Shutdown& shutdown); void acknowledge( Framework* framework, mesos::scheduler::Call::Acknowledge&& acknowledge); void acknowledgeOperationStatus( Framework* framework, mesos::scheduler::Call::AcknowledgeOperationStatus&& acknowledge); void reconcile( Framework* framework, 
mesos::scheduler::Call::Reconcile&& reconcile); void reconcileOperations( Framework* framework, mesos::scheduler::Call::ReconcileOperations&& reconcile); void message( Framework* framework, mesos::scheduler::Call::Message&& message); void request( Framework* framework, const mesos::scheduler::Call::Request& request); void suppress( Framework* framework, const mesos::scheduler::Call::Suppress& suppress); bool elected() const { return leader.isSome() && leader.get() == info_; } void scheduleRegistryGc(); void doRegistryGc(); void _doRegistryGc( const hashset<SlaveID>& toRemoveUnreachable, const hashset<SlaveID>& toRemoveGone, const process::Future<bool>& registrarResult); std::vector<std::string> filterRoles( const process::Owned<ObjectApprovers>& approvers) const; /** * Returns whether the given role is on the whitelist. * * When using explicit roles, this consults the configured (static) * role whitelist. When using implicit roles, any role is allowed * (and access control is done via ACLs). */ bool isWhitelistedRole(const std::string& name) const; // Validates subscription prerequisites common // both for HTTP and driver-based frameworks. Option<Error> validateFrameworkSubscription( const scheduler::Call::Subscribe&) const; /** * Inner class used to namespace the handling of quota requests. * * It operates inside the Master actor. It is responsible for validating * and persisting quota requests, and exposing quota status. * @see master/quota_handler.cpp for implementations. */ class QuotaHandler { public: explicit QuotaHandler(Master* _master) : master(_master) { CHECK_NOTNULL(master); } // Returns a list of set quotas. 
process::Future<process::http::Response> status(
    const mesos::master::Call& call,
    const Option<process::http::authentication::Principal>& principal,
    ContentType contentType) const;

// Raw HTTP endpoint variant of `status`.
process::Future<process::http::Response> status(
    const process::http::Request& request,
    const Option<process::http::authentication::Principal>& principal) const;

// v1 `mesos::master::Call` variant for updating quota.
process::Future<process::http::Response> update(
    const mesos::master::Call& call,
    const Option<process::http::authentication::Principal>& principal) const;

// v1 `mesos::master::Call` variant for setting quota.
process::Future<process::http::Response> set(
    const mesos::master::Call& call,
    const Option<process::http::authentication::Principal>& principal) const;

// Raw HTTP endpoint variant of `set`.
process::Future<process::http::Response> set(
    const process::http::Request& request,
    const Option<process::http::authentication::Principal>& principal) const;

// v1 `mesos::master::Call` variant for removing quota.
process::Future<process::http::Response> remove(
    const mesos::master::Call& call,
    const Option<process::http::authentication::Principal>& principal) const;

// Raw HTTP endpoint variant of `remove`.
process::Future<process::http::Response> remove(
    const process::http::Request& request,
    const Option<process::http::authentication::Principal>& principal) const;

private:
// Returns an error if the total quota guarantees overcommits
// the cluster. This is not a quota satisfiability check: it's
// possible that quota is unsatisfiable even if the quota
// does not overcommit the cluster. Specifically, we verify that
// the following inequality holds:
//
//   total cluster capacity >= total quota w/ quota request applied
//
// Note, total cluster capacity accounts resources of all the
// registered agents, including resources from resource providers
// as well as reservations (both static and dynamic ones).
static Option<Error> overcommitCheck(
    const std::vector<Resources>& agents,
    const hashmap<std::string, Quota>& quotas,
    const mesos::quota::QuotaInfo& request);

// We always want to rescind offers after the capacity heuristic. The
// reason for this is the race between the allocator and the master:
// it can happen that there are not enough free resources at the
// allocator's disposal when it is notified about the quota request,
// but at this point it's too late to rescind.
//
// While rescinding, we adhere to the following rules:
//   * Rescind at least as many resources as there are in the quota request.
//   * Rescind all offers from an agent in order to make the potential
//     offer bigger, which increases the chances that a quota'ed framework
//     will be able to use the offer.
//   * Rescind offers from at least `numF` agents to make it possible
//     (but not guaranteed, due to fair sharing) that each framework in
//     the role for which quota is set gets an offer (`numF` is the
//     number of frameworks in the quota'ed role). Though this is not
//     strictly necessary, we think this will increase the debuggability
//     and will improve user experience.
//
// TODO(alexr): Consider removing this function once offer management
// (including rescinding) is moved to allocator.
void rescindOffers(const mesos::quota::QuotaInfo& request) const; process::Future<bool> authorizeGetQuota( const Option<process::http::authentication::Principal>& principal, const mesos::quota::QuotaInfo& quotaInfo) const; process::Future<bool> authorizeUpdateQuota( const Option<process::http::authentication::Principal>& principal, const mesos::quota::QuotaInfo& quotaInfo) const; process::Future<mesos::quota::QuotaStatus> _status( const Option<process::http::authentication::Principal>& principal) const; process::Future<process::http::Response> _set( const mesos::quota::QuotaRequest& quotaRequest, const Option<process::http::authentication::Principal>& principal) const; process::Future<process::http::Response> __set( const mesos::quota::QuotaInfo& quotaInfo, bool forced) const; process::Future<process::http::Response> _remove( const std::string& role, const Option<process::http::authentication::Principal>& principal) const; process::Future<process::http::Response> __remove( const std::string& role) const; // To perform actions related to quota management, we require access to the // master data structures. No synchronization primitives are needed here // since `QuotaHandler`'s functions are invoked in the Master's actor. Master* master; }; /** * Inner class used to namespace the handling of /weights requests. * * It operates inside the Master actor. It is responsible for validating * and persisting /weights requests. * @see master/weights_handler.cpp for implementations. 
*/
class WeightsHandler
{
public:
  explicit WeightsHandler(Master* _master) : master(_master)
  {
    CHECK_NOTNULL(master);
  }

  // Raw HTTP endpoint variant for reading weights.
  process::Future<process::http::Response> get(
      const process::http::Request& request,
      const Option<process::http::authentication::Principal>& principal) const;

  // v1 `mesos::master::Call` variant for reading weights.
  process::Future<process::http::Response> get(
      const mesos::master::Call& call,
      const Option<process::http::authentication::Principal>& principal,
      ContentType contentType) const;

  // Raw HTTP endpoint variant for updating weights.
  process::Future<process::http::Response> update(
      const process::http::Request& request,
      const Option<process::http::authentication::Principal>& principal) const;

  // v1 `mesos::master::Call` variant for updating weights.
  process::Future<process::http::Response> update(
      const mesos::master::Call& call,
      const Option<process::http::authentication::Principal>& principal,
      ContentType contentType) const;

private:
  process::Future<bool> authorizeGetWeight(
      const Option<process::http::authentication::Principal>& principal,
      const WeightInfo& weight) const;

  process::Future<bool> authorizeUpdateWeights(
      const Option<process::http::authentication::Principal>& principal,
      const std::vector<std::string>& roles) const;

  // Keeps only the weights whose corresponding entry in
  // `roleAuthorizations` granted access.
  process::Future<std::vector<WeightInfo>> _filterWeights(
      const std::vector<WeightInfo>& weightInfos,
      const std::vector<bool>& roleAuthorizations) const;

  process::Future<std::vector<WeightInfo>> _getWeights(
      const Option<process::http::authentication::Principal>& principal) const;

  process::Future<process::http::Response> _updateWeights(
      const Option<process::http::authentication::Principal>& principal,
      const google::protobuf::RepeatedPtrField<WeightInfo>& weightInfos) const;

  process::Future<process::http::Response> __updateWeights(
      const std::vector<WeightInfo>& weightInfos) const;

  // Rescind all outstanding offers if any of the 'weightInfos' roles has
  // an active framework.
  void rescindOffers(const std::vector<WeightInfo>& weightInfos) const;

  // Non-owned pointer back to the master: weights handling needs access
  // to the master's data structures. No synchronization primitives are
  // needed since these functions are invoked in the Master's actor
  // (see the class comment above).
  Master* master;
};

public:
  // Inner class used to namespace HTTP handlers that do not change the
  // underlying master object.
// // Endpoints served by this handler are only permitted to depend on // the request query parameters and the authorization filters to // make caching of responses possible. // // NOTE: Most member functions of this class are not routed directly but // dispatched from their corresponding handlers in the outer `Http` class. // This is because deciding whether an incoming request is read-only often // requires some inspection, e.g. distinguishing between "GET" and "POST" // requests to the same endpoint. class ReadOnlyHandler { public: explicit ReadOnlyHandler(const Master* _master) : master(_master) {} // /frameworks process::http::Response frameworks( const hashmap<std::string, std::string>& queryParameters, const process::Owned<ObjectApprovers>& approvers) const; // /roles process::http::Response roles( const hashmap<std::string, std::string>& queryParameters, const process::Owned<ObjectApprovers>& approvers) const; // /slaves process::http::Response slaves( const hashmap<std::string, std::string>& queryParameters, const process::Owned<ObjectApprovers>& approvers) const; // /state process::http::Response state( const hashmap<std::string, std::string>& queryParameters, const process::Owned<ObjectApprovers>& approvers) const; // /state-summary process::http::Response stateSummary( const hashmap<std::string, std::string>& queryParameters, const process::Owned<ObjectApprovers>& approvers) const; // /tasks process::http::Response tasks( const hashmap<std::string, std::string>& queryParameters, const process::Owned<ObjectApprovers>& approvers) const; private: const Master* master; }; private: // Inner class used to namespace HTTP route handlers (see // master/http.cpp for implementations). 
class Http { public: explicit Http(Master* _master) : master(_master), readonlyHandler(_master), quotaHandler(_master), weightsHandler(_master) {} // /api/v1 process::Future<process::http::Response> api( const process::http::Request& request, const Option<process::http::authentication::Principal>& principal) const; // /api/v1/scheduler process::Future<process::http::Response> scheduler( const process::http::Request& request, const Option<process::http::authentication::Principal>& principal) const; // /master/create-volumes process::Future<process::http::Response> createVolumes( const process::http::Request& request, const Option<process::http::authentication::Principal>& principal) const; // /master/destroy-volumes process::Future<process::http::Response> destroyVolumes( const process::http::Request& request, const Option<process::http::authentication::Principal>& principal) const; // /master/flags process::Future<process::http::Response> flags( const process::http::Request& request, const Option<process::http::authentication::Principal>& principal) const; // /master/frameworks // // NOTE: Requests to this endpoint are batched. process::Future<process::http::Response> frameworks( const process::http::Request& request, const Option<process::http::authentication::Principal>& principal) const; // /master/health process::Future<process::http::Response> health( const process::http::Request& request) const; // /master/redirect process::Future<process::http::Response> redirect( const process::http::Request& request) const; // /master/reserve process::Future<process::http::Response> reserve( const process::http::Request& request, const Option<process::http::authentication::Principal>& principal) const; // /master/roles // // NOTE: Requests to this endpoint are batched. 
process::Future<process::http::Response> roles( const process::http::Request& request, const Option<process::http::authentication::Principal>& principal) const; // /master/teardown process::Future<process::http::Response> teardown( const process::http::Request& request, const Option<process::http::authentication::Principal>& principal) const; // /master/slaves // // NOTE: Requests to this endpoint are batched. process::Future<process::http::Response> slaves( const process::http::Request& request, const Option<process::http::authentication::Principal>& principal) const; // /master/state // // NOTE: Requests to this endpoint are batched. process::Future<process::http::Response> state( const process::http::Request& request, const Option<process::http::authentication::Principal>& principal) const; // /master/state-summary // // NOTE: Requests to this endpoint are batched. process::Future<process::http::Response> stateSummary( const process::http::Request& request, const Option<process::http::authentication::Principal>& principal) const; // /master/tasks // // NOTE: Requests to this endpoint are batched. 
process::Future<process::http::Response> tasks( const process::http::Request& request, const Option<process::http::authentication::Principal>& principal) const; // /master/maintenance/schedule process::Future<process::http::Response> maintenanceSchedule( const process::http::Request& request, const Option<process::http::authentication::Principal>& principal) const; // /master/maintenance/status process::Future<process::http::Response> maintenanceStatus( const process::http::Request& request, const Option<process::http::authentication::Principal>& principal) const; // /master/machine/down process::Future<process::http::Response> machineDown( const process::http::Request& request, const Option<process::http::authentication::Principal>& principal) const; // /master/machine/up process::Future<process::http::Response> machineUp( const process::http::Request& request, const Option<process::http::authentication::Principal>& principal) const; // /master/unreserve process::Future<process::http::Response> unreserve( const process::http::Request& request, const Option<process::http::authentication::Principal>& principal) const; // /master/quota process::Future<process::http::Response> quota( const process::http::Request& request, const Option<process::http::authentication::Principal>& principal) const; // /master/weights process::Future<process::http::Response> weights( const process::http::Request& request, const Option<process::http::authentication::Principal>& principal) const; static std::string API_HELP(); static std::string SCHEDULER_HELP(); static std::string FLAGS_HELP(); static std::string FRAMEWORKS_HELP(); static std::string HEALTH_HELP(); static std::string REDIRECT_HELP(); static std::string ROLES_HELP(); static std::string TEARDOWN_HELP(); static std::string SLAVES_HELP(); static std::string STATE_HELP(); static std::string STATESUMMARY_HELP(); static std::string TASKS_HELP(); static std::string MAINTENANCE_SCHEDULE_HELP(); static std::string 
MAINTENANCE_STATUS_HELP(); static std::string MACHINE_DOWN_HELP(); static std::string MACHINE_UP_HELP(); static std::string CREATE_VOLUMES_HELP(); static std::string DESTROY_VOLUMES_HELP(); static std::string RESERVE_HELP(); static std::string UNRESERVE_HELP(); static std::string QUOTA_HELP(); static std::string WEIGHTS_HELP(); private: JSON::Object __flags() const; class FlagsError; // Forward declaration. process::Future<Try<JSON::Object, FlagsError>> _flags( const Option<process::http::authentication::Principal>& principal) const; process::Future<std::vector<const Task*>> _tasks( const size_t limit, const size_t offset, const std::string& order, const Option<process::http::authentication::Principal>& principal) const; process::Future<process::http::Response> _teardown( const FrameworkID& id, const Option<process::http::authentication::Principal>& principal) const; process::Future<process::http::Response> __teardown( const FrameworkID& id) const; process::Future<process::http::Response> _updateMaintenanceSchedule( const mesos::maintenance::Schedule& schedule, const Option<process::http::authentication::Principal>& principal) const; process::Future<process::http::Response> __updateMaintenanceSchedule( const mesos::maintenance::Schedule& schedule, const process::Owned<ObjectApprovers>& approvers) const; process::Future<process::http::Response> ___updateMaintenanceSchedule( const mesos::maintenance::Schedule& schedule, bool applied) const; mesos::maintenance::Schedule _getMaintenanceSchedule( const process::Owned<ObjectApprovers>& approvers) const; process::Future<mesos::maintenance::ClusterStatus> _getMaintenanceStatus( const process::Owned<ObjectApprovers>& approvers) const; process::Future<process::http::Response> _startMaintenance( const google::protobuf::RepeatedPtrField<MachineID>& machineIds, const process::Owned<ObjectApprovers>& approvers) const; process::Future<process::http::Response> _stopMaintenance( const google::protobuf::RepeatedPtrField<MachineID>& 
machineIds, const process::Owned<ObjectApprovers>& approvers) const; process::Future<process::http::Response> _reserve( const SlaveID& slaveId, const google::protobuf::RepeatedPtrField<Resource>& resources, const Option<process::http::authentication::Principal>& principal) const; process::Future<process::http::Response> _unreserve( const SlaveID& slaveId, const google::protobuf::RepeatedPtrField<Resource>& resources, const Option<process::http::authentication::Principal>& principal) const; process::Future<process::http::Response> _createVolumes( const SlaveID& slaveId, const google::protobuf::RepeatedPtrField<Resource>& volumes, const Option<process::http::authentication::Principal>& principal) const; process::Future<process::http::Response> _destroyVolumes( const SlaveID& slaveId, const google::protobuf::RepeatedPtrField<Resource>& volumes, const Option<process::http::authentication::Principal>& principal) const; /** * Continuation for operations: /reserve, /unreserve, * /create-volumes and /destroy-volumes. First tries to recover * 'required' amount of resources by rescinding outstanding * offers, then tries to apply the operation by calling * 'master->apply' and propagates the 'Future<Nothing>' as * 'Future<Response>' where 'Nothing' -> 'OK' and Failed -> * 'Conflict'. * * @param slaveId The ID of the slave that the operation is * updating. * @param operation The operation to be performed. * * @return Returns 'OK' if successful, 'BadRequest' if the * operation is malformed, 'Conflict' otherwise. */ process::Future<process::http::Response> _operation( const SlaveID& slaveId, const Offer::Operation& operation) const; // Master API handlers. 
process::Future<process::http::Response> getAgents( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; mesos::master::Response::GetAgents _getAgents( const process::Owned<ObjectApprovers>& approvers) const; process::Future<process::http::Response> getFlags( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; process::Future<process::http::Response> getHealth( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; process::Future<process::http::Response> getVersion( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; process::Future<process::http::Response> getRoles( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; process::Future<process::http::Response> getMetrics( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; process::Future<process::http::Response> getLoggingLevel( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; process::Future<process::http::Response> setLoggingLevel( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; process::Future<process::http::Response> listFiles( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; process::Future<process::http::Response> getMaster( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; process::Future<process::http::Response> 
updateMaintenanceSchedule( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; process::Future<process::http::Response> getMaintenanceSchedule( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; process::Future<process::http::Response> getMaintenanceStatus( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; process::Future<process::http::Response> startMaintenance( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; process::Future<process::http::Response> stopMaintenance( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; process::Future<process::http::Response> getOperations( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; process::Future<process::http::Response> getTasks( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; mesos::master::Response::GetTasks _getTasks( const process::Owned<ObjectApprovers>& approvers) const; process::Future<process::http::Response> createVolumes( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; process::Future<process::http::Response> destroyVolumes( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; process::Future<process::http::Response> growVolume( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; 
process::Future<process::http::Response> shrinkVolume( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; process::Future<process::http::Response> reserveResources( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; process::Future<process::http::Response> unreserveResources( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; process::Future<process::http::Response> getFrameworks( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; mesos::master::Response::GetFrameworks _getFrameworks( const process::Owned<ObjectApprovers>& approvers) const; process::Future<process::http::Response> getExecutors( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; mesos::master::Response::GetExecutors _getExecutors( const process::Owned<ObjectApprovers>& approvers) const; process::Future<process::http::Response> getState( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; mesos::master::Response::GetState _getState( const process::Owned<ObjectApprovers>& approvers) const; process::Future<process::http::Response> subscribe( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; process::Future<process::http::Response> readFile( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; process::Future<process::http::Response> teardown( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) 
const; process::Future<process::http::Response> markAgentGone( const mesos::master::Call& call, const Option<process::http::authentication::Principal>& principal, ContentType contentType) const; process::Future<process::http::Response> _markAgentGone( const SlaveID& slaveId) const; process::Future<process::http::Response> reconcileOperations( Framework* framework, const mesos::scheduler::Call::ReconcileOperations& call, ContentType contentType) const; Master* master; ReadOnlyHandler readonlyHandler; // NOTE: The quota specific pieces of the Operator API are factored // out into this separate class. QuotaHandler quotaHandler; // NOTE: The weights specific pieces of the Operator API are factored // out into this separate class. WeightsHandler weightsHandler; // Since the Master actor is one of the most loaded in a typical Mesos // installation, we take some extra care to keep the backlog small. // In particular, all read-only requests are batched and executed in // parallel, instead of going through the master queue separately. typedef process::http::Response (Master::ReadOnlyHandler::*ReadOnlyRequestHandler)( const hashmap<std::string, std::string>&, const process::Owned<ObjectApprovers>&) const; process::Future<process::http::Response> deferBatchedRequest( ReadOnlyRequestHandler handler, const Option<process::http::authentication::Principal>& principal, const hashmap<std::string, std::string>& queryParameters, const process::Owned<ObjectApprovers>& approvers) const; void processRequestsBatch() const; struct BatchedRequest { ReadOnlyRequestHandler handler; hashmap<std::string, std::string> queryParameters; Option<process::http::authentication::Principal> principal; process::Owned<ObjectApprovers> approvers; // NOTE: The returned response should be either of type // `BODY` or `PATH`, since `PIPE`-type responses would // break the deduplication mechanism. 
process::Promise<process::http::Response> promise; }; mutable std::vector<BatchedRequest> batchedRequests; }; Master(const Master&); // No copying. Master& operator=(const Master&); // No assigning. friend struct Framework; friend struct FrameworkMetrics; friend struct Metrics; friend struct Slave; friend struct SlavesWriter; friend struct Subscriber; // NOTE: Since 'getOffer', 'getInverseOffer' and 'slaves' are // protected, we need to make the following functions friends. friend Offer* validation::offer::getOffer( Master* master, const OfferID& offerId); friend InverseOffer* validation::offer::getInverseOffer( Master* master, const OfferID& offerId); friend Slave* validation::offer::getSlave( Master* master, const SlaveID& slaveId); const Flags flags; Http http; Option<MasterInfo> leader; // Current leading master. mesos::allocator::Allocator* allocator; WhitelistWatcher* whitelistWatcher; Registrar* registrar; Files* files; mesos::master::contender::MasterContender* contender; mesos::master::detector::MasterDetector* detector; const Option<Authorizer*> authorizer; MasterInfo info_; // Holds some info which affects how a machine behaves, as well as state that // represent the master's view of this machine. See the `MachineInfo` protobuf // and `Machine` struct for more information. hashmap<MachineID, Machine> machines; struct Maintenance { // Holds the maintenance schedule, as given by the operator. std::list<mesos::maintenance::Schedule> schedules; } maintenance; // Indicates when recovery is complete. Recovery begins once the // master is elected as a leader. Option<process::Future<Nothing>> recovered; // If this is the leading master, we periodically check whether we // should GC some information from the registry. Option<process::Timer> registryGcTimer; struct Slaves { Slaves() : removed(MAX_REMOVED_SLAVES) {} // Imposes a time limit for slaves that we recover from the // registry to reregister with the master. 
Option<process::Timer> recoveredTimer;

// Slaves that have been recovered from the registrar after master
// failover. Slaves are removed from this collection when they
// either reregister with the master or are marked unreachable
// because they do not reregister before `recoveredTimer` fires.
// We must not answer questions related to these slaves (e.g.,
// during task reconciliation) until we determine their fate
// because they are in this transitioning state.
hashmap<SlaveID, SlaveInfo> recovered;

// Agents that are in the process of (re-)registering. They are
// maintained here while the (re-)registration is in progress and
// possibly pending in the authorizer or the registrar in order
// to help deduplicate (re-)registration requests.
hashset<process::UPID> registering;
hashset<SlaveID> reregistering;

// Registered slaves are indexed by SlaveID and UPID. Note that
// iteration is supported but is exposed as iteration over a
// hashmap<SlaveID, Slave*> since it is tedious to convert
// the map's key/value iterator into a value iterator.
//
// TODO(bmahler): Consider pulling in boost's multi_index,
// or creating a simpler indexing abstraction in stout.
struct { bool contains(const SlaveID& slaveId) const { return ids.contains(slaveId); } bool contains(const process::UPID& pid) const { return pids.contains(pid); } Slave* get(const SlaveID& slaveId) const { return ids.get(slaveId).getOrElse(nullptr); } Slave* get(const process::UPID& pid) const { return pids.get(pid).getOrElse(nullptr); } void put(Slave* slave) { CHECK_NOTNULL(slave); ids[slave->id] = slave; pids[slave->pid] = slave; } void remove(Slave* slave) { CHECK_NOTNULL(slave); ids.erase(slave->id); pids.erase(slave->pid); } void clear() { ids.clear(); pids.clear(); } size_t size() const { return ids.size(); } typedef hashmap<SlaveID, Slave*>::iterator iterator; typedef hashmap<SlaveID, Slave*>::const_iterator const_iterator; iterator begin() { return ids.begin(); } iterator end() { return ids.end(); } const_iterator begin() const { return ids.begin(); } const_iterator end() const { return ids.end(); } private: hashmap<SlaveID, Slave*> ids; hashmap<process::UPID, Slave*> pids; } registered; // Slaves that are in the process of being removed from the // registrar. hashset<SlaveID> removing; // Slaves that are in the process of being marked unreachable. hashset<SlaveID> markingUnreachable; // Slaves that are in the process of being marked gone. hashset<SlaveID> markingGone; // This collection includes agents that have gracefully shutdown, // as well as those that have been marked unreachable or gone. We // keep a cache here to prevent this from growing in an unbounded // manner. // // TODO(bmahler): Ideally we could use a cache with set semantics. // // TODO(neilc): Consider storing all agent IDs that have been // marked unreachable by this master. Cache<SlaveID, Nothing> removed; // Slaves that have been marked unreachable. We recover this from // the registry, so it includes slaves marked as unreachable by // other instances of the master. 
Note that we use a LinkedHashMap // to ensure the order of elements here matches the order in the // registry's unreachable list, which matches the order in which // agents are marked unreachable. This list is garbage collected; // GC behavior is governed by the `registry_gc_interval`, // `registry_max_agent_age`, and `registry_max_agent_count` flags. LinkedHashMap<SlaveID, TimeInfo> unreachable; // This helps us look up all unreachable tasks on an agent so we can remove // them from their primary storage `framework.unreachableTasks` when an // agent reregisters. This map is bounded by the same GC behavior as // `unreachable`. When the agent is GC'd from unreachable it's also // erased from `unreachableTasks`. hashmap<SlaveID, multihashmap<FrameworkID, TaskID>> unreachableTasks; // Slaves that have been marked gone. We recover this from the // registry, so it includes slaves marked as gone by other instances // of the master. Note that we use a LinkedHashMap to ensure the order // of elements here matches the order in the registry's gone list, which // matches the order in which agents are marked gone. LinkedHashMap<SlaveID, TimeInfo> gone; // This rate limiter is used to limit the removal of slaves failing // health checks. // NOTE: Using a 'shared_ptr' here is OK because 'RateLimiter' is // a wrapper around libprocess process which is thread safe. Option<std::shared_ptr<process::RateLimiter>> limiter; } slaves; struct Frameworks { Frameworks(const Flags& masterFlags) : completed(masterFlags.max_completed_frameworks) {} hashmap<FrameworkID, Framework*> registered; BoundedHashMap<FrameworkID, process::Owned<Framework>> completed; // Principals of frameworks keyed by PID. // NOTE: Multiple PIDs can map to the same principal. The // principal is None when the framework doesn't specify it. // The differences between this map and 'authenticated' are: // 1) This map only includes *registered* frameworks. The mapping // is added when a framework (re-)registers. 
// 2) This map includes unauthenticated frameworks (when Master // allows them) if they have principals specified in // FrameworkInfo. hashmap<process::UPID, Option<std::string>> principals; // BoundedRateLimiters keyed by the framework principal. // Like Metrics::Frameworks, all frameworks of the same principal // are throttled together at a common rate limit. hashmap<std::string, Option<process::Owned<BoundedRateLimiter>>> limiters; // The default limiter is for frameworks not specified in // 'flags.rate_limits'. Option<process::Owned<BoundedRateLimiter>> defaultLimiter; } frameworks; struct Subscribers { Subscribers(Master* _master, size_t maxSubscribers) : master(_master), subscribed(maxSubscribers) {}; // Represents a client subscribed to the 'api/vX' endpoint. // // TODO(anand): Add support for filtering. Some subscribers // might only be interested in a subset of events. struct Subscriber { Subscriber( const StreamingHttpConnection<v1::master::Event>& _http, const Option<process::http::authentication::Principal> _principal) : http(_http), heartbeater( "subscriber " + stringify(http.streamId), []() { mesos::master::Event event; event.set_type(mesos::master::Event::HEARTBEAT); return event; }(), http, DEFAULT_HEARTBEAT_INTERVAL, DEFAULT_HEARTBEAT_INTERVAL), principal(_principal) {} // Not copyable, not assignable. Subscriber(const Subscriber&) = delete; Subscriber& operator=(const Subscriber&) = delete; // TODO(greggomann): Refactor this function into multiple event-specific // overloads. See MESOS-8475. void send( const process::Shared<mesos::master::Event>& event, const process::Owned<ObjectApprovers>& approvers, const process::Shared<FrameworkInfo>& frameworkInfo, const process::Shared<Task>& task); ~Subscriber() { // TODO(anand): Refactor `HttpConnection` to being a RAII class instead. // It is possible that a caller might accidentally invoke `close()` // after passing ownership to the `Subscriber` object. See MESOS-5843 // for more details. 
http.close(); } StreamingHttpConnection<v1::master::Event> http; ResponseHeartbeater<mesos::master::Event, v1::master::Event> heartbeater; const Option<process::http::authentication::Principal> principal; }; // Sends the event to all subscribers connected to the 'api/vX' endpoint. void send( mesos::master::Event&& event, const Option<FrameworkInfo>& frameworkInfo = None(), const Option<Task>& task = None()); Master* master; // Active subscribers to the 'api/vX' endpoint keyed by the stream // identifier. BoundedHashMap<id::UUID, process::Owned<Subscriber>> subscribed; }; Subscribers subscribers; hashmap<OfferID, Offer*> offers; hashmap<OfferID, process::Timer> offerTimers; hashmap<OfferID, InverseOffer*> inverseOffers; hashmap<OfferID, process::Timer> inverseOfferTimers; // We track information about roles that we're aware of in the system. // Specifically, we keep track of the roles when a framework subscribes to // the role, and/or when there are resources allocated to the role // (e.g. some tasks and/or executors are consuming resources under the role). hashmap<std::string, Role*> roles; // Configured role whitelist if using the (deprecated) "explicit // roles" feature. If this is `None`, any role is allowed. Option<hashset<std::string>> roleWhitelist; // Configured weight for each role, if any. If a role does not // appear here, it has the default weight of 1. hashmap<std::string, double> weights; // Configured quota for each role, if any. We store quotas by role // because we set them at the role level. hashmap<std::string, Quota> quotas; // Authenticator names as supplied via flags. std::vector<std::string> authenticatorNames; Option<Authenticator*> authenticator; // Frameworks/slaves that are currently in the process of authentication. // 'authenticating' future is completed when authenticator // completes authentication. // The future is removed from the map when master completes authentication. 
hashmap<process::UPID, process::Future<Option<std::string>>> authenticating; // Principals of authenticated frameworks/slaves keyed by PID. hashmap<process::UPID, std::string> authenticated; int64_t nextFrameworkId; // Used to give each framework a unique ID. int64_t nextOfferId; // Used to give each slot offer a unique ID. int64_t nextSlaveId; // Used to give each slave a unique ID. // NOTE: It is safe to use a 'shared_ptr' because 'Metrics' is // thread safe. // TODO(dhamon): This does not need to be a shared_ptr. Metrics contains // copyable metric types only. std::shared_ptr<Metrics> metrics; // PullGauge handlers. double _uptime_secs() { return (process::Clock::now() - startTime).secs(); } double _elected() { return elected() ? 1 : 0; } double _slaves_connected(); double _slaves_disconnected(); double _slaves_active(); double _slaves_inactive(); double _slaves_unreachable(); // TODO(bevers): Remove these and make the above functions // const instead after MESOS-4995 is resolved. double _const_slaves_connected() const; double _const_slaves_disconnected() const; double _const_slaves_active() const; double _const_slaves_inactive() const; double _const_slaves_unreachable() const; double _frameworks_connected(); double _frameworks_disconnected(); double _frameworks_active(); double _frameworks_inactive(); double _outstanding_offers() { return static_cast<double>(offers.size()); } double _event_queue_messages() { return static_cast<double>(eventCount<process::MessageEvent>()); } double _event_queue_dispatches() { return static_cast<double>(eventCount<process::DispatchEvent>()); } double _event_queue_http_requests() { return static_cast<double>(eventCount<process::HttpEvent>()); } double _tasks_staging(); double _tasks_starting(); double _tasks_running(); double _tasks_unreachable(); double _tasks_killing(); double _resources_total(const std::string& name); double _resources_used(const std::string& name); double _resources_percent(const std::string& name); double 
_resources_revocable_total(const std::string& name); double _resources_revocable_used(const std::string& name); double _resources_revocable_percent(const std::string& name); process::Time startTime; // Start time used to calculate uptime. Option<process::Time> electedTime; // Time when this master is elected. // Validates the framework including authorization. // Returns None if the framework is valid. // Returns Error if the framework is invalid. // Returns Failure if authorization returns 'Failure'. process::Future<Option<Error>> validate( const FrameworkInfo& frameworkInfo, const process::UPID& from); }; inline std::ostream& operator<<( std::ostream& stream, const Framework& framework); // TODO(bmahler): Keeping the task and executor information in sync // across the Slave and Framework structs is error prone! struct Framework { enum State { // Framework has never connected to this master. This implies the // master failed over and the framework has not yet reregistered, // but some framework state has been recovered from reregistering // agents that are running tasks for the framework. RECOVERED, // Framework was previously connected to this master. A framework // becomes disconnected when there is a socket error. DISCONNECTED, // The framework is connected but not active. INACTIVE, // Framework is connected and eligible to receive offers. No // offers will be made to frameworks that are not active. 
ACTIVE }; Framework(Master* const master, const Flags& masterFlags, const FrameworkInfo& info, const process::UPID& _pid, const process::Time& time = process::Clock::now()); Framework(Master* const master, const Flags& masterFlags, const FrameworkInfo& info, const StreamingHttpConnection<v1::scheduler::Event>& _http, const process::Time& time = process::Clock::now()); Framework(Master* const master, const Flags& masterFlags, const FrameworkInfo& info); ~Framework(); Task* getTask(const TaskID& taskId); void addTask(Task* task); // Update framework to recover the resources that were previously // being used by `task`. // // TODO(bmahler): This is a hack for performance. We need to // maintain resource counters because computing task resources // functionally for all tasks is expensive, for now. void recoverResources(Task* task); // Sends a message to the connected framework. template <typename Message> void send(const Message& message); void addCompletedTask(Task&& task); void addUnreachableTask(const Task& task); // Removes the task. `unreachable` indicates whether the task is removed due // to being unreachable. Note that we cannot rely on the task state because // it may not reflect unreachability due to being set to TASK_LOST for // backwards compatibility. void removeTask(Task* task, bool unreachable); void addOffer(Offer* offer); void removeOffer(Offer* offer); void addInverseOffer(InverseOffer* inverseOffer); void removeInverseOffer(InverseOffer* inverseOffer); bool hasExecutor(const SlaveID& slaveId, const ExecutorID& executorId); void addExecutor(const SlaveID& slaveId, const ExecutorInfo& executorInfo); void removeExecutor(const SlaveID& slaveId, const ExecutorID& executorId); void addOperation(Operation* operation); Option<Operation*> getOperation(const OperationID& id); void recoverResources(Operation* operation); void removeOperation(Operation* operation); const FrameworkID id() const; // Update fields in 'info' using those in 'newInfo'. 
Currently this // only updates `role`/`roles`, 'name', 'failover_timeout', 'hostname', // 'webui_url', 'capabilities', and 'labels'. void update(const FrameworkInfo& newInfo); void updateConnection(const process::UPID& newPid); void updateConnection( const StreamingHttpConnection<v1::scheduler::Event>& newHttp); // Closes the HTTP connection and stops the heartbeat. // // TODO(vinod): Currently `state` variable is set separately // from this method. We need to make sure these are in sync. void closeHttpConnection(); void heartbeat(); bool active() const; bool connected() const; bool recovered() const; bool isTrackedUnderRole(const std::string& role) const; void trackUnderRole(const std::string& role); void untrackUnderRole(const std::string& role); void setFrameworkState(const State& _state); Master* const master; FrameworkInfo info; std::set<std::string> roles; protobuf::framework::Capabilities capabilities; // Frameworks can either be connected via HTTP or by message passing // (scheduler driver). At most one of `http` and `pid` will be set // according to the last connection made by the framework; neither // field will be set if the framework is in state `RECOVERED`. Option<StreamingHttpConnection<v1::scheduler::Event>> http; Option<process::UPID> pid; State state; process::Time registeredTime; process::Time reregisteredTime; process::Time unregisteredTime; // Tasks that have not yet been launched because they are currently // being authorized. hashmap<TaskID, TaskInfo> pendingTasks; // TODO(bmahler): Make this private to enforce that `addTask()` and // `removeTask()` are used, and provide a const view into the tasks. hashmap<TaskID, Task*> tasks; // Tasks launched by this framework that have reached a terminal // state and have had all their updates acknowledged. We only keep a // fixed-size cache to avoid consuming too much memory. We use // circular_buffer rather than BoundedHashMap because there // can be multiple completed tasks with the same task ID. 
circular_buffer<process::Owned<Task>> completedTasks; // When an agent is marked unreachable, tasks running on it are stored // here. We only keep a fixed-size cache to avoid consuming too much memory. // NOTE: Non-partition-aware unreachable tasks in this map are marked // TASK_LOST instead of TASK_UNREACHABLE for backward compatibility. BoundedHashMap<TaskID, process::Owned<Task>> unreachableTasks; hashset<Offer*> offers; // Active offers for framework. hashset<InverseOffer*> inverseOffers; // Active inverse offers for framework. // TODO(bmahler): Make this private to enforce that `addExecutor()` // and `removeExecutor()` are used, and provide a const view into // the executors. hashmap<SlaveID, hashmap<ExecutorID, ExecutorInfo>> executors; // Pending operations or terminal operations that have // unacknowledged status updates. hashmap<UUID, Operation*> operations; // The map from the framework-specified operation ID to the // corresponding internal operation UUID. hashmap<OperationID, UUID> operationUUIDs; // NOTE: For the used and offered resources below, we keep the // total as well as partitioned by SlaveID. // We expose the total resources via the HTTP endpoint, and we // keep a running total of the resources because looping over the // slaves to sum the resources has led to perf issues (MESOS-1862). // We keep the resources partitioned by SlaveID because non-scalar // resources can be lost when summing them up across multiple // slaves (MESOS-2373). // // Also note that keeping the totals is safe even though it yields // incorrect results for non-scalar resources. // (1) For overlapping set items / ranges across slaves, these // will get added N times but only represented once. // (2) When an initial subtraction occurs (N-1), the resource is // no longer represented. (This is the source of the bug). 
// (3) When any further subtractions occur (N-(1+M)), the // Resources simply ignores the subtraction since there's // nothing to remove, so this is safe for now. // TODO(mpark): Strip the non-scalar resources out of the totals // in order to avoid reporting incorrect statistics (MESOS-2623). // Active task / executor / operation resources. Resources totalUsedResources; // Note that we maintain multiple copies of each shared resource in // `usedResources` as they are used by multiple tasks. hashmap<SlaveID, Resources> usedResources; // Offered resources. Resources totalOfferedResources; hashmap<SlaveID, Resources> offeredResources; // This is only set for HTTP frameworks. process::Owned<ResponseHeartbeater<scheduler::Event, v1::scheduler::Event>> heartbeater; // This is used for per-framework metrics. FrameworkMetrics metrics; private: Framework(Master* const _master, const Flags& masterFlags, const FrameworkInfo& _info, State state, const process::Time& time); Framework(const Framework&); // No copying. Framework& operator=(const Framework&); // No assigning. }; // Sends a message to the connected framework. template <typename Message> void Framework::send(const Message& message) { metrics.incrementEvent(message); if (!connected()) { LOG(WARNING) << "Master attempting to send message to disconnected" << " framework " << *this; // NOTE: We proceed here without returning to support the case where a // "disconnected" framework is still talking to the master and the master // wants to shut it down by sending a `FrameworkErrorMessage`. This can // occur in a one-way network partition where the master -> framework link // is broken but the framework -> master link remains intact. Note that we // have no periodic heartbeats between the master and pid-based schedulers. 
// // TODO(chhsiao): Update the `FrameworkErrorMessage` call-sites that rely on // the lack of a `return` here to directly call `process::send` so that this // function doesn't need to deal with the special case. Then we can check // that one of `http` or `pid` is set if the framework is connected. } if (http.isSome()) { if (!http->send(message)) { LOG(WARNING) << "Unable to send message to framework " << *this << ":" << " connection closed"; } } else if (pid.isSome()) { master->send(pid.get(), message); } else { LOG(WARNING) << "Unable to send message to framework " << *this << ":" << " framework is recovered but has not reregistered"; } } // TODO(bevers): Check if there is anything preventing us from // returning a const reference here. inline const FrameworkID Framework::id() const { return info.id(); } inline bool Framework::active() const { return state == ACTIVE; } inline bool Framework::connected() const { return state == ACTIVE || state == INACTIVE; } inline bool Framework::recovered() const { return state == RECOVERED; } inline std::ostream& operator<<( std::ostream& stream, const Framework& framework) { // TODO(vinod): Also log the hostname once FrameworkInfo is properly // updated on framework failover (MESOS-1784). stream << framework.id() << " (" << framework.info.name() << ")"; if (framework.pid.isSome()) { stream << " at " << framework.pid.get(); } return stream; } // Information about an active role. 
struct Role { Role() = delete; Role(const std::string& _role) : role(_role) {} void addFramework(Framework* framework) { frameworks[framework->id()] = framework; } void removeFramework(Framework* framework) { frameworks.erase(framework->id()); } Resources allocatedResources() const { Resources resources; auto allocatedTo = [](const std::string& role) { return [role](const Resource& resource) { CHECK(resource.has_allocation_info()); return resource.allocation_info().role() == role; }; }; foreachvalue (Framework* framework, frameworks) { resources += framework->totalUsedResources.filter(allocatedTo(role)); resources += framework->totalOfferedResources.filter(allocatedTo(role)); } return resources; } const std::string role; // NOTE: The dynamic role/quota relation is stored in and administrated // by the master. There is no direct representation of quota information // here to avoid duplication and to support that an operator can associate // quota with a role before the role is created. Such ordering of operator // requests prevents a race of premature unbounded allocation that setting // quota first is intended to contain. hashmap<FrameworkID, Framework*> frameworks; }; } // namespace master { } // namespace internal { } // namespace mesos { #endif // __MASTER_HPP__
{ "content_hash": "dfc233c0b1666a615f5d03b3768a6d41", "timestamp": "", "source": "github", "line_count": 2681, "max_line_length": 80, "avg_line_length": 35.75233121969414, "alnum_prop": 0.6981283645620331, "repo_name": "jpeach/mesos", "id": "7d9732f1e432f6f0290d234242864cbdbf381fa8", "size": "95852", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/master/master.hpp", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "7957" }, { "name": "C++", "bytes": "14346110" }, { "name": "CMake", "bytes": "101897" }, { "name": "CSS", "bytes": "8085" }, { "name": "Dockerfile", "bytes": "16785" }, { "name": "Groovy", "bytes": "4284" }, { "name": "HTML", "bytes": "93799" }, { "name": "Java", "bytes": "142806" }, { "name": "JavaScript", "bytes": "93108" }, { "name": "M4", "bytes": "200631" }, { "name": "Makefile", "bytes": "116649" }, { "name": "PowerShell", "bytes": "2547" }, { "name": "Python", "bytes": "380663" }, { "name": "Ruby", "bytes": "10047" }, { "name": "Shell", "bytes": "156002" } ], "symlink_target": "" }
package org.apache.camel.spring.xml; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.util.ObjectHelper; public class ConfiguredRouteBuilder extends RouteBuilder { private String fromUri; private String toUri; public void configure() throws Exception { ObjectHelper.notNull(fromUri, "fromUri"); ObjectHelper.notNull(toUri, "toUri"); from(fromUri).to(toUri); } public String getFromUri() { return fromUri; } public void setFromUri(String fromUri) { this.fromUri = fromUri; } public String getToUri() { return toUri; } public void setToUri(String toUri) { this.toUri = toUri; } }
{ "content_hash": "7feb2aaef1bf105042f0858a8bf4a865", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 58, "avg_line_length": 21.575757575757574, "alnum_prop": 0.6587078651685393, "repo_name": "Fabryprog/camel", "id": "f26b045115b188fbec04cd4a795a6fc4229630c3", "size": "1514", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "components/camel-spring/src/test/java/org/apache/camel/spring/xml/ConfiguredRouteBuilder.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Apex", "bytes": "6521" }, { "name": "Batchfile", "bytes": "2353" }, { "name": "CSS", "bytes": "17204" }, { "name": "Elm", "bytes": "10852" }, { "name": "FreeMarker", "bytes": "8015" }, { "name": "Groovy", "bytes": "14479" }, { "name": "HTML", "bytes": "909437" }, { "name": "Java", "bytes": "82182194" }, { "name": "JavaScript", "bytes": "102432" }, { "name": "Makefile", "bytes": "513" }, { "name": "Shell", "bytes": "17240" }, { "name": "TSQL", "bytes": "28835" }, { "name": "Tcl", "bytes": "4974" }, { "name": "Thrift", "bytes": "6979" }, { "name": "XQuery", "bytes": "546" }, { "name": "XSLT", "bytes": "271473" } ], "symlink_target": "" }
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>org.openx.data</groupId> <artifactId>json-serde-parent</artifactId> <version>1.3</version> <packaging>pom</packaging> <name>openx-json-serde</name> <url>https://github.com/rcongiu/Hive-JSON-Serde</url> <scm> <!-- Replace the connection below with your project connection --> <connection>scm:git:git@github.com:rcongiu/Hive-JSON-Serde.git</connection> <developerConnection>scm:git:git@github.com:rcongiu/Hive-JSON-Serde.git</developerConnection> <url>scm:git:git@github.com:rcongiu/Hive-JSON-Serde.git</url> <tag>HEAD</tag> </scm> <licenses> <license> <name>The Apache Software License, Version 2.0</name> <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url> </license> </licenses> <properties> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <maven-jar-plugin.version>2.4</maven-jar-plugin.version> <!-- cdh4 versions --> <cdh4.version>4.6.0</cdh4.version> <cdh4.hive.version>0.10.0-cdh${cdh4.version}</cdh4.hive.version> <cdh4.hadoop.version>2.0.0-cdh${cdh4.version}</cdh4.hadoop.version> <!-- cdh5 versions --> <cdh5.version>5.0.0</cdh5.version> <cdh5.hive.version>0.12.0-cdh${cdh5.version}</cdh5.hive.version> <cdh5.hadoop.version>2.3.0-cdh${cdh5.version}</cdh5.hadoop.version> </properties> <profiles> <profile> <id>cdh4</id> <activation> <activeByDefault>true</activeByDefault> </activation> <properties> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <serde.shim>json-serde-cdh4-shim</serde.shim> <cdh.version>${cdh4.version}</cdh.version> <cdh.hive.version>${cdh4.hive.version}</cdh.hive.version> <cdh.hadoop.version>${cdh4.hadoop.version}</cdh.hadoop.version> <cdh.shim>cdh4</cdh.shim> </properties> </profile> <profile> <id>cdh5</id> <properties> 
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <serde.shim>json-serde-cdh5-shim</serde.shim> <cdh.version>${cdh5.version}</cdh.version> <cdh.hive.version>${cdh5.hive.version}</cdh.hive.version> <cdh.hadoop.version>${cdh5.hadoop.version}</cdh.hadoop.version> <cdh.shim>cdh5</cdh.shim> </properties> </profile> </profiles> <modules> <module>json</module> <module>json-serde-cdh4-shim</module> <module>json-serde-cdh5-shim</module> <module>json-serde</module> </modules> <repositories> <repository> <id>Cloudera</id> <name>Cloudera Maven Repo</name> <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url> </repository> </repositories> <dependencies> <dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> <version>4.10</version> <scope>test</scope> </dependency> </dependencies> <reporting> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-javadoc-plugin</artifactId> <version>2.9</version> <reportSets> <reportSet><!-- by default, id = "default" --> <reports><!-- select non-aggregate reports --> <report>javadoc</report> <report>test-javadoc</report> </reports> </reportSet> <reportSet><!-- aggregate reportSet, to define in poms having modules --> <id>aggregate</id> <inherited>false</inherited><!-- don't run aggregate in child modules --> <reports> <report>aggregate</report> </reports> </reportSet> </reportSets> </plugin> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>cobertura-maven-plugin</artifactId> <version>2.5.2</version> <configuration> <formats> <format>html</format> </formats> <aggregate>true</aggregate> <instrumentation> <ignores> <ignore>java.lang.UnsupportedOperationException.*</ignore> </ignores> <excludes> <exclude>**/*Exception.class</exclude> </excludes> </instrumentation> </configuration> </plugin> </plugins> </reporting> <build> <extensions> <extension> <groupId>org.apache.maven.wagon</groupId> <artifactId>wagon-ssh-external</artifactId> <version>2.2</version> </extension> 
</extensions> <pluginManagement> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-release-plugin</artifactId> <version>2.1</version> <configuration> <mavenExecutorId>forked-path</mavenExecutorId> <useReleaseProfile>false</useReleaseProfile> <arguments>-Psonatype-oss-release</arguments> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-jar-plugin</artifactId> <version>${maven-jar-plugin.version}</version> <configuration> <archive> <manifestEntries> <git-SHA-1>${buildNumber}</git-SHA-1> </manifestEntries> </archive> </configuration> <executions> <execution> <goals> <goal>test-jar</goal> </goals> </execution> </executions> </plugin> </plugins> </pluginManagement> <plugins> <plugin> <groupId>com.atlassian.maven.plugins</groupId> <artifactId>maven-jgitflow-plugin</artifactId> <version>1.0-alpha20</version> <configuration> <allowSnapshots>false</allowSnapshots> <enableFeatureVersions>false</enableFeatureVersions> <pushFeatures>false</pushFeatures> <pushReleases>true</pushReleases> <allowUntracked>true</allowUntracked> <noDeploy>true</noDeploy> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <version>2.0.2</version> <configuration> <source>1.6</source> <target>1.6</target> <encoding>UTF-8</encoding> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-surefire-plugin</artifactId> <version>2.10</version> <configuration> <argLine>-Xmx512m</argLine> <excludes> <exclude>**/benchmark/*.java</exclude> </excludes> </configuration> </plugin> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>buildnumber-maven-plugin</artifactId> <version>1.1</version> <executions> <execution> <phase>validate</phase> <goals> <goal>create</goal> </goals> </execution> </executions> </plugin> </plugins> </build> </project>
{ "content_hash": "2ebf9ba2949db7fbef36cf8cf30a0be8", "timestamp": "", "source": "github", "line_count": 234, "max_line_length": 204, "avg_line_length": 28.73076923076923, "alnum_prop": 0.6571471069463037, "repo_name": "vinothsekar/HiveSerde", "id": "21f9f53f4e90bf81bcea9d046773037229252254", "size": "6723", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pom.xml", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Java", "bytes": "365604" } ], "symlink_target": "" }
module Giraffi
  class Client
    # Mixed into Client: methods for querying the regions resource.
    module Regions
      # Fetches every region available to the account.
      #
      # @requires_apikey Yes
      # @return [HTTParty::Response]
      def find_regions
        path = "/regions.json?apikey=#{apikey}"
        self.class.get(path)
      end
    end
  end
end
{ "content_hash": "6d6bd52835269525260b4cf18e00ca12", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 56, "avg_line_length": 21, "alnum_prop": 0.6222222222222222, "repo_name": "giraffi/giraffi", "id": "071c414c2083a2cdad13c45071bdf4dd74ecbe69", "size": "315", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/giraffi/client/regions.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "79548" } ], "symlink_target": "" }
This is a simple Go HTTP server that exposes an API for managing a ToDo list. ### Dependencies The only external dependency is https://github.com/mattn/go-sqlite3. ### Design thought process I intentionally didn't use an ORM. I agree with the logic in [Golang, ORMs, and why I am still not using one](http://www.hydrogen18.com/blog/golang-orms-and-why-im-still-not-using-one.html) and believe that ORMs don't fit well with statically typed languages and the benefit they offer doesn't outweigh the performance hit or the black-box they create in the app. I wrote a very simple router (`server/router.go`) to handle routes by path and HTTP method. Normally, I would have used [httprouter](https://github.com/julienschmidt/httprouter) but I didn't because I wasn't sure if that would be considered cheating on the first rule. :) ### Building ``` $ git clone git@github.com:ahare/shaleapps_todo.git $ cd shaleapps_todo $ go get && go build ``` ### Running the tests ``` $ go test ./... ? shaleapps_todo [no test files] ok shaleapps_todo/db 0.018s ok shaleapps_todo/server 0.009s ``` ### Running the server ``` $ ./shaleapps_todo ToDo server listening on :8080... 
``` ### Using the server Create a new ToDo: ``` $ curl -XPOST -d '{"text": "Pick up milk"}' -H 'Accept: application/json' -i ':8080/todos' HTTP/1.1 201 Created Content-Type: application/json Date: Wed, 03 Feb 2016 18:20:24 GMT Content-Length: 44 {"id":1,"text":"Pick up milk","done":false} ``` Update a ToDo: ``` $ curl -XPUT -d '{"id":1,"text":"Pick up milk","done":true}' -H 'Accept: application/json' -i ':8080/todos/1' HTTP/1.1 200 OK Content-Type: application/json Date: Wed, 03 Feb 2016 18:46:59 GMT Content-Length: 43 {"id":1,"text":"Pick up milk","done":true} ``` Add several more ToDos: ``` $ curl -XPOST -d '{"text": "Drink milk"}' -H 'Accept: application/json' ':8080/todos' {"id":2,"text":"Drink milk","done":false} $ curl -XPOST -d '{"text": "Throw away milk jug"}' -H 'Accept: application/json' ':8080/todos' {"id":3,"text":"Throw away milk jug","done":false} $ curl -XPOST -d '{"text": "Wash the car"}' -H 'Accept: application/json' ':8080/todos' {"id":4,"text":"Wash the car","done":false} ``` Find a ToDo by ID: ``` $ curl -i ':8080/todos/4' HTTP/1.1 200 OK Content-Type: application/json Date: Wed, 03 Feb 2016 19:38:14 GMT Content-Length: 44 {"id":4,"text":"Wash the car","done":false} ``` Find all ToDos that are done: ``` $ curl -i ':8080/todos?done=true' HTTP/1.1 200 OK Content-Type: application/json Date: Wed, 03 Feb 2016 19:39:10 GMT Content-Length: 45 [{"id":1,"text":"Pick up milk","done":true}] ``` Search for ToDos by text: ``` $ curl -i ':8080/todos?text=milk' HTTP/1.1 200 OK Content-Type: application/json Date: Wed, 03 Feb 2016 19:32:44 GMT Content-Length: 138 [{"id":1,"text":"Pick up milk","done":true},{"id":2,"text":"Drink milk","done":false},{"id":3,"text":"Throw away milk jug","done":false}] ``` Get all ToDos: ``` $ curl -i ':8080/todos?text=milk' HTTP/1.1 200 OK Content-Type: application/json Date: Wed, 03 Feb 2016 19:32:47 GMT Content-Length: 138 [{"id":1,"text":"Pick up milk","done":true},{"id":2,"text":"Drink 
milk","done":false},{"id":3,"text":"Throw away milk jug","done":false},{"id":4,"text":"Wash the car","done":false}] ``` Delete a ToDo: ``` $ curl -XDELETE -i ':8080/todos/1' HTTP/1.1 200 OK Date: Wed, 03 Feb 2016 19:01:18 GMT Content-Length: 0 Content-Type: text/plain; charset=utf-8 ```
{ "content_hash": "11ce44c3f018c42446977bdd8c9829a0", "timestamp": "", "source": "github", "line_count": 138, "max_line_length": 181, "avg_line_length": 25.036231884057973, "alnum_prop": 0.6723589001447178, "repo_name": "ahare/shaleapps_todo", "id": "5e397fd095b26528438d10708eb80c7af5786577", "size": "3485", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "Go", "bytes": "11064" } ], "symlink_target": "" }
#ifndef WINPR_NT_H #define WINPR_NT_H #include <winpr/winpr.h> #include <winpr/wtypes.h> #include <winpr/windows.h> #ifndef _WIN32 /* Defined in winnt.h, do not redefine */ #define STATUS_WAIT_0 ((NTSTATUS)0x00000000L) #define STATUS_ABANDONED_WAIT_0 ((NTSTATUS)0x00000080L) #define STATUS_USER_APC ((NTSTATUS)0x000000C0L) #define STATUS_TIMEOUT ((NTSTATUS)0x00000102L) #define STATUS_PENDING ((NTSTATUS)0x00000103L) #define DBG_EXCEPTION_HANDLED ((NTSTATUS)0x00010001L) #define DBG_CONTINUE ((NTSTATUS)0x00010002L) #define STATUS_SEGMENT_NOTIFICATION ((NTSTATUS)0x40000005L) #define STATUS_FATAL_APP_EXIT ((NTSTATUS)0x40000015L) #define DBG_TERMINATE_THREAD ((NTSTATUS)0x40010003L) #define DBG_TERMINATE_PROCESS ((NTSTATUS)0x40010004L) #define DBG_CONTROL_C ((NTSTATUS)0x40010005L) #define DBG_PRINTEXCEPTION_C ((NTSTATUS)0x40010006L) #define DBG_RIPEXCEPTION ((NTSTATUS)0x40010007L) #define DBG_CONTROL_BREAK ((NTSTATUS)0x40010008L) #define DBG_COMMAND_EXCEPTION ((NTSTATUS)0x40010009L) #define STATUS_GUARD_PAGE_VIOLATION ((NTSTATUS)0x80000001L) #define STATUS_DATATYPE_MISALIGNMENT ((NTSTATUS)0x80000002L) #define STATUS_BREAKPOINT ((NTSTATUS)0x80000003L) #define STATUS_SINGLE_STEP ((NTSTATUS)0x80000004L) #define STATUS_LONGJUMP ((NTSTATUS)0x80000026L) #define STATUS_UNWIND_CONSOLIDATE ((NTSTATUS)0x80000029L) #define DBG_EXCEPTION_NOT_HANDLED ((NTSTATUS)0x80010001L) #define STATUS_ACCESS_VIOLATION ((NTSTATUS)0xC0000005L) #define STATUS_IN_PAGE_ERROR ((NTSTATUS)0xC0000006L) #define STATUS_INVALID_HANDLE ((NTSTATUS)0xC0000008L) #define STATUS_INVALID_PARAMETER ((NTSTATUS)0xC000000DL) #define STATUS_NO_MEMORY ((NTSTATUS)0xC0000017L) #define STATUS_ILLEGAL_INSTRUCTION ((NTSTATUS)0xC000001DL) #define STATUS_NONCONTINUABLE_EXCEPTION ((NTSTATUS)0xC0000025L) #define STATUS_INVALID_DISPOSITION ((NTSTATUS)0xC0000026L) #define STATUS_ARRAY_BOUNDS_EXCEEDED ((NTSTATUS)0xC000008CL) #define STATUS_FLOAT_DENORMAL_OPERAND ((NTSTATUS)0xC000008DL) #define STATUS_FLOAT_DIVIDE_BY_ZERO 
((NTSTATUS)0xC000008EL) #define STATUS_FLOAT_INEXACT_RESULT ((NTSTATUS)0xC000008FL) #define STATUS_FLOAT_INVALID_OPERATION ((NTSTATUS)0xC0000090L) #define STATUS_FLOAT_OVERFLOW ((NTSTATUS)0xC0000091L) #define STATUS_FLOAT_STACK_CHECK ((NTSTATUS)0xC0000092L) #define STATUS_FLOAT_UNDERFLOW ((NTSTATUS)0xC0000093L) #define STATUS_INTEGER_DIVIDE_BY_ZERO ((NTSTATUS)0xC0000094L) #define STATUS_INTEGER_OVERFLOW ((NTSTATUS)0xC0000095L) #define STATUS_PRIVILEGED_INSTRUCTION ((NTSTATUS)0xC0000096L) #define STATUS_STACK_OVERFLOW ((NTSTATUS)0xC00000FDL) #define STATUS_DLL_NOT_FOUND ((NTSTATUS)0xC0000135L) #define STATUS_ORDINAL_NOT_FOUND ((NTSTATUS)0xC0000138L) #define STATUS_ENTRYPOINT_NOT_FOUND ((NTSTATUS)0xC0000139L) #define STATUS_CONTROL_C_EXIT ((NTSTATUS)0xC000013AL) #define STATUS_DLL_INIT_FAILED ((NTSTATUS)0xC0000142L) #define STATUS_FLOAT_MULTIPLE_FAULTS ((NTSTATUS)0xC00002B4L) #define STATUS_FLOAT_MULTIPLE_TRAPS ((NTSTATUS)0xC00002B5L) #define STATUS_REG_NAT_CONSUMPTION ((NTSTATUS)0xC00002C9L) #define STATUS_STACK_BUFFER_OVERRUN ((NTSTATUS)0xC0000409L) #define STATUS_INVALID_CRUNTIME_PARAMETER ((NTSTATUS)0xC0000417L) #define STATUS_ASSERTION_FAILURE ((NTSTATUS)0xC0000420L) #define STATUS_SXS_EARLY_DEACTIVATION ((NTSTATUS)0xC015000FL) #define STATUS_SXS_INVALID_DEACTIVATION ((NTSTATUS)0xC0150010L) #endif /* Defined in wincred.h, do not redefine */ #if defined(_WIN32) && !defined(_UWP) #include <wincred.h> #else #define STATUS_LOGON_FAILURE ((NTSTATUS)0xC000006DL) #define STATUS_WRONG_PASSWORD ((NTSTATUS)0xC000006AL) #define STATUS_PASSWORD_EXPIRED ((NTSTATUS)0xC0000071L) #define STATUS_PASSWORD_MUST_CHANGE ((NTSTATUS)0xC0000224L) #define STATUS_ACCESS_DENIED ((NTSTATUS)0xC0000022L) #define STATUS_DOWNGRADE_DETECTED ((NTSTATUS)0xC0000388L) #define STATUS_AUTHENTICATION_FIREWALL_FAILED ((NTSTATUS)0xC0000413L) #define STATUS_ACCOUNT_DISABLED ((NTSTATUS)0xC0000072L) #define STATUS_ACCOUNT_RESTRICTION ((NTSTATUS)0xC000006EL) #define STATUS_ACCOUNT_LOCKED_OUT 
((NTSTATUS)0xC0000234L) #define STATUS_ACCOUNT_EXPIRED ((NTSTATUS)0xC0000193L) #define STATUS_LOGON_TYPE_NOT_GRANTED ((NTSTATUS)0xC000015BL) #endif #define FACILITY_DEBUGGER 0x1 #define FACILITY_RPC_RUNTIME 0x2 #define FACILITY_RPC_STUBS 0x3 #define FACILITY_IO_ERROR_CODE 0x4 #define FACILITY_TERMINAL_SERVER 0xA #define FACILITY_USB_ERROR_CODE 0x10 #define FACILITY_HID_ERROR_CODE 0x11 #define FACILITY_FIREWIRE_ERROR_CODE 0x12 #define FACILITY_CLUSTER_ERROR_CODE 0x13 #define FACILITY_ACPI_ERROR_CODE 0x14 #define FACILITY_SXS_ERROR_CODE 0x15 /** * NTSTATUS codes */ #if!defined(STATUS_SUCCESS) #define STATUS_SUCCESS ((NTSTATUS)0x00000000) #endif #define STATUS_SEVERITY_SUCCESS 0x0 #define STATUS_SEVERITY_INFORMATIONAL 0x1 #define STATUS_SEVERITY_WARNING 0x2 #define STATUS_SEVERITY_ERROR 0x3 #define STATUS_WAIT_1 ((NTSTATUS)0x00000001) #define STATUS_WAIT_2 ((NTSTATUS)0x00000002) #define STATUS_WAIT_3 ((NTSTATUS)0x00000003) #define STATUS_WAIT_63 ((NTSTATUS)0x0000003f) #define STATUS_ABANDONED ((NTSTATUS)0x00000080) #define STATUS_ABANDONED_WAIT_63 ((NTSTATUS)0x000000BF) //#define STATUS_USER_APC ((NTSTATUS)0x000000C0) #define STATUS_KERNEL_APC ((NTSTATUS)0x00000100) #define STATUS_ALERTED ((NTSTATUS)0x00000101) //#define STATUS_TIMEOUT ((NTSTATUS)0x00000102) //#define STATUS_PENDING ((NTSTATUS)0x00000103) #define STATUS_REPARSE ((NTSTATUS)0x00000104) #define STATUS_MORE_ENTRIES ((NTSTATUS)0x00000105) #define STATUS_NOT_ALL_ASSIGNED ((NTSTATUS)0x00000106) #define STATUS_SOME_NOT_MAPPED ((NTSTATUS)0x00000107) #define STATUS_OPLOCK_BREAK_IN_PROGRESS ((NTSTATUS)0x00000108) #define STATUS_VOLUME_MOUNTED ((NTSTATUS)0x00000109) #define STATUS_RXACT_COMMITTED ((NTSTATUS)0x0000010A) #define STATUS_NOTIFY_CLEANUP ((NTSTATUS)0x0000010B) #define STATUS_NOTIFY_ENUM_DIR ((NTSTATUS)0x0000010C) #define STATUS_NO_QUOTAS_FOR_ACCOUNT ((NTSTATUS)0x0000010D) #define STATUS_PRIMARY_TRANSPORT_CONNECT_FAILED ((NTSTATUS)0x0000010E) #define STATUS_PAGE_FAULT_TRANSITION ((NTSTATUS)0x00000110) 
#define STATUS_PAGE_FAULT_DEMAND_ZERO ((NTSTATUS)0x00000111) #define STATUS_PAGE_FAULT_COPY_ON_WRITE ((NTSTATUS)0x00000112) #define STATUS_PAGE_FAULT_GUARD_PAGE ((NTSTATUS)0x00000113) #define STATUS_PAGE_FAULT_PAGING_FILE ((NTSTATUS)0x00000114) #define STATUS_CACHE_PAGE_LOCKED ((NTSTATUS)0x00000115) #define STATUS_CRASH_DUMP ((NTSTATUS)0x00000116) #define STATUS_BUFFER_ALL_ZEROS ((NTSTATUS)0x00000117) #define STATUS_REPARSE_OBJECT ((NTSTATUS)0x00000118) #define STATUS_RESOURCE_REQUIREMENTS_CHANGED ((NTSTATUS)0x00000119) #define STATUS_TRANSLATION_COMPLETE ((NTSTATUS)0x00000120) #define STATUS_DS_MEMBERSHIP_EVALUATED_LOCALLY ((NTSTATUS)0x00000121) #define STATUS_NOTHING_TO_TERMINATE ((NTSTATUS)0x00000122) #define STATUS_PROCESS_NOT_IN_JOB ((NTSTATUS)0x00000123) #define STATUS_PROCESS_IN_JOB ((NTSTATUS)0x00000124) #define STATUS_VOLSNAP_HIBERNATE_READY ((NTSTATUS)0x00000125) #define STATUS_FSFILTER_OP_COMPLETED_SUCCESSFULLY ((NTSTATUS)0x00000126) #define STATUS_OBJECT_NAME_EXISTS ((NTSTATUS)0x40000000) #define STATUS_THREAD_WAS_SUSPENDED ((NTSTATUS)0x40000001) #define STATUS_WORKING_SET_LIMIT_RANGE ((NTSTATUS)0x40000002) #define STATUS_IMAGE_NOT_AT_BASE ((NTSTATUS)0x40000003) #define STATUS_RXACT_STATE_CREATED ((NTSTATUS)0x40000004) //#define STATUS_SEGMENT_NOTIFICATION ((NTSTATUS)0x40000005) #define STATUS_LOCAL_USER_SESSION_KEY ((NTSTATUS)0x40000006) #define STATUS_BAD_CURRENT_DIRECTORY ((NTSTATUS)0x40000007) #define STATUS_SERIAL_MORE_WRITES ((NTSTATUS)0x40000008) #define STATUS_REGISTRY_RECOVERED ((NTSTATUS)0x40000009) #define STATUS_FT_READ_RECOVERY_FROM_BACKUP ((NTSTATUS)0x4000000A) #define STATUS_FT_WRITE_RECOVERY ((NTSTATUS)0x4000000B) #define STATUS_SERIAL_COUNTER_TIMEOUT ((NTSTATUS)0x4000000C) #define STATUS_NULL_LM_PASSWORD ((NTSTATUS)0x4000000D) #define STATUS_IMAGE_MACHINE_TYPE_MISMATCH ((NTSTATUS)0x4000000E) #define STATUS_RECEIVE_PARTIAL ((NTSTATUS)0x4000000F) #define STATUS_RECEIVE_EXPEDITED ((NTSTATUS)0x40000010) #define 
STATUS_RECEIVE_PARTIAL_EXPEDITED ((NTSTATUS)0x40000011) #define STATUS_EVENT_DONE ((NTSTATUS)0x40000012) #define STATUS_EVENT_PENDING ((NTSTATUS)0x40000013) #define STATUS_CHECKING_FILE_SYSTEM ((NTSTATUS)0x40000014) //#define STATUS_FATAL_APP_EXIT ((NTSTATUS)0x40000015) #define STATUS_PREDEFINED_HANDLE ((NTSTATUS)0x40000016) #define STATUS_WAS_UNLOCKED ((NTSTATUS)0x40000017) #define STATUS_SERVICE_NOTIFICATION ((NTSTATUS)0x40000018) #define STATUS_WAS_LOCKED ((NTSTATUS)0x40000019) #define STATUS_LOG_HARD_ERROR ((NTSTATUS)0x4000001A) #define STATUS_ALREADY_WIN32 ((NTSTATUS)0x4000001B) #define STATUS_WX86_UNSIMULATE ((NTSTATUS)0x4000001C) #define STATUS_WX86_CONTINUE ((NTSTATUS)0x4000001D) #define STATUS_WX86_SINGLE_STEP ((NTSTATUS)0x4000001E) #define STATUS_WX86_BREAKPOINT ((NTSTATUS)0x4000001F) #define STATUS_WX86_EXCEPTION_CONTINUE ((NTSTATUS)0x40000020) #define STATUS_WX86_EXCEPTION_LASTCHANCE ((NTSTATUS)0x40000021) #define STATUS_WX86_EXCEPTION_CHAIN ((NTSTATUS)0x40000022) #define STATUS_IMAGE_MACHINE_TYPE_MISMATCH_EXE ((NTSTATUS)0x40000023) #define STATUS_NO_YIELD_PERFORMED ((NTSTATUS)0x40000024) #define STATUS_TIMER_RESUME_IGNORED ((NTSTATUS)0x40000025) #define STATUS_ARBITRATION_UNHANDLED ((NTSTATUS)0x40000026) #define STATUS_CARDBUS_NOT_SUPPORTED ((NTSTATUS)0x40000027) #define STATUS_WX86_CREATEWX86TIB ((NTSTATUS)0x40000028) #define STATUS_MP_PROCESSOR_MISMATCH ((NTSTATUS)0x40000029) #define STATUS_HIBERNATED ((NTSTATUS)0x4000002A) #define STATUS_RESUME_HIBERNATION ((NTSTATUS)0x4000002B) #define STATUS_FIRMWARE_UPDATED ((NTSTATUS)0x4000002C) #define STATUS_WAKE_SYSTEM ((NTSTATUS)0x40000294) #define STATUS_DS_SHUTTING_DOWN ((NTSTATUS)0x40000370) #define RPC_NT_UUID_LOCAL_ONLY ((NTSTATUS)0x40020056) #define RPC_NT_SEND_INCOMPLETE ((NTSTATUS)0x400200AF) #define STATUS_CTX_CDM_CONNECT ((NTSTATUS)0x400A0004) #define STATUS_CTX_CDM_DISCONNECT ((NTSTATUS)0x400A0005) #define STATUS_SXS_RELEASE_ACTIVATION_CONTEXT ((NTSTATUS)0x4015000D) //#define 
STATUS_GUARD_PAGE_VIOLATION ((NTSTATUS)0x80000001) //#define STATUS_DATATYPE_MISALIGNMENT ((NTSTATUS)0x80000002) //#define STATUS_BREAKPOINT ((NTSTATUS)0x80000003) //#define STATUS_SINGLE_STEP ((NTSTATUS)0x80000004) #define STATUS_BUFFER_OVERFLOW ((NTSTATUS)0x80000005) #define STATUS_NO_MORE_FILES ((NTSTATUS)0x80000006) #define STATUS_WAKE_SYSTEM_DEBUGGER ((NTSTATUS)0x80000007) #define STATUS_HANDLES_CLOSED ((NTSTATUS)0x8000000A) #define STATUS_NO_INHERITANCE ((NTSTATUS)0x8000000B) #define STATUS_GUID_SUBSTITUTION_MADE ((NTSTATUS)0x8000000C) #define STATUS_PARTIAL_COPY ((NTSTATUS)0x8000000D) #define STATUS_DEVICE_PAPER_EMPTY ((NTSTATUS)0x8000000E) #define STATUS_DEVICE_POWERED_OFF ((NTSTATUS)0x8000000F) #define STATUS_DEVICE_OFF_LINE ((NTSTATUS)0x80000010) #define STATUS_DEVICE_BUSY ((NTSTATUS)0x80000011) #define STATUS_NO_MORE_EAS ((NTSTATUS)0x80000012) #define STATUS_INVALID_EA_NAME ((NTSTATUS)0x80000013) #define STATUS_EA_LIST_INCONSISTENT ((NTSTATUS)0x80000014) #define STATUS_INVALID_EA_FLAG ((NTSTATUS)0x80000015) #define STATUS_VERIFY_REQUIRED ((NTSTATUS)0x80000016) #define STATUS_EXTRANEOUS_INFORMATION ((NTSTATUS)0x80000017) #define STATUS_RXACT_COMMIT_NECESSARY ((NTSTATUS)0x80000018) #define STATUS_NO_MORE_ENTRIES ((NTSTATUS)0x8000001A) #define STATUS_FILEMARK_DETECTED ((NTSTATUS)0x8000001B) #define STATUS_MEDIA_CHANGED ((NTSTATUS)0x8000001C) #define STATUS_BUS_RESET ((NTSTATUS)0x8000001D) #define STATUS_END_OF_MEDIA ((NTSTATUS)0x8000001E) #define STATUS_BEGINNING_OF_MEDIA ((NTSTATUS)0x8000001F) #define STATUS_MEDIA_CHECK ((NTSTATUS)0x80000020) #define STATUS_SETMARK_DETECTED ((NTSTATUS)0x80000021) #define STATUS_NO_DATA_DETECTED ((NTSTATUS)0x80000022) #define STATUS_REDIRECTOR_HAS_OPEN_HANDLES ((NTSTATUS)0x80000023) #define STATUS_SERVER_HAS_OPEN_HANDLES ((NTSTATUS)0x80000024) #define STATUS_ALREADY_DISCONNECTED ((NTSTATUS)0x80000025) //#define STATUS_LONGJUMP ((NTSTATUS)0x80000026) #define STATUS_CLEANER_CARTRIDGE_INSTALLED ((NTSTATUS)0x80000027) #define 
STATUS_PLUGPLAY_QUERY_VETOED ((NTSTATUS)0x80000028) //#define STATUS_UNWIND_CONSOLIDATE ((NTSTATUS)0x80000029) #define STATUS_REGISTRY_HIVE_RECOVERED ((NTSTATUS)0x8000002A) #define STATUS_DLL_MIGHT_BE_INSECURE ((NTSTATUS)0x8000002B) #define STATUS_DLL_MIGHT_BE_INCOMPATIBLE ((NTSTATUS)0x8000002C) #define STATUS_DEVICE_REQUIRES_CLEANING ((NTSTATUS)0x80000288) #define STATUS_DEVICE_DOOR_OPEN ((NTSTATUS)0x80000289) #define STATUS_CLUSTER_NODE_ALREADY_UP ((NTSTATUS)0x80130001) #define STATUS_CLUSTER_NODE_ALREADY_DOWN ((NTSTATUS)0x80130002) #define STATUS_CLUSTER_NETWORK_ALREADY_ONLINE ((NTSTATUS)0x80130003) #define STATUS_CLUSTER_NETWORK_ALREADY_OFFLINE ((NTSTATUS)0x80130004) #define STATUS_CLUSTER_NODE_ALREADY_MEMBER ((NTSTATUS)0x80130005) //#define STATUS_WAIT_0 ((NTSTATUS)0x00000000) #define STATUS_UNSUCCESSFUL ((NTSTATUS)0xC0000001) #define STATUS_NOT_IMPLEMENTED ((NTSTATUS)0xC0000002) #define STATUS_INVALID_INFO_CLASS ((NTSTATUS)0xC0000003) #define STATUS_INFO_LENGTH_MISMATCH ((NTSTATUS)0xC0000004) //#define STATUS_ACCESS_VIOLATION ((NTSTATUS)0xC0000005) //#define STATUS_IN_PAGE_ERROR ((NTSTATUS)0xC0000006) #define STATUS_PAGEFILE_QUOTA ((NTSTATUS)0xC0000007) //#define STATUS_INVALID_HANDLE ((NTSTATUS)0xC0000008) #define STATUS_BAD_INITIAL_STACK ((NTSTATUS)0xC0000009) #define STATUS_BAD_INITIAL_PC ((NTSTATUS)0xC000000A) #define STATUS_INVALID_CID ((NTSTATUS)0xC000000B) #define STATUS_TIMER_NOT_CANCELED ((NTSTATUS)0xC000000C) //#define STATUS_INVALID_PARAMETER ((NTSTATUS)0xC000000D) #define STATUS_NO_SUCH_DEVICE ((NTSTATUS)0xC000000E) #define STATUS_NO_SUCH_FILE ((NTSTATUS)0xC000000F) #define STATUS_INVALID_DEVICE_REQUEST ((NTSTATUS)0xC0000010) #define STATUS_END_OF_FILE ((NTSTATUS)0xC0000011) #define STATUS_WRONG_VOLUME ((NTSTATUS)0xC0000012) #define STATUS_NO_MEDIA_IN_DEVICE ((NTSTATUS)0xC0000013) #define STATUS_UNRECOGNIZED_MEDIA ((NTSTATUS)0xC0000014) #define STATUS_NONEXISTENT_SECTOR ((NTSTATUS)0xC0000015) #define STATUS_MORE_PROCESSING_REQUIRED 
((NTSTATUS)0xC0000016) //#define STATUS_NO_MEMORY ((NTSTATUS)0xC0000017) #define STATUS_CONFLICTING_ADDRESSES ((NTSTATUS)0xC0000018) #define STATUS_NOT_MAPPED_VIEW ((NTSTATUS)0xC0000019) #define STATUS_UNABLE_TO_FREE_VM ((NTSTATUS)0xC000001A) #define STATUS_UNABLE_TO_DELETE_SECTION ((NTSTATUS)0xC000001B) #define STATUS_INVALID_SYSTEM_SERVICE ((NTSTATUS)0xC000001C) //#define STATUS_ILLEGAL_INSTRUCTION ((NTSTATUS)0xC000001D) #define STATUS_INVALID_LOCK_SEQUENCE ((NTSTATUS)0xC000001E) #define STATUS_INVALID_VIEW_SIZE ((NTSTATUS)0xC000001F) #define STATUS_INVALID_FILE_FOR_SECTION ((NTSTATUS)0xC0000020) #define STATUS_ALREADY_COMMITTED ((NTSTATUS)0xC0000021) //#define STATUS_ACCESS_DENIED ((NTSTATUS)0xC0000022) #define STATUS_BUFFER_TOO_SMALL ((NTSTATUS)0xC0000023) #define STATUS_OBJECT_TYPE_MISMATCH ((NTSTATUS)0xC0000024) //#define STATUS_NONCONTINUABLE_EXCEPTION ((NTSTATUS)0xC0000025) //#define STATUS_INVALID_DISPOSITION ((NTSTATUS)0xC0000026) #define STATUS_UNWIND ((NTSTATUS)0xC0000027) #define STATUS_BAD_STACK ((NTSTATUS)0xC0000028) #define STATUS_INVALID_UNWIND_TARGET ((NTSTATUS)0xC0000029) #define STATUS_NOT_LOCKED ((NTSTATUS)0xC000002A) #define STATUS_PARITY_ERROR ((NTSTATUS)0xC000002B) #define STATUS_UNABLE_TO_DECOMMIT_VM ((NTSTATUS)0xC000002C) #define STATUS_NOT_COMMITTED ((NTSTATUS)0xC000002D) #define STATUS_INVALID_PORT_ATTRIBUTES ((NTSTATUS)0xC000002E) #define STATUS_PORT_MESSAGE_TOO_LONG ((NTSTATUS)0xC000002F) #define STATUS_INVALID_PARAMETER_MIX ((NTSTATUS)0xC0000030) #define STATUS_INVALID_QUOTA_LOWER ((NTSTATUS)0xC0000031) #define STATUS_DISK_CORRUPT_ERROR ((NTSTATUS)0xC0000032) #define STATUS_OBJECT_NAME_INVALID ((NTSTATUS)0xC0000033) #define STATUS_OBJECT_NAME_NOT_FOUND ((NTSTATUS)0xC0000034) #define STATUS_OBJECT_NAME_COLLISION ((NTSTATUS)0xC0000035) #define STATUS_PORT_DISCONNECTED ((NTSTATUS)0xC0000037) #define STATUS_DEVICE_ALREADY_ATTACHED ((NTSTATUS)0xC0000038) #define STATUS_OBJECT_PATH_INVALID ((NTSTATUS)0xC0000039) #define 
STATUS_OBJECT_PATH_NOT_FOUND ((NTSTATUS)0xC000003A) #define STATUS_OBJECT_PATH_SYNTAX_BAD ((NTSTATUS)0xC000003B) #define STATUS_DATA_OVERRUN ((NTSTATUS)0xC000003C) #define STATUS_DATA_LATE_ERROR ((NTSTATUS)0xC000003D) #define STATUS_DATA_ERROR ((NTSTATUS)0xC000003E) #define STATUS_CRC_ERROR ((NTSTATUS)0xC000003F) #define STATUS_SECTION_TOO_BIG ((NTSTATUS)0xC0000040) #define STATUS_PORT_CONNECTION_REFUSED ((NTSTATUS)0xC0000041) #define STATUS_INVALID_PORT_HANDLE ((NTSTATUS)0xC0000042) #define STATUS_SHARING_VIOLATION ((NTSTATUS)0xC0000043) #define STATUS_QUOTA_EXCEEDED ((NTSTATUS)0xC0000044) #define STATUS_INVALID_PAGE_PROTECTION ((NTSTATUS)0xC0000045) #define STATUS_MUTANT_NOT_OWNED ((NTSTATUS)0xC0000046) #define STATUS_SEMAPHORE_LIMIT_EXCEEDED ((NTSTATUS)0xC0000047) #define STATUS_PORT_ALREADY_SET ((NTSTATUS)0xC0000048) #define STATUS_SECTION_NOT_IMAGE ((NTSTATUS)0xC0000049) #define STATUS_SUSPEND_COUNT_EXCEEDED ((NTSTATUS)0xC000004A) #define STATUS_THREAD_IS_TERMINATING ((NTSTATUS)0xC000004B) #define STATUS_BAD_WORKING_SET_LIMIT ((NTSTATUS)0xC000004C) #define STATUS_INCOMPATIBLE_FILE_MAP ((NTSTATUS)0xC000004D) #define STATUS_SECTION_PROTECTION ((NTSTATUS)0xC000004E) #define STATUS_EAS_NOT_SUPPORTED ((NTSTATUS)0xC000004F) #define STATUS_EA_TOO_LARGE ((NTSTATUS)0xC0000050) #define STATUS_NONEXISTENT_EA_ENTRY ((NTSTATUS)0xC0000051) #define STATUS_NO_EAS_ON_FILE ((NTSTATUS)0xC0000052) #define STATUS_EA_CORRUPT_ERROR ((NTSTATUS)0xC0000053) #define STATUS_FILE_LOCK_CONFLICT ((NTSTATUS)0xC0000054) #define STATUS_LOCK_NOT_GRANTED ((NTSTATUS)0xC0000055) #define STATUS_DELETE_PENDING ((NTSTATUS)0xC0000056) #define STATUS_CTL_FILE_NOT_SUPPORTED ((NTSTATUS)0xC0000057) #define STATUS_UNKNOWN_REVISION ((NTSTATUS)0xC0000058) #define STATUS_REVISION_MISMATCH ((NTSTATUS)0xC0000059) #define STATUS_INVALID_OWNER ((NTSTATUS)0xC000005A) #define STATUS_INVALID_PRIMARY_GROUP ((NTSTATUS)0xC000005B) #define STATUS_NO_IMPERSONATION_TOKEN ((NTSTATUS)0xC000005C) #define 
STATUS_CANT_DISABLE_MANDATORY ((NTSTATUS)0xC000005D) #define STATUS_NO_LOGON_SERVERS ((NTSTATUS)0xC000005E) #ifndef STATUS_NO_SUCH_LOGON_SESSION #define STATUS_NO_SUCH_LOGON_SESSION ((NTSTATUS)0xC000005F) #endif #define STATUS_NO_SUCH_PRIVILEGE ((NTSTATUS)0xC0000060) #define STATUS_PRIVILEGE_NOT_HELD ((NTSTATUS)0xC0000061) #define STATUS_INVALID_ACCOUNT_NAME ((NTSTATUS)0xC0000062) #define STATUS_USER_EXISTS ((NTSTATUS)0xC0000063) #ifndef STATUS_NO_SUCH_USER #define STATUS_NO_SUCH_USER ((NTSTATUS)0xC0000064) #endif #define STATUS_GROUP_EXISTS ((NTSTATUS)0xC0000065) #define STATUS_NO_SUCH_GROUP ((NTSTATUS)0xC0000066) #define STATUS_MEMBER_IN_GROUP ((NTSTATUS)0xC0000067) #define STATUS_MEMBER_NOT_IN_GROUP ((NTSTATUS)0xC0000068) #define STATUS_LAST_ADMIN ((NTSTATUS)0xC0000069) //#define STATUS_WRONG_PASSWORD ((NTSTATUS)0xC000006A) #define STATUS_ILL_FORMED_PASSWORD ((NTSTATUS)0xC000006B) #define STATUS_PASSWORD_RESTRICTION ((NTSTATUS)0xC000006C) //#define STATUS_LOGON_FAILURE ((NTSTATUS)0xC000006D) //#define STATUS_ACCOUNT_RESTRICTION ((NTSTATUS)0xC000006E) #define STATUS_INVALID_LOGON_HOURS ((NTSTATUS)0xC000006F) #define STATUS_INVALID_WORKSTATION ((NTSTATUS)0xC0000070) //#define STATUS_PASSWORD_EXPIRED ((NTSTATUS)0xC0000071) //#define STATUS_ACCOUNT_DISABLED ((NTSTATUS)0xC0000072) #define STATUS_NONE_MAPPED ((NTSTATUS)0xC0000073) #define STATUS_TOO_MANY_LUIDS_REQUESTED ((NTSTATUS)0xC0000074) #define STATUS_LUIDS_EXHAUSTED ((NTSTATUS)0xC0000075) #define STATUS_INVALID_SUB_AUTHORITY ((NTSTATUS)0xC0000076) #define STATUS_INVALID_ACL ((NTSTATUS)0xC0000077) #define STATUS_INVALID_SID ((NTSTATUS)0xC0000078) #define STATUS_INVALID_SECURITY_DESCR ((NTSTATUS)0xC0000079) #define STATUS_PROCEDURE_NOT_FOUND ((NTSTATUS)0xC000007A) #define STATUS_INVALID_IMAGE_FORMAT ((NTSTATUS)0xC000007B) #define STATUS_NO_TOKEN ((NTSTATUS)0xC000007C) #define STATUS_BAD_INHERITANCE_ACL ((NTSTATUS)0xC000007D) #define STATUS_RANGE_NOT_LOCKED ((NTSTATUS)0xC000007E) #define STATUS_DISK_FULL 
((NTSTATUS)0xC000007F) #define STATUS_SERVER_DISABLED ((NTSTATUS)0xC0000080) #define STATUS_SERVER_NOT_DISABLED ((NTSTATUS)0xC0000081) #define STATUS_TOO_MANY_GUIDS_REQUESTED ((NTSTATUS)0xC0000082) #define STATUS_GUIDS_EXHAUSTED ((NTSTATUS)0xC0000083) #define STATUS_INVALID_ID_AUTHORITY ((NTSTATUS)0xC0000084) #define STATUS_AGENTS_EXHAUSTED ((NTSTATUS)0xC0000085) #define STATUS_INVALID_VOLUME_LABEL ((NTSTATUS)0xC0000086) #define STATUS_SECTION_NOT_EXTENDED ((NTSTATUS)0xC0000087) #define STATUS_NOT_MAPPED_DATA ((NTSTATUS)0xC0000088) #define STATUS_RESOURCE_DATA_NOT_FOUND ((NTSTATUS)0xC0000089) #define STATUS_RESOURCE_TYPE_NOT_FOUND ((NTSTATUS)0xC000008A) #define STATUS_RESOURCE_NAME_NOT_FOUND ((NTSTATUS)0xC000008B) //#define STATUS_ARRAY_BOUNDS_EXCEEDED ((NTSTATUS)0xC000008C) //#define STATUS_FLOAT_DENORMAL_OPERAND ((NTSTATUS)0xC000008D) //#define STATUS_FLOAT_DIVIDE_BY_ZERO ((NTSTATUS)0xC000008E) //#define STATUS_FLOAT_INEXACT_RESULT ((NTSTATUS)0xC000008F) //#define STATUS_FLOAT_INVALID_OPERATION ((NTSTATUS)0xC0000090) //#define STATUS_FLOAT_OVERFLOW ((NTSTATUS)0xC0000091) //#define STATUS_FLOAT_STACK_CHECK ((NTSTATUS)0xC0000092) //#define STATUS_FLOAT_UNDERFLOW ((NTSTATUS)0xC0000093) //#define STATUS_INTEGER_DIVIDE_BY_ZERO ((NTSTATUS)0xC0000094) //#define STATUS_INTEGER_OVERFLOW ((NTSTATUS)0xC0000095) //#define STATUS_PRIVILEGED_INSTRUCTION ((NTSTATUS)0xC0000096) #define STATUS_TOO_MANY_PAGING_FILES ((NTSTATUS)0xC0000097) #define STATUS_FILE_INVALID ((NTSTATUS)0xC0000098) #define STATUS_ALLOTTED_SPACE_EXCEEDED ((NTSTATUS)0xC0000099) #define STATUS_INSUFFICIENT_RESOURCES ((NTSTATUS)0xC000009A) #define STATUS_DFS_EXIT_PATH_FOUND ((NTSTATUS)0xC000009B) #define STATUS_DEVICE_DATA_ERROR ((NTSTATUS)0xC000009C) #define STATUS_DEVICE_NOT_CONNECTED ((NTSTATUS)0xC000009D) #define STATUS_DEVICE_POWER_FAILURE ((NTSTATUS)0xC000009E) #define STATUS_FREE_VM_NOT_AT_BASE ((NTSTATUS)0xC000009F) #define STATUS_MEMORY_NOT_ALLOCATED ((NTSTATUS)0xC00000A0) #define 
STATUS_WORKING_SET_QUOTA ((NTSTATUS)0xC00000A1) #define STATUS_MEDIA_WRITE_PROTECTED ((NTSTATUS)0xC00000A2) #define STATUS_DEVICE_NOT_READY ((NTSTATUS)0xC00000A3) #define STATUS_INVALID_GROUP_ATTRIBUTES ((NTSTATUS)0xC00000A4) #define STATUS_BAD_IMPERSONATION_LEVEL ((NTSTATUS)0xC00000A5) #define STATUS_CANT_OPEN_ANONYMOUS ((NTSTATUS)0xC00000A6) #define STATUS_BAD_VALIDATION_CLASS ((NTSTATUS)0xC00000A7) #define STATUS_BAD_TOKEN_TYPE ((NTSTATUS)0xC00000A8) #define STATUS_BAD_MASTER_BOOT_RECORD ((NTSTATUS)0xC00000A9) #define STATUS_INSTRUCTION_MISALIGNMENT ((NTSTATUS)0xC00000AA) #define STATUS_INSTANCE_NOT_AVAILABLE ((NTSTATUS)0xC00000AB) #define STATUS_PIPE_NOT_AVAILABLE ((NTSTATUS)0xC00000AC) #define STATUS_INVALID_PIPE_STATE ((NTSTATUS)0xC00000AD) #define STATUS_PIPE_BUSY ((NTSTATUS)0xC00000AE) #define STATUS_ILLEGAL_FUNCTION ((NTSTATUS)0xC00000AF) #define STATUS_PIPE_DISCONNECTED ((NTSTATUS)0xC00000B0) #define STATUS_PIPE_CLOSING ((NTSTATUS)0xC00000B1) #define STATUS_PIPE_CONNECTED ((NTSTATUS)0xC00000B2) #define STATUS_PIPE_LISTENING ((NTSTATUS)0xC00000B3) #define STATUS_INVALID_READ_MODE ((NTSTATUS)0xC00000B4) #define STATUS_IO_TIMEOUT ((NTSTATUS)0xC00000B5) #define STATUS_FILE_FORCED_CLOSED ((NTSTATUS)0xC00000B6) #define STATUS_PROFILING_NOT_STARTED ((NTSTATUS)0xC00000B7) #define STATUS_PROFILING_NOT_STOPPED ((NTSTATUS)0xC00000B8) #define STATUS_COULD_NOT_INTERPRET ((NTSTATUS)0xC00000B9) #define STATUS_FILE_IS_A_DIRECTORY ((NTSTATUS)0xC00000BA) #define STATUS_NOT_SUPPORTED ((NTSTATUS)0xC00000BB) #define STATUS_REMOTE_NOT_LISTENING ((NTSTATUS)0xC00000BC) #define STATUS_DUPLICATE_NAME ((NTSTATUS)0xC00000BD) #define STATUS_BAD_NETWORK_PATH ((NTSTATUS)0xC00000BE) #define STATUS_NETWORK_BUSY ((NTSTATUS)0xC00000BF) #define STATUS_DEVICE_DOES_NOT_EXIST ((NTSTATUS)0xC00000C0) #define STATUS_TOO_MANY_COMMANDS ((NTSTATUS)0xC00000C1) #define STATUS_ADAPTER_HARDWARE_ERROR ((NTSTATUS)0xC00000C2) #define STATUS_INVALID_NETWORK_RESPONSE ((NTSTATUS)0xC00000C3) #define 
STATUS_UNEXPECTED_NETWORK_ERROR ((NTSTATUS)0xC00000C4) #define STATUS_BAD_REMOTE_ADAPTER ((NTSTATUS)0xC00000C5) #define STATUS_PRINT_QUEUE_FULL ((NTSTATUS)0xC00000C6) #define STATUS_NO_SPOOL_SPACE ((NTSTATUS)0xC00000C7) #define STATUS_PRINT_CANCELLED ((NTSTATUS)0xC00000C8) #define STATUS_NETWORK_NAME_DELETED ((NTSTATUS)0xC00000C9) #define STATUS_NETWORK_ACCESS_DENIED ((NTSTATUS)0xC00000CA) #define STATUS_BAD_DEVICE_TYPE ((NTSTATUS)0xC00000CB) #define STATUS_BAD_NETWORK_NAME ((NTSTATUS)0xC00000CC) #define STATUS_TOO_MANY_NAMES ((NTSTATUS)0xC00000CD) #define STATUS_TOO_MANY_SESSIONS ((NTSTATUS)0xC00000CE) #define STATUS_SHARING_PAUSED ((NTSTATUS)0xC00000CF) #define STATUS_REQUEST_NOT_ACCEPTED ((NTSTATUS)0xC00000D0) #define STATUS_REDIRECTOR_PAUSED ((NTSTATUS)0xC00000D1) #define STATUS_NET_WRITE_FAULT ((NTSTATUS)0xC00000D2) #define STATUS_PROFILING_AT_LIMIT ((NTSTATUS)0xC00000D3) #define STATUS_NOT_SAME_DEVICE ((NTSTATUS)0xC00000D4) #define STATUS_FILE_RENAMED ((NTSTATUS)0xC00000D5) #define STATUS_VIRTUAL_CIRCUIT_CLOSED ((NTSTATUS)0xC00000D6) #define STATUS_NO_SECURITY_ON_OBJECT ((NTSTATUS)0xC00000D7) #define STATUS_CANT_WAIT ((NTSTATUS)0xC00000D8) #define STATUS_PIPE_EMPTY ((NTSTATUS)0xC00000D9) #define STATUS_CANT_ACCESS_DOMAIN_INFO ((NTSTATUS)0xC00000DA) #define STATUS_CANT_TERMINATE_SELF ((NTSTATUS)0xC00000DB) #define STATUS_INVALID_SERVER_STATE ((NTSTATUS)0xC00000DC) #define STATUS_INVALID_DOMAIN_STATE ((NTSTATUS)0xC00000DD) #define STATUS_INVALID_DOMAIN_ROLE ((NTSTATUS)0xC00000DE) #define STATUS_NO_SUCH_DOMAIN ((NTSTATUS)0xC00000DF) #define STATUS_DOMAIN_EXISTS ((NTSTATUS)0xC00000E0) #define STATUS_DOMAIN_LIMIT_EXCEEDED ((NTSTATUS)0xC00000E1) #define STATUS_OPLOCK_NOT_GRANTED ((NTSTATUS)0xC00000E2) #define STATUS_INVALID_OPLOCK_PROTOCOL ((NTSTATUS)0xC00000E3) #define STATUS_INTERNAL_DB_CORRUPTION ((NTSTATUS)0xC00000E4) #define STATUS_INTERNAL_ERROR ((NTSTATUS)0xC00000E5) #define STATUS_GENERIC_NOT_MAPPED ((NTSTATUS)0xC00000E6) #define 
STATUS_BAD_DESCRIPTOR_FORMAT ((NTSTATUS)0xC00000E7) #define STATUS_INVALID_USER_BUFFER ((NTSTATUS)0xC00000E8) #define STATUS_UNEXPECTED_IO_ERROR ((NTSTATUS)0xC00000E9) #define STATUS_UNEXPECTED_MM_CREATE_ERR ((NTSTATUS)0xC00000EA) #define STATUS_UNEXPECTED_MM_MAP_ERROR ((NTSTATUS)0xC00000EB) #define STATUS_UNEXPECTED_MM_EXTEND_ERR ((NTSTATUS)0xC00000EC) #define STATUS_NOT_LOGON_PROCESS ((NTSTATUS)0xC00000ED) #define STATUS_LOGON_SESSION_EXISTS ((NTSTATUS)0xC00000EE) #define STATUS_INVALID_PARAMETER_1 ((NTSTATUS)0xC00000EF) #define STATUS_INVALID_PARAMETER_2 ((NTSTATUS)0xC00000F0) #define STATUS_INVALID_PARAMETER_3 ((NTSTATUS)0xC00000F1) #define STATUS_INVALID_PARAMETER_4 ((NTSTATUS)0xC00000F2) #define STATUS_INVALID_PARAMETER_5 ((NTSTATUS)0xC00000F3) #define STATUS_INVALID_PARAMETER_6 ((NTSTATUS)0xC00000F4) #define STATUS_INVALID_PARAMETER_7 ((NTSTATUS)0xC00000F5) #define STATUS_INVALID_PARAMETER_8 ((NTSTATUS)0xC00000F6) #define STATUS_INVALID_PARAMETER_9 ((NTSTATUS)0xC00000F7) #define STATUS_INVALID_PARAMETER_10 ((NTSTATUS)0xC00000F8) #define STATUS_INVALID_PARAMETER_11 ((NTSTATUS)0xC00000F9) #define STATUS_INVALID_PARAMETER_12 ((NTSTATUS)0xC00000FA) #define STATUS_REDIRECTOR_NOT_STARTED ((NTSTATUS)0xC00000FB) #define STATUS_REDIRECTOR_STARTED ((NTSTATUS)0xC00000FC) //#define STATUS_STACK_OVERFLOW ((NTSTATUS)0xC00000FD) #define STATUS_NO_SUCH_PACKAGE ((NTSTATUS)0xC00000FE) #define STATUS_BAD_FUNCTION_TABLE ((NTSTATUS)0xC00000FF) #define STATUS_VARIABLE_NOT_FOUND ((NTSTATUS)0xC0000100) #define STATUS_DIRECTORY_NOT_EMPTY ((NTSTATUS)0xC0000101) #define STATUS_FILE_CORRUPT_ERROR ((NTSTATUS)0xC0000102) #define STATUS_NOT_A_DIRECTORY ((NTSTATUS)0xC0000103) #define STATUS_BAD_LOGON_SESSION_STATE ((NTSTATUS)0xC0000104) #define STATUS_LOGON_SESSION_COLLISION ((NTSTATUS)0xC0000105) #define STATUS_NAME_TOO_LONG ((NTSTATUS)0xC0000106) #define STATUS_FILES_OPEN ((NTSTATUS)0xC0000107) #define STATUS_CONNECTION_IN_USE ((NTSTATUS)0xC0000108) #define STATUS_MESSAGE_NOT_FOUND 
((NTSTATUS)0xC0000109) #define STATUS_PROCESS_IS_TERMINATING ((NTSTATUS)0xC000010A) #define STATUS_INVALID_LOGON_TYPE ((NTSTATUS)0xC000010B) #define STATUS_NO_GUID_TRANSLATION ((NTSTATUS)0xC000010C) #define STATUS_CANNOT_IMPERSONATE ((NTSTATUS)0xC000010D) #define STATUS_IMAGE_ALREADY_LOADED ((NTSTATUS)0xC000010E) #define STATUS_ABIOS_NOT_PRESENT ((NTSTATUS)0xC000010F) #define STATUS_ABIOS_LID_NOT_EXIST ((NTSTATUS)0xC0000110) #define STATUS_ABIOS_LID_ALREADY_OWNED ((NTSTATUS)0xC0000111) #define STATUS_ABIOS_NOT_LID_OWNER ((NTSTATUS)0xC0000112) #define STATUS_ABIOS_INVALID_COMMAND ((NTSTATUS)0xC0000113) #define STATUS_ABIOS_INVALID_LID ((NTSTATUS)0xC0000114) #define STATUS_ABIOS_SELECTOR_NOT_AVAILABLE ((NTSTATUS)0xC0000115) #define STATUS_ABIOS_INVALID_SELECTOR ((NTSTATUS)0xC0000116) #define STATUS_NO_LDT ((NTSTATUS)0xC0000117) #define STATUS_INVALID_LDT_SIZE ((NTSTATUS)0xC0000118) #define STATUS_INVALID_LDT_OFFSET ((NTSTATUS)0xC0000119) #define STATUS_INVALID_LDT_DESCRIPTOR ((NTSTATUS)0xC000011A) #define STATUS_INVALID_IMAGE_NE_FORMAT ((NTSTATUS)0xC000011B) #define STATUS_RXACT_INVALID_STATE ((NTSTATUS)0xC000011C) #define STATUS_RXACT_COMMIT_FAILURE ((NTSTATUS)0xC000011D) #define STATUS_MAPPED_FILE_SIZE_ZERO ((NTSTATUS)0xC000011E) #define STATUS_TOO_MANY_OPENED_FILES ((NTSTATUS)0xC000011F) #define STATUS_CANCELLED ((NTSTATUS)0xC0000120) #define STATUS_CANNOT_DELETE ((NTSTATUS)0xC0000121) #define STATUS_INVALID_COMPUTER_NAME ((NTSTATUS)0xC0000122) #define STATUS_FILE_DELETED ((NTSTATUS)0xC0000123) #define STATUS_SPECIAL_ACCOUNT ((NTSTATUS)0xC0000124) #define STATUS_SPECIAL_GROUP ((NTSTATUS)0xC0000125) #define STATUS_SPECIAL_USER ((NTSTATUS)0xC0000126) #define STATUS_MEMBERS_PRIMARY_GROUP ((NTSTATUS)0xC0000127) #define STATUS_FILE_CLOSED ((NTSTATUS)0xC0000128) #define STATUS_TOO_MANY_THREADS ((NTSTATUS)0xC0000129) #define STATUS_THREAD_NOT_IN_PROCESS ((NTSTATUS)0xC000012A) #define STATUS_TOKEN_ALREADY_IN_USE ((NTSTATUS)0xC000012B) #define 
STATUS_PAGEFILE_QUOTA_EXCEEDED ((NTSTATUS)0xC000012C) #define STATUS_COMMITMENT_LIMIT ((NTSTATUS)0xC000012D) #define STATUS_INVALID_IMAGE_LE_FORMAT ((NTSTATUS)0xC000012E) #define STATUS_INVALID_IMAGE_NOT_MZ ((NTSTATUS)0xC000012F) #define STATUS_INVALID_IMAGE_PROTECT ((NTSTATUS)0xC0000130) #define STATUS_INVALID_IMAGE_WIN_16 ((NTSTATUS)0xC0000131) #define STATUS_LOGON_SERVER_CONFLICT ((NTSTATUS)0xC0000132) #define STATUS_TIME_DIFFERENCE_AT_DC ((NTSTATUS)0xC0000133) #define STATUS_SYNCHRONIZATION_REQUIRED ((NTSTATUS)0xC0000134) //#define STATUS_DLL_NOT_FOUND ((NTSTATUS)0xC0000135) #define STATUS_OPEN_FAILED ((NTSTATUS)0xC0000136) #define STATUS_IO_PRIVILEGE_FAILED ((NTSTATUS)0xC0000137) //#define STATUS_ORDINAL_NOT_FOUND ((NTSTATUS)0xC0000138) //#define STATUS_ENTRYPOINT_NOT_FOUND ((NTSTATUS)0xC0000139) //#define STATUS_CONTROL_C_EXIT ((NTSTATUS)0xC000013A) #define STATUS_LOCAL_DISCONNECT ((NTSTATUS)0xC000013B) #define STATUS_REMOTE_DISCONNECT ((NTSTATUS)0xC000013C) #define STATUS_REMOTE_RESOURCES ((NTSTATUS)0xC000013D) #define STATUS_LINK_FAILED ((NTSTATUS)0xC000013E) #define STATUS_LINK_TIMEOUT ((NTSTATUS)0xC000013F) #define STATUS_INVALID_CONNECTION ((NTSTATUS)0xC0000140) #define STATUS_INVALID_ADDRESS ((NTSTATUS)0xC0000141) //#define STATUS_DLL_INIT_FAILED ((NTSTATUS)0xC0000142) #define STATUS_MISSING_SYSTEMFILE ((NTSTATUS)0xC0000143) #define STATUS_UNHANDLED_EXCEPTION ((NTSTATUS)0xC0000144) #define STATUS_APP_INIT_FAILURE ((NTSTATUS)0xC0000145) #define STATUS_PAGEFILE_CREATE_FAILED ((NTSTATUS)0xC0000146) #define STATUS_NO_PAGEFILE ((NTSTATUS)0xC0000147) #define STATUS_INVALID_LEVEL ((NTSTATUS)0xC0000148) #define STATUS_WRONG_PASSWORD_CORE ((NTSTATUS)0xC0000149) #define STATUS_ILLEGAL_FLOAT_CONTEXT ((NTSTATUS)0xC000014A) #define STATUS_PIPE_BROKEN ((NTSTATUS)0xC000014B) #define STATUS_REGISTRY_CORRUPT ((NTSTATUS)0xC000014C) #define STATUS_REGISTRY_IO_FAILED ((NTSTATUS)0xC000014D) #define STATUS_NO_EVENT_PAIR ((NTSTATUS)0xC000014E) #define 
STATUS_UNRECOGNIZED_VOLUME ((NTSTATUS)0xC000014F) #define STATUS_SERIAL_NO_DEVICE_INITED ((NTSTATUS)0xC0000150) #define STATUS_NO_SUCH_ALIAS ((NTSTATUS)0xC0000151) #define STATUS_MEMBER_NOT_IN_ALIAS ((NTSTATUS)0xC0000152) #define STATUS_MEMBER_IN_ALIAS ((NTSTATUS)0xC0000153) #define STATUS_ALIAS_EXISTS ((NTSTATUS)0xC0000154) #define STATUS_LOGON_NOT_GRANTED ((NTSTATUS)0xC0000155) #define STATUS_TOO_MANY_SECRETS ((NTSTATUS)0xC0000156) #define STATUS_SECRET_TOO_LONG ((NTSTATUS)0xC0000157) #define STATUS_INTERNAL_DB_ERROR ((NTSTATUS)0xC0000158) #define STATUS_FULLSCREEN_MODE ((NTSTATUS)0xC0000159) #define STATUS_TOO_MANY_CONTEXT_IDS ((NTSTATUS)0xC000015A) //#define STATUS_LOGON_TYPE_NOT_GRANTED ((NTSTATUS)0xC000015B) #define STATUS_NOT_REGISTRY_FILE ((NTSTATUS)0xC000015C) #define STATUS_NT_CROSS_ENCRYPTION_REQUIRED ((NTSTATUS)0xC000015D) #define STATUS_DOMAIN_CTRLR_CONFIG_ERROR ((NTSTATUS)0xC000015E) #define STATUS_FT_MISSING_MEMBER ((NTSTATUS)0xC000015F) #define STATUS_ILL_FORMED_SERVICE_ENTRY ((NTSTATUS)0xC0000160) #define STATUS_ILLEGAL_CHARACTER ((NTSTATUS)0xC0000161) #define STATUS_UNMAPPABLE_CHARACTER ((NTSTATUS)0xC0000162) #define STATUS_UNDEFINED_CHARACTER ((NTSTATUS)0xC0000163) #define STATUS_FLOPPY_VOLUME ((NTSTATUS)0xC0000164) #define STATUS_FLOPPY_ID_MARK_NOT_FOUND ((NTSTATUS)0xC0000165) #define STATUS_FLOPPY_WRONG_CYLINDER ((NTSTATUS)0xC0000166) #define STATUS_FLOPPY_UNKNOWN_ERROR ((NTSTATUS)0xC0000167) #define STATUS_FLOPPY_BAD_REGISTERS ((NTSTATUS)0xC0000168) #define STATUS_DISK_RECALIBRATE_FAILED ((NTSTATUS)0xC0000169) #define STATUS_DISK_OPERATION_FAILED ((NTSTATUS)0xC000016A) #define STATUS_DISK_RESET_FAILED ((NTSTATUS)0xC000016B) #define STATUS_SHARED_IRQ_BUSY ((NTSTATUS)0xC000016C) #define STATUS_FT_ORPHANING ((NTSTATUS)0xC000016D) #define STATUS_BIOS_FAILED_TO_CONNECT_INTERRUPT ((NTSTATUS)0xC000016E) #define STATUS_PARTITION_FAILURE ((NTSTATUS)0xC0000172) #define STATUS_INVALID_BLOCK_LENGTH ((NTSTATUS)0xC0000173) #define 
STATUS_DEVICE_NOT_PARTITIONED ((NTSTATUS)0xC0000174) #define STATUS_UNABLE_TO_LOCK_MEDIA ((NTSTATUS)0xC0000175) #define STATUS_UNABLE_TO_UNLOAD_MEDIA ((NTSTATUS)0xC0000176) #define STATUS_EOM_OVERFLOW ((NTSTATUS)0xC0000177) #define STATUS_NO_MEDIA ((NTSTATUS)0xC0000178) #define STATUS_NO_SUCH_MEMBER ((NTSTATUS)0xC000017A) #define STATUS_INVALID_MEMBER ((NTSTATUS)0xC000017B) #define STATUS_KEY_DELETED ((NTSTATUS)0xC000017C) #define STATUS_NO_LOG_SPACE ((NTSTATUS)0xC000017D) #define STATUS_TOO_MANY_SIDS ((NTSTATUS)0xC000017E) #define STATUS_LM_CROSS_ENCRYPTION_REQUIRED ((NTSTATUS)0xC000017F) #define STATUS_KEY_HAS_CHILDREN ((NTSTATUS)0xC0000180) #define STATUS_CHILD_MUST_BE_VOLATILE ((NTSTATUS)0xC0000181) #define STATUS_DEVICE_CONFIGURATION_ERROR ((NTSTATUS)0xC0000182) #define STATUS_DRIVER_INTERNAL_ERROR ((NTSTATUS)0xC0000183) #define STATUS_INVALID_DEVICE_STATE ((NTSTATUS)0xC0000184) #define STATUS_IO_DEVICE_ERROR ((NTSTATUS)0xC0000185) #define STATUS_DEVICE_PROTOCOL_ERROR ((NTSTATUS)0xC0000186) #define STATUS_BACKUP_CONTROLLER ((NTSTATUS)0xC0000187) #define STATUS_LOG_FILE_FULL ((NTSTATUS)0xC0000188) #define STATUS_TOO_LATE ((NTSTATUS)0xC0000189) #define STATUS_NO_TRUST_LSA_SECRET ((NTSTATUS)0xC000018A) #define STATUS_NO_TRUST_SAM_ACCOUNT ((NTSTATUS)0xC000018B) #define STATUS_TRUSTED_DOMAIN_FAILURE ((NTSTATUS)0xC000018C) #define STATUS_TRUSTED_RELATIONSHIP_FAILURE ((NTSTATUS)0xC000018D) #define STATUS_EVENTLOG_FILE_CORRUPT ((NTSTATUS)0xC000018E) #define STATUS_EVENTLOG_CANT_START ((NTSTATUS)0xC000018F) #define STATUS_TRUST_FAILURE ((NTSTATUS)0xC0000190) #define STATUS_MUTANT_LIMIT_EXCEEDED ((NTSTATUS)0xC0000191) #define STATUS_NETLOGON_NOT_STARTED ((NTSTATUS)0xC0000192) //#define STATUS_ACCOUNT_EXPIRED ((NTSTATUS)0xC0000193) #define STATUS_POSSIBLE_DEADLOCK ((NTSTATUS)0xC0000194) #define STATUS_NETWORK_CREDENTIAL_CONFLICT ((NTSTATUS)0xC0000195) #define STATUS_REMOTE_SESSION_LIMIT ((NTSTATUS)0xC0000196) #define STATUS_EVENTLOG_FILE_CHANGED ((NTSTATUS)0xC0000197) 
#define STATUS_NOLOGON_INTERDOMAIN_TRUST_ACCOUNT ((NTSTATUS)0xC0000198) #define STATUS_NOLOGON_WORKSTATION_TRUST_ACCOUNT ((NTSTATUS)0xC0000199) #define STATUS_NOLOGON_SERVER_TRUST_ACCOUNT ((NTSTATUS)0xC000019A) #define STATUS_DOMAIN_TRUST_INCONSISTENT ((NTSTATUS)0xC000019B) #define STATUS_FS_DRIVER_REQUIRED ((NTSTATUS)0xC000019C) #define STATUS_NO_USER_SESSION_KEY ((NTSTATUS)0xC0000202) #define STATUS_USER_SESSION_DELETED ((NTSTATUS)0xC0000203) #define STATUS_RESOURCE_LANG_NOT_FOUND ((NTSTATUS)0xC0000204) #define STATUS_INSUFF_SERVER_RESOURCES ((NTSTATUS)0xC0000205) #define STATUS_INVALID_BUFFER_SIZE ((NTSTATUS)0xC0000206) #define STATUS_INVALID_ADDRESS_COMPONENT ((NTSTATUS)0xC0000207) #define STATUS_INVALID_ADDRESS_WILDCARD ((NTSTATUS)0xC0000208) #define STATUS_TOO_MANY_ADDRESSES ((NTSTATUS)0xC0000209) #define STATUS_ADDRESS_ALREADY_EXISTS ((NTSTATUS)0xC000020A) #define STATUS_ADDRESS_CLOSED ((NTSTATUS)0xC000020B) #define STATUS_CONNECTION_DISCONNECTED ((NTSTATUS)0xC000020C) #define STATUS_CONNECTION_RESET ((NTSTATUS)0xC000020D) #define STATUS_TOO_MANY_NODES ((NTSTATUS)0xC000020E) #define STATUS_TRANSACTION_ABORTED ((NTSTATUS)0xC000020F) #define STATUS_TRANSACTION_TIMED_OUT ((NTSTATUS)0xC0000210) #define STATUS_TRANSACTION_NO_RELEASE ((NTSTATUS)0xC0000211) #define STATUS_TRANSACTION_NO_MATCH ((NTSTATUS)0xC0000212) #define STATUS_TRANSACTION_RESPONDED ((NTSTATUS)0xC0000213) #define STATUS_TRANSACTION_INVALID_ID ((NTSTATUS)0xC0000214) #define STATUS_TRANSACTION_INVALID_TYPE ((NTSTATUS)0xC0000215) #define STATUS_NOT_SERVER_SESSION ((NTSTATUS)0xC0000216) #define STATUS_NOT_CLIENT_SESSION ((NTSTATUS)0xC0000217) #define STATUS_CANNOT_LOAD_REGISTRY_FILE ((NTSTATUS)0xC0000218) #define STATUS_DEBUG_ATTACH_FAILED ((NTSTATUS)0xC0000219) #define STATUS_SYSTEM_PROCESS_TERMINATED ((NTSTATUS)0xC000021A) #define STATUS_DATA_NOT_ACCEPTED ((NTSTATUS)0xC000021B) #define STATUS_NO_BROWSER_SERVERS_FOUND ((NTSTATUS)0xC000021C) #define STATUS_VDM_HARD_ERROR ((NTSTATUS)0xC000021D) 
#define STATUS_DRIVER_CANCEL_TIMEOUT ((NTSTATUS)0xC000021E) #define STATUS_REPLY_MESSAGE_MISMATCH ((NTSTATUS)0xC000021F) #define STATUS_MAPPED_ALIGNMENT ((NTSTATUS)0xC0000220) #define STATUS_IMAGE_CHECKSUM_MISMATCH ((NTSTATUS)0xC0000221) #define STATUS_LOST_WRITEBEHIND_DATA ((NTSTATUS)0xC0000222) #define STATUS_CLIENT_SERVER_PARAMETERS_INVALID ((NTSTATUS)0xC0000223) //#define STATUS_PASSWORD_MUST_CHANGE ((NTSTATUS)0xC0000224) #define STATUS_NOT_FOUND ((NTSTATUS)0xC0000225) #define STATUS_NOT_TINY_STREAM ((NTSTATUS)0xC0000226) #define STATUS_RECOVERY_FAILURE ((NTSTATUS)0xC0000227) #define STATUS_STACK_OVERFLOW_READ ((NTSTATUS)0xC0000228) #define STATUS_FAIL_CHECK ((NTSTATUS)0xC0000229) #define STATUS_DUPLICATE_OBJECTID ((NTSTATUS)0xC000022A) #define STATUS_OBJECTID_EXISTS ((NTSTATUS)0xC000022B) #define STATUS_CONVERT_TO_LARGE ((NTSTATUS)0xC000022C) #define STATUS_RETRY ((NTSTATUS)0xC000022D) #define STATUS_FOUND_OUT_OF_SCOPE ((NTSTATUS)0xC000022E) #define STATUS_ALLOCATE_BUCKET ((NTSTATUS)0xC000022F) #define STATUS_PROPSET_NOT_FOUND ((NTSTATUS)0xC0000230) #define STATUS_MARSHALL_OVERFLOW ((NTSTATUS)0xC0000231) #define STATUS_INVALID_VARIANT ((NTSTATUS)0xC0000232) #define STATUS_DOMAIN_CONTROLLER_NOT_FOUND ((NTSTATUS)0xC0000233) //#define STATUS_ACCOUNT_LOCKED_OUT ((NTSTATUS)0xC0000234) #define STATUS_HANDLE_NOT_CLOSABLE ((NTSTATUS)0xC0000235) #define STATUS_CONNECTION_REFUSED ((NTSTATUS)0xC0000236) #define STATUS_GRACEFUL_DISCONNECT ((NTSTATUS)0xC0000237) #define STATUS_ADDRESS_ALREADY_ASSOCIATED ((NTSTATUS)0xC0000238) #define STATUS_ADDRESS_NOT_ASSOCIATED ((NTSTATUS)0xC0000239) #define STATUS_CONNECTION_INVALID ((NTSTATUS)0xC000023A) #define STATUS_CONNECTION_ACTIVE ((NTSTATUS)0xC000023B) #define STATUS_NETWORK_UNREACHABLE ((NTSTATUS)0xC000023C) #define STATUS_HOST_UNREACHABLE ((NTSTATUS)0xC000023D) #define STATUS_PROTOCOL_UNREACHABLE ((NTSTATUS)0xC000023E) #define STATUS_PORT_UNREACHABLE ((NTSTATUS)0xC000023F) #define STATUS_REQUEST_ABORTED ((NTSTATUS)0xC0000240) 
#define STATUS_CONNECTION_ABORTED ((NTSTATUS)0xC0000241) #define STATUS_BAD_COMPRESSION_BUFFER ((NTSTATUS)0xC0000242) #define STATUS_USER_MAPPED_FILE ((NTSTATUS)0xC0000243) #define STATUS_AUDIT_FAILED ((NTSTATUS)0xC0000244) #define STATUS_TIMER_RESOLUTION_NOT_SET ((NTSTATUS)0xC0000245) #define STATUS_CONNECTION_COUNT_LIMIT ((NTSTATUS)0xC0000246) #define STATUS_LOGIN_TIME_RESTRICTION ((NTSTATUS)0xC0000247) #define STATUS_LOGIN_WKSTA_RESTRICTION ((NTSTATUS)0xC0000248) #define STATUS_IMAGE_MP_UP_MISMATCH ((NTSTATUS)0xC0000249) #define STATUS_INSUFFICIENT_LOGON_INFO ((NTSTATUS)0xC0000250) #define STATUS_BAD_DLL_ENTRYPOINT ((NTSTATUS)0xC0000251) #define STATUS_BAD_SERVICE_ENTRYPOINT ((NTSTATUS)0xC0000252) #define STATUS_LPC_REPLY_LOST ((NTSTATUS)0xC0000253) #define STATUS_IP_ADDRESS_CONFLICT1 ((NTSTATUS)0xC0000254) #define STATUS_IP_ADDRESS_CONFLICT2 ((NTSTATUS)0xC0000255) #define STATUS_REGISTRY_QUOTA_LIMIT ((NTSTATUS)0xC0000256) #define STATUS_PATH_NOT_COVERED ((NTSTATUS)0xC0000257) #define STATUS_NO_CALLBACK_ACTIVE ((NTSTATUS)0xC0000258) #define STATUS_LICENSE_QUOTA_EXCEEDED ((NTSTATUS)0xC0000259) #define STATUS_PWD_TOO_SHORT ((NTSTATUS)0xC000025A) #define STATUS_PWD_TOO_RECENT ((NTSTATUS)0xC000025B) #define STATUS_PWD_HISTORY_CONFLICT ((NTSTATUS)0xC000025C) #define STATUS_PLUGPLAY_NO_DEVICE ((NTSTATUS)0xC000025E) #define STATUS_UNSUPPORTED_COMPRESSION ((NTSTATUS)0xC000025F) #define STATUS_INVALID_HW_PROFILE ((NTSTATUS)0xC0000260) #define STATUS_INVALID_PLUGPLAY_DEVICE_PATH ((NTSTATUS)0xC0000261) #define STATUS_DRIVER_ORDINAL_NOT_FOUND ((NTSTATUS)0xC0000262) #define STATUS_DRIVER_ENTRYPOINT_NOT_FOUND ((NTSTATUS)0xC0000263) #define STATUS_RESOURCE_NOT_OWNED ((NTSTATUS)0xC0000264) #define STATUS_TOO_MANY_LINKS ((NTSTATUS)0xC0000265) #define STATUS_QUOTA_LIST_INCONSISTENT ((NTSTATUS)0xC0000266) #define STATUS_FILE_IS_OFFLINE ((NTSTATUS)0xC0000267) #define STATUS_EVALUATION_EXPIRATION ((NTSTATUS)0xC0000268) #define STATUS_ILLEGAL_DLL_RELOCATION ((NTSTATUS)0xC0000269) 
#define STATUS_LICENSE_VIOLATION ((NTSTATUS)0xC000026A) #define STATUS_DLL_INIT_FAILED_LOGOFF ((NTSTATUS)0xC000026B) #define STATUS_DRIVER_UNABLE_TO_LOAD ((NTSTATUS)0xC000026C) #define STATUS_DFS_UNAVAILABLE ((NTSTATUS)0xC000026D) #define STATUS_VOLUME_DISMOUNTED ((NTSTATUS)0xC000026E) #define STATUS_WX86_INTERNAL_ERROR ((NTSTATUS)0xC000026F) #define STATUS_WX86_FLOAT_STACK_CHECK ((NTSTATUS)0xC0000270) #define STATUS_VALIDATE_CONTINUE ((NTSTATUS)0xC0000271) #define STATUS_NO_MATCH ((NTSTATUS)0xC0000272) #define STATUS_NO_MORE_MATCHES ((NTSTATUS)0xC0000273) #define STATUS_NOT_A_REPARSE_POINT ((NTSTATUS)0xC0000275) #define STATUS_IO_REPARSE_TAG_INVALID ((NTSTATUS)0xC0000276) #define STATUS_IO_REPARSE_TAG_MISMATCH ((NTSTATUS)0xC0000277) #define STATUS_IO_REPARSE_DATA_INVALID ((NTSTATUS)0xC0000278) #define STATUS_IO_REPARSE_TAG_NOT_HANDLED ((NTSTATUS)0xC0000279) #define STATUS_REPARSE_POINT_NOT_RESOLVED ((NTSTATUS)0xC0000280) #define STATUS_DIRECTORY_IS_A_REPARSE_POINT ((NTSTATUS)0xC0000281) #define STATUS_RANGE_LIST_CONFLICT ((NTSTATUS)0xC0000282) #define STATUS_SOURCE_ELEMENT_EMPTY ((NTSTATUS)0xC0000283) #define STATUS_DESTINATION_ELEMENT_FULL ((NTSTATUS)0xC0000284) #define STATUS_ILLEGAL_ELEMENT_ADDRESS ((NTSTATUS)0xC0000285) #define STATUS_MAGAZINE_NOT_PRESENT ((NTSTATUS)0xC0000286) #define STATUS_REINITIALIZATION_NEEDED ((NTSTATUS)0xC0000287) #define STATUS_ENCRYPTION_FAILED ((NTSTATUS)0xC000028A) #define STATUS_DECRYPTION_FAILED ((NTSTATUS)0xC000028B) #define STATUS_RANGE_NOT_FOUND ((NTSTATUS)0xC000028C) #define STATUS_NO_RECOVERY_POLICY ((NTSTATUS)0xC000028D) #define STATUS_NO_EFS ((NTSTATUS)0xC000028E) #define STATUS_WRONG_EFS ((NTSTATUS)0xC000028F) #define STATUS_NO_USER_KEYS ((NTSTATUS)0xC0000290) #define STATUS_FILE_NOT_ENCRYPTED ((NTSTATUS)0xC0000291) #define STATUS_NOT_EXPORT_FORMAT ((NTSTATUS)0xC0000292) #define STATUS_FILE_ENCRYPTED ((NTSTATUS)0xC0000293) #define STATUS_WMI_GUID_NOT_FOUND ((NTSTATUS)0xC0000295) #define STATUS_WMI_INSTANCE_NOT_FOUND 
((NTSTATUS)0xC0000296) #define STATUS_WMI_ITEMID_NOT_FOUND ((NTSTATUS)0xC0000297) #define STATUS_WMI_TRY_AGAIN ((NTSTATUS)0xC0000298) #define STATUS_SHARED_POLICY ((NTSTATUS)0xC0000299) #define STATUS_POLICY_OBJECT_NOT_FOUND ((NTSTATUS)0xC000029A) #define STATUS_POLICY_ONLY_IN_DS ((NTSTATUS)0xC000029B) #define STATUS_VOLUME_NOT_UPGRADED ((NTSTATUS)0xC000029C) #define STATUS_REMOTE_STORAGE_NOT_ACTIVE ((NTSTATUS)0xC000029D) #define STATUS_REMOTE_STORAGE_MEDIA_ERROR ((NTSTATUS)0xC000029E) #define STATUS_NO_TRACKING_SERVICE ((NTSTATUS)0xC000029F) #define STATUS_SERVER_SID_MISMATCH ((NTSTATUS)0xC00002A0) #define STATUS_DS_NO_ATTRIBUTE_OR_VALUE ((NTSTATUS)0xC00002A1) #define STATUS_DS_INVALID_ATTRIBUTE_SYNTAX ((NTSTATUS)0xC00002A2) #define STATUS_DS_ATTRIBUTE_TYPE_UNDEFINED ((NTSTATUS)0xC00002A3) #define STATUS_DS_ATTRIBUTE_OR_VALUE_EXISTS ((NTSTATUS)0xC00002A4) #define STATUS_DS_BUSY ((NTSTATUS)0xC00002A5) #define STATUS_DS_UNAVAILABLE ((NTSTATUS)0xC00002A6) #define STATUS_DS_NO_RIDS_ALLOCATED ((NTSTATUS)0xC00002A7) #define STATUS_DS_NO_MORE_RIDS ((NTSTATUS)0xC00002A8) #define STATUS_DS_INCORRECT_ROLE_OWNER ((NTSTATUS)0xC00002A9) #define STATUS_DS_RIDMGR_INIT_ERROR ((NTSTATUS)0xC00002AA) #define STATUS_DS_OBJ_CLASS_VIOLATION ((NTSTATUS)0xC00002AB) #define STATUS_DS_CANT_ON_NON_LEAF ((NTSTATUS)0xC00002AC) #define STATUS_DS_CANT_ON_RDN ((NTSTATUS)0xC00002AD) #define STATUS_DS_CANT_MOD_OBJ_CLASS ((NTSTATUS)0xC00002AE) #define STATUS_DS_CROSS_DOM_MOVE_FAILED ((NTSTATUS)0xC00002AF) #define STATUS_DS_GC_NOT_AVAILABLE ((NTSTATUS)0xC00002B0) #define STATUS_DIRECTORY_SERVICE_REQUIRED ((NTSTATUS)0xC00002B1) #define STATUS_REPARSE_ATTRIBUTE_CONFLICT ((NTSTATUS)0xC00002B2) #define STATUS_CANT_ENABLE_DENY_ONLY ((NTSTATUS)0xC00002B3) //#define STATUS_FLOAT_MULTIPLE_FAULTS ((NTSTATUS)0xC00002B4) //#define STATUS_FLOAT_MULTIPLE_TRAPS ((NTSTATUS)0xC00002B5) #define STATUS_DEVICE_REMOVED ((NTSTATUS)0xC00002B6) #define STATUS_JOURNAL_DELETE_IN_PROGRESS ((NTSTATUS)0xC00002B7) #define 
STATUS_JOURNAL_NOT_ACTIVE ((NTSTATUS)0xC00002B8) #define STATUS_NOINTERFACE ((NTSTATUS)0xC00002B9) #define STATUS_DS_ADMIN_LIMIT_EXCEEDED ((NTSTATUS)0xC00002C1) #define STATUS_DRIVER_FAILED_SLEEP ((NTSTATUS)0xC00002C2) #define STATUS_MUTUAL_AUTHENTICATION_FAILED ((NTSTATUS)0xC00002C3) #define STATUS_CORRUPT_SYSTEM_FILE ((NTSTATUS)0xC00002C4) #define STATUS_DATATYPE_MISALIGNMENT_ERROR ((NTSTATUS)0xC00002C5) #define STATUS_WMI_READ_ONLY ((NTSTATUS)0xC00002C6) #define STATUS_WMI_SET_FAILURE ((NTSTATUS)0xC00002C7) #define STATUS_COMMITMENT_MINIMUM ((NTSTATUS)0xC00002C8) //#define STATUS_REG_NAT_CONSUMPTION ((NTSTATUS)0xC00002C9) #define STATUS_TRANSPORT_FULL ((NTSTATUS)0xC00002CA) #define STATUS_DS_SAM_INIT_FAILURE ((NTSTATUS)0xC00002CB) #define STATUS_ONLY_IF_CONNECTED ((NTSTATUS)0xC00002CC) #define STATUS_DS_SENSITIVE_GROUP_VIOLATION ((NTSTATUS)0xC00002CD) #define STATUS_PNP_RESTART_ENUMERATION ((NTSTATUS)0xC00002CE) #define STATUS_JOURNAL_ENTRY_DELETED ((NTSTATUS)0xC00002CF) #define STATUS_DS_CANT_MOD_PRIMARYGROUPID ((NTSTATUS)0xC00002D0) #define STATUS_SYSTEM_IMAGE_BAD_SIGNATURE ((NTSTATUS)0xC00002D1) #define STATUS_PNP_REBOOT_REQUIRED ((NTSTATUS)0xC00002D2) #define STATUS_POWER_STATE_INVALID ((NTSTATUS)0xC00002D3) #define STATUS_DS_INVALID_GROUP_TYPE ((NTSTATUS)0xC00002D4) #define STATUS_DS_NO_NEST_GLOBALGROUP_IN_MIXEDDOMAIN ((NTSTATUS)0xC00002D5) #define STATUS_DS_NO_NEST_LOCALGROUP_IN_MIXEDDOMAIN ((NTSTATUS)0xC00002D6) #define STATUS_DS_GLOBAL_CANT_HAVE_LOCAL_MEMBER ((NTSTATUS)0xC00002D7) #define STATUS_DS_GLOBAL_CANT_HAVE_UNIVERSAL_MEMBER ((NTSTATUS)0xC00002D8) #define STATUS_DS_UNIVERSAL_CANT_HAVE_LOCAL_MEMBER ((NTSTATUS)0xC00002D9) #define STATUS_DS_GLOBAL_CANT_HAVE_CROSSDOMAIN_MEMBER ((NTSTATUS)0xC00002DA) #define STATUS_DS_LOCAL_CANT_HAVE_CROSSDOMAIN_LOCAL_MEMBER ((NTSTATUS)0xC00002DB) #define STATUS_DS_HAVE_PRIMARY_MEMBERS ((NTSTATUS)0xC00002DC) #define STATUS_WMI_NOT_SUPPORTED ((NTSTATUS)0xC00002DD) #define STATUS_INSUFFICIENT_POWER ((NTSTATUS)0xC00002DE) 
#define STATUS_SAM_NEED_BOOTKEY_PASSWORD ((NTSTATUS)0xC00002DF) #define STATUS_SAM_NEED_BOOTKEY_FLOPPY ((NTSTATUS)0xC00002E0) #define STATUS_DS_CANT_START ((NTSTATUS)0xC00002E1) #define STATUS_DS_INIT_FAILURE ((NTSTATUS)0xC00002E2) #define STATUS_SAM_INIT_FAILURE ((NTSTATUS)0xC00002E3) #define STATUS_DS_GC_REQUIRED ((NTSTATUS)0xC00002E4) #define STATUS_DS_LOCAL_MEMBER_OF_LOCAL_ONLY ((NTSTATUS)0xC00002E5) #define STATUS_DS_NO_FPO_IN_UNIVERSAL_GROUPS ((NTSTATUS)0xC00002E6) #define STATUS_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED ((NTSTATUS)0xC00002E7) #define STATUS_MULTIPLE_FAULT_VIOLATION ((NTSTATUS)0xC00002E8) #define STATUS_CURRENT_DOMAIN_NOT_ALLOWED ((NTSTATUS)0xC00002E9) #define STATUS_CANNOT_MAKE ((NTSTATUS)0xC00002EA) #define STATUS_SYSTEM_SHUTDOWN ((NTSTATUS)0xC00002EB) #define STATUS_DS_INIT_FAILURE_CONSOLE ((NTSTATUS)0xC00002EC) #define STATUS_DS_SAM_INIT_FAILURE_CONSOLE ((NTSTATUS)0xC00002ED) #define STATUS_UNFINISHED_CONTEXT_DELETED ((NTSTATUS)0xC00002EE) #define STATUS_NO_TGT_REPLY ((NTSTATUS)0xC00002EF) #define STATUS_OBJECTID_NOT_FOUND ((NTSTATUS)0xC00002F0) #define STATUS_NO_IP_ADDRESSES ((NTSTATUS)0xC00002F1) #define STATUS_WRONG_CREDENTIAL_HANDLE ((NTSTATUS)0xC00002F2) #define STATUS_CRYPTO_SYSTEM_INVALID ((NTSTATUS)0xC00002F3) #define STATUS_MAX_REFERRALS_EXCEEDED ((NTSTATUS)0xC00002F4) #define STATUS_MUST_BE_KDC ((NTSTATUS)0xC00002F5) #define STATUS_STRONG_CRYPTO_NOT_SUPPORTED ((NTSTATUS)0xC00002F6) #define STATUS_TOO_MANY_PRINCIPALS ((NTSTATUS)0xC00002F7) #define STATUS_NO_PA_DATA ((NTSTATUS)0xC00002F8) #define STATUS_PKINIT_NAME_MISMATCH ((NTSTATUS)0xC00002F9) #define STATUS_SMARTCARD_LOGON_REQUIRED ((NTSTATUS)0xC00002FA) #define STATUS_KDC_INVALID_REQUEST ((NTSTATUS)0xC00002FB) #define STATUS_KDC_UNABLE_TO_REFER ((NTSTATUS)0xC00002FC) #define STATUS_KDC_UNKNOWN_ETYPE ((NTSTATUS)0xC00002FD) #define STATUS_SHUTDOWN_IN_PROGRESS ((NTSTATUS)0xC00002FE) #define STATUS_SERVER_SHUTDOWN_IN_PROGRESS ((NTSTATUS)0xC00002FF) #define STATUS_NOT_SUPPORTED_ON_SBS 
((NTSTATUS)0xC0000300) #define STATUS_WMI_GUID_DISCONNECTED ((NTSTATUS)0xC0000301) #define STATUS_WMI_ALREADY_DISABLED ((NTSTATUS)0xC0000302) #define STATUS_WMI_ALREADY_ENABLED ((NTSTATUS)0xC0000303) #define STATUS_MFT_TOO_FRAGMENTED ((NTSTATUS)0xC0000304) #define STATUS_COPY_PROTECTION_FAILURE ((NTSTATUS)0xC0000305) #define STATUS_CSS_AUTHENTICATION_FAILURE ((NTSTATUS)0xC0000306) #define STATUS_CSS_KEY_NOT_PRESENT ((NTSTATUS)0xC0000307) #define STATUS_CSS_KEY_NOT_ESTABLISHED ((NTSTATUS)0xC0000308) #define STATUS_CSS_SCRAMBLED_SECTOR ((NTSTATUS)0xC0000309) #define STATUS_CSS_REGION_MISMATCH ((NTSTATUS)0xC000030A) #define STATUS_CSS_RESETS_EXHAUSTED ((NTSTATUS)0xC000030B) #define STATUS_PKINIT_FAILURE ((NTSTATUS)0xC0000320) #define STATUS_SMARTCARD_SUBSYSTEM_FAILURE ((NTSTATUS)0xC0000321) #define STATUS_NO_KERB_KEY ((NTSTATUS)0xC0000322) #define STATUS_HOST_DOWN ((NTSTATUS)0xC0000350) #define STATUS_UNSUPPORTED_PREAUTH ((NTSTATUS)0xC0000351) #define STATUS_EFS_ALG_BLOB_TOO_BIG ((NTSTATUS)0xC0000352) #define STATUS_PORT_NOT_SET ((NTSTATUS)0xC0000353) #define STATUS_DEBUGGER_INACTIVE ((NTSTATUS)0xC0000354) #define STATUS_DS_VERSION_CHECK_FAILURE ((NTSTATUS)0xC0000355) #define STATUS_AUDITING_DISABLED ((NTSTATUS)0xC0000356) #define STATUS_PRENT4_MACHINE_ACCOUNT ((NTSTATUS)0xC0000357) #define STATUS_DS_AG_CANT_HAVE_UNIVERSAL_MEMBER ((NTSTATUS)0xC0000358) #define STATUS_INVALID_IMAGE_WIN_32 ((NTSTATUS)0xC0000359) #define STATUS_INVALID_IMAGE_WIN_64 ((NTSTATUS)0xC000035A) #define STATUS_BAD_BINDINGS ((NTSTATUS)0xC000035B) #define STATUS_NETWORK_SESSION_EXPIRED ((NTSTATUS)0xC000035C) #define STATUS_APPHELP_BLOCK ((NTSTATUS)0xC000035D) #define STATUS_ALL_SIDS_FILTERED ((NTSTATUS)0xC000035E) #define STATUS_NOT_SAFE_MODE_DRIVER ((NTSTATUS)0xC000035F) #define STATUS_ACCESS_DISABLED_BY_POLICY_DEFAULT ((NTSTATUS)0xC0000361) #define STATUS_ACCESS_DISABLED_BY_POLICY_PATH ((NTSTATUS)0xC0000362) #define STATUS_ACCESS_DISABLED_BY_POLICY_PUBLISHER ((NTSTATUS)0xC0000363) #define 
STATUS_ACCESS_DISABLED_BY_POLICY_OTHER ((NTSTATUS)0xC0000364) #define STATUS_FAILED_DRIVER_ENTRY ((NTSTATUS)0xC0000365) #define STATUS_DEVICE_ENUMERATION_ERROR ((NTSTATUS)0xC0000366) #define STATUS_WAIT_FOR_OPLOCK ((NTSTATUS)0x00000367) #define STATUS_MOUNT_POINT_NOT_RESOLVED ((NTSTATUS)0xC0000368) #define STATUS_INVALID_DEVICE_OBJECT_PARAMETER ((NTSTATUS)0xC0000369) /* The following is not a typo. It's the same spelling as in the Microsoft headers */ #define STATUS_MCA_OCCURED ((NTSTATUS)0xC000036A) #define STATUS_DRIVER_BLOCKED_CRITICAL ((NTSTATUS)0xC000036B) #define STATUS_DRIVER_BLOCKED ((NTSTATUS)0xC000036C) #define STATUS_DRIVER_DATABASE_ERROR ((NTSTATUS)0xC000036D) #define STATUS_SYSTEM_HIVE_TOO_LARGE ((NTSTATUS)0xC000036E) #define STATUS_INVALID_IMPORT_OF_NON_DLL ((NTSTATUS)0xC000036F) #define STATUS_SMARTCARD_WRONG_PIN ((NTSTATUS)0xC0000380) #define STATUS_SMARTCARD_CARD_BLOCKED ((NTSTATUS)0xC0000381) #define STATUS_SMARTCARD_CARD_NOT_AUTHENTICATED ((NTSTATUS)0xC0000382) #define STATUS_SMARTCARD_NO_CARD ((NTSTATUS)0xC0000383) #define STATUS_SMARTCARD_NO_KEY_CONTAINER ((NTSTATUS)0xC0000384) #define STATUS_SMARTCARD_NO_CERTIFICATE ((NTSTATUS)0xC0000385) #define STATUS_SMARTCARD_NO_KEYSET ((NTSTATUS)0xC0000386) #define STATUS_SMARTCARD_IO_ERROR ((NTSTATUS)0xC0000387) //#define STATUS_DOWNGRADE_DETECTED ((NTSTATUS)0xC0000388) #define STATUS_SMARTCARD_CERT_REVOKED ((NTSTATUS)0xC0000389) #define STATUS_ISSUING_CA_UNTRUSTED ((NTSTATUS)0xC000038A) #define STATUS_REVOCATION_OFFLINE_C ((NTSTATUS)0xC000038B) #define STATUS_PKINIT_CLIENT_FAILURE ((NTSTATUS)0xC000038C) #define STATUS_SMARTCARD_CERT_EXPIRED ((NTSTATUS)0xC000038D) #define STATUS_DRIVER_FAILED_PRIOR_UNLOAD ((NTSTATUS)0xC000038E) #define STATUS_SMARTCARD_SILENT_CONTEXT ((NTSTATUS)0xC000038F) #define STATUS_PER_USER_TRUST_QUOTA_EXCEEDED ((NTSTATUS)0xC0000401) #define STATUS_ALL_USER_TRUST_QUOTA_EXCEEDED ((NTSTATUS)0xC0000402) #define STATUS_USER_DELETE_TRUST_QUOTA_EXCEEDED ((NTSTATUS)0xC0000403) #define 
STATUS_DS_NAME_NOT_UNIQUE ((NTSTATUS)0xC0000404) #define STATUS_DS_DUPLICATE_ID_FOUND ((NTSTATUS)0xC0000405) #define STATUS_DS_GROUP_CONVERSION_ERROR ((NTSTATUS)0xC0000406) #define STATUS_VOLSNAP_PREPARE_HIBERNATE ((NTSTATUS)0xC0000407) #define STATUS_USER2USER_REQUIRED ((NTSTATUS)0xC0000408) //#define STATUS_STACK_BUFFER_OVERRUN ((NTSTATUS)0xC0000409) #define STATUS_NO_S4U_PROT_SUPPORT ((NTSTATUS)0xC000040A) #define STATUS_CROSSREALM_DELEGATION_FAILURE ((NTSTATUS)0xC000040B) #define STATUS_REVOCATION_OFFLINE_KDC ((NTSTATUS)0xC000040C) #define STATUS_ISSUING_CA_UNTRUSTED_KDC ((NTSTATUS)0xC000040D) #define STATUS_KDC_CERT_EXPIRED ((NTSTATUS)0xC000040E) #define STATUS_KDC_CERT_REVOKED ((NTSTATUS)0xC000040F) #define STATUS_PARAMETER_QUOTA_EXCEEDED ((NTSTATUS)0xC0000410) #define STATUS_HIBERNATION_FAILURE ((NTSTATUS)0xC0000411) #define STATUS_DELAY_LOAD_FAILED ((NTSTATUS)0xC0000412) //#define STATUS_AUTHENTICATION_FIREWALL_FAILED ((NTSTATUS)0xC0000413) #define STATUS_VDM_DISALLOWED ((NTSTATUS)0xC0000414) #define STATUS_HUNG_DISPLAY_DRIVER_THREAD ((NTSTATUS)0xC0000415) //#define STATUS_INVALID_CRUNTIME_PARAMETER ((NTSTATUS)0xC0000417) //#define STATUS_ASSERTION_FAILURE ((NTSTATUS)0xC0000420L) #define STATUS_CALLBACK_POP_STACK ((NTSTATUS)0xC0000423) #define STATUS_WOW_ASSERTION ((NTSTATUS)0xC0009898) #define RPC_NT_INVALID_STRING_BINDING ((NTSTATUS)0xC0020001) #define RPC_NT_WRONG_KIND_OF_BINDING ((NTSTATUS)0xC0020002) #define RPC_NT_INVALID_BINDING ((NTSTATUS)0xC0020003) #define RPC_NT_PROTSEQ_NOT_SUPPORTED ((NTSTATUS)0xC0020004) #define RPC_NT_INVALID_RPC_PROTSEQ ((NTSTATUS)0xC0020005) #define RPC_NT_INVALID_STRING_UUID ((NTSTATUS)0xC0020006) #define RPC_NT_INVALID_ENDPOINT_FORMAT ((NTSTATUS)0xC0020007) #define RPC_NT_INVALID_NET_ADDR ((NTSTATUS)0xC0020008) #define RPC_NT_NO_ENDPOINT_FOUND ((NTSTATUS)0xC0020009) #define RPC_NT_INVALID_TIMEOUT ((NTSTATUS)0xC002000A) #define RPC_NT_OBJECT_NOT_FOUND ((NTSTATUS)0xC002000B) #define RPC_NT_ALREADY_REGISTERED 
((NTSTATUS)0xC002000C) #define RPC_NT_TYPE_ALREADY_REGISTERED ((NTSTATUS)0xC002000D) #define RPC_NT_ALREADY_LISTENING ((NTSTATUS)0xC002000E) #define RPC_NT_NO_PROTSEQS_REGISTERED ((NTSTATUS)0xC002000F) #define RPC_NT_NOT_LISTENING ((NTSTATUS)0xC0020010) #define RPC_NT_UNKNOWN_MGR_TYPE ((NTSTATUS)0xC0020011) #define RPC_NT_UNKNOWN_IF ((NTSTATUS)0xC0020012) #define RPC_NT_NO_BINDINGS ((NTSTATUS)0xC0020013) #define RPC_NT_NO_PROTSEQS ((NTSTATUS)0xC0020014) #define RPC_NT_CANT_CREATE_ENDPOINT ((NTSTATUS)0xC0020015) #define RPC_NT_OUT_OF_RESOURCES ((NTSTATUS)0xC0020016) #define RPC_NT_SERVER_UNAVAILABLE ((NTSTATUS)0xC0020017) #define RPC_NT_SERVER_TOO_BUSY ((NTSTATUS)0xC0020018) #define RPC_NT_INVALID_NETWORK_OPTIONS ((NTSTATUS)0xC0020019) #define RPC_NT_NO_CALL_ACTIVE ((NTSTATUS)0xC002001A) #define RPC_NT_CALL_FAILED ((NTSTATUS)0xC002001B) #define RPC_NT_CALL_FAILED_DNE ((NTSTATUS)0xC002001C) #define RPC_NT_PROTOCOL_ERROR ((NTSTATUS)0xC002001D) #define RPC_NT_UNSUPPORTED_TRANS_SYN ((NTSTATUS)0xC002001F) #define RPC_NT_UNSUPPORTED_TYPE ((NTSTATUS)0xC0020021) #define RPC_NT_INVALID_TAG ((NTSTATUS)0xC0020022) #define RPC_NT_INVALID_BOUND ((NTSTATUS)0xC0020023) #define RPC_NT_NO_ENTRY_NAME ((NTSTATUS)0xC0020024) #define RPC_NT_INVALID_NAME_SYNTAX ((NTSTATUS)0xC0020025) #define RPC_NT_UNSUPPORTED_NAME_SYNTAX ((NTSTATUS)0xC0020026) #define RPC_NT_UUID_NO_ADDRESS ((NTSTATUS)0xC0020028) #define RPC_NT_DUPLICATE_ENDPOINT ((NTSTATUS)0xC0020029) #define RPC_NT_UNKNOWN_AUTHN_TYPE ((NTSTATUS)0xC002002A) #define RPC_NT_MAX_CALLS_TOO_SMALL ((NTSTATUS)0xC002002B) #define RPC_NT_STRING_TOO_LONG ((NTSTATUS)0xC002002C) #define RPC_NT_PROTSEQ_NOT_FOUND ((NTSTATUS)0xC002002D) #define RPC_NT_PROCNUM_OUT_OF_RANGE ((NTSTATUS)0xC002002E) #define RPC_NT_BINDING_HAS_NO_AUTH ((NTSTATUS)0xC002002F) #define RPC_NT_UNKNOWN_AUTHN_SERVICE ((NTSTATUS)0xC0020030) #define RPC_NT_UNKNOWN_AUTHN_LEVEL ((NTSTATUS)0xC0020031) #define RPC_NT_INVALID_AUTH_IDENTITY ((NTSTATUS)0xC0020032) #define 
RPC_NT_UNKNOWN_AUTHZ_SERVICE ((NTSTATUS)0xC0020033) #define EPT_NT_INVALID_ENTRY ((NTSTATUS)0xC0020034) #define EPT_NT_CANT_PERFORM_OP ((NTSTATUS)0xC0020035) #define EPT_NT_NOT_REGISTERED ((NTSTATUS)0xC0020036) #define RPC_NT_NOTHING_TO_EXPORT ((NTSTATUS)0xC0020037) #define RPC_NT_INCOMPLETE_NAME ((NTSTATUS)0xC0020038) #define RPC_NT_INVALID_VERS_OPTION ((NTSTATUS)0xC0020039) #define RPC_NT_NO_MORE_MEMBERS ((NTSTATUS)0xC002003A) #define RPC_NT_NOT_ALL_OBJS_UNEXPORTED ((NTSTATUS)0xC002003B) #define RPC_NT_INTERFACE_NOT_FOUND ((NTSTATUS)0xC002003C) #define RPC_NT_ENTRY_ALREADY_EXISTS ((NTSTATUS)0xC002003D) #define RPC_NT_ENTRY_NOT_FOUND ((NTSTATUS)0xC002003E) #define RPC_NT_NAME_SERVICE_UNAVAILABLE ((NTSTATUS)0xC002003F) #define RPC_NT_INVALID_NAF_ID ((NTSTATUS)0xC0020040) #define RPC_NT_CANNOT_SUPPORT ((NTSTATUS)0xC0020041) #define RPC_NT_NO_CONTEXT_AVAILABLE ((NTSTATUS)0xC0020042) #define RPC_NT_INTERNAL_ERROR ((NTSTATUS)0xC0020043) #define RPC_NT_ZERO_DIVIDE ((NTSTATUS)0xC0020044) #define RPC_NT_ADDRESS_ERROR ((NTSTATUS)0xC0020045) #define RPC_NT_FP_DIV_ZERO ((NTSTATUS)0xC0020046) #define RPC_NT_FP_UNDERFLOW ((NTSTATUS)0xC0020047) #define RPC_NT_FP_OVERFLOW ((NTSTATUS)0xC0020048) #define RPC_NT_CALL_IN_PROGRESS ((NTSTATUS)0xC0020049) #define RPC_NT_NO_MORE_BINDINGS ((NTSTATUS)0xC002004A) #define RPC_NT_GROUP_MEMBER_NOT_FOUND ((NTSTATUS)0xC002004B) #define EPT_NT_CANT_CREATE ((NTSTATUS)0xC002004C) #define RPC_NT_INVALID_OBJECT ((NTSTATUS)0xC002004D) #define RPC_NT_NO_INTERFACES ((NTSTATUS)0xC002004F) #define RPC_NT_CALL_CANCELLED ((NTSTATUS)0xC0020050) #define RPC_NT_BINDING_INCOMPLETE ((NTSTATUS)0xC0020051) #define RPC_NT_COMM_FAILURE ((NTSTATUS)0xC0020052) #define RPC_NT_UNSUPPORTED_AUTHN_LEVEL ((NTSTATUS)0xC0020053) #define RPC_NT_NO_PRINC_NAME ((NTSTATUS)0xC0020054) #define RPC_NT_NOT_RPC_ERROR ((NTSTATUS)0xC0020055) #define RPC_NT_SEC_PKG_ERROR ((NTSTATUS)0xC0020057) #define RPC_NT_NOT_CANCELLED ((NTSTATUS)0xC0020058) #define RPC_NT_INVALID_ASYNC_HANDLE 
((NTSTATUS)0xC0020062) #define RPC_NT_INVALID_ASYNC_CALL ((NTSTATUS)0xC0020063) #define RPC_NT_NO_MORE_ENTRIES ((NTSTATUS)0xC0030001) #define RPC_NT_SS_CHAR_TRANS_OPEN_FAIL ((NTSTATUS)0xC0030002) #define RPC_NT_SS_CHAR_TRANS_SHORT_FILE ((NTSTATUS)0xC0030003) #define RPC_NT_SS_IN_NULL_CONTEXT ((NTSTATUS)0xC0030004) #define RPC_NT_SS_CONTEXT_MISMATCH ((NTSTATUS)0xC0030005) #define RPC_NT_SS_CONTEXT_DAMAGED ((NTSTATUS)0xC0030006) #define RPC_NT_SS_HANDLES_MISMATCH ((NTSTATUS)0xC0030007) #define RPC_NT_SS_CANNOT_GET_CALL_HANDLE ((NTSTATUS)0xC0030008) #define RPC_NT_NULL_REF_POINTER ((NTSTATUS)0xC0030009) #define RPC_NT_ENUM_VALUE_OUT_OF_RANGE ((NTSTATUS)0xC003000A) #define RPC_NT_BYTE_COUNT_TOO_SMALL ((NTSTATUS)0xC003000B) #define RPC_NT_BAD_STUB_DATA ((NTSTATUS)0xC003000C) #define RPC_NT_INVALID_ES_ACTION ((NTSTATUS)0xC0030059) #define RPC_NT_WRONG_ES_VERSION ((NTSTATUS)0xC003005A) #define RPC_NT_WRONG_STUB_VERSION ((NTSTATUS)0xC003005B) #define RPC_NT_INVALID_PIPE_OBJECT ((NTSTATUS)0xC003005C) #define RPC_NT_INVALID_PIPE_OPERATION ((NTSTATUS)0xC003005D) #define RPC_NT_WRONG_PIPE_VERSION ((NTSTATUS)0xC003005E) #define RPC_NT_PIPE_CLOSED ((NTSTATUS)0xC003005F) #define RPC_NT_PIPE_DISCIPLINE_ERROR ((NTSTATUS)0xC0030060) #define RPC_NT_PIPE_EMPTY ((NTSTATUS)0xC0030061) #define STATUS_PNP_BAD_MPS_TABLE ((NTSTATUS)0xC0040035) #define STATUS_PNP_TRANSLATION_FAILED ((NTSTATUS)0xC0040036) #define STATUS_PNP_IRQ_TRANSLATION_FAILED ((NTSTATUS)0xC0040037) #define STATUS_PNP_INVALID_ID ((NTSTATUS)0xC0040038) #define STATUS_ACPI_INVALID_OPCODE ((NTSTATUS)0xC0140001L) #define STATUS_ACPI_STACK_OVERFLOW ((NTSTATUS)0xC0140002L) #define STATUS_ACPI_ASSERT_FAILED ((NTSTATUS)0xC0140003L) #define STATUS_ACPI_INVALID_INDEX ((NTSTATUS)0xC0140004L) #define STATUS_ACPI_INVALID_ARGUMENT ((NTSTATUS)0xC0140005L) #define STATUS_ACPI_FATAL ((NTSTATUS)0xC0140006L) #define STATUS_ACPI_INVALID_SUPERNAME ((NTSTATUS)0xC0140007L) #define STATUS_ACPI_INVALID_ARGTYPE ((NTSTATUS)0xC0140008L) #define 
STATUS_ACPI_INVALID_OBJTYPE ((NTSTATUS)0xC0140009L) #define STATUS_ACPI_INVALID_TARGETTYPE ((NTSTATUS)0xC014000AL) #define STATUS_ACPI_INCORRECT_ARGUMENT_COUNT ((NTSTATUS)0xC014000BL) #define STATUS_ACPI_ADDRESS_NOT_MAPPED ((NTSTATUS)0xC014000CL) #define STATUS_ACPI_INVALID_EVENTTYPE ((NTSTATUS)0xC014000DL) #define STATUS_ACPI_HANDLER_COLLISION ((NTSTATUS)0xC014000EL) #define STATUS_ACPI_INVALID_DATA ((NTSTATUS)0xC014000FL) #define STATUS_ACPI_INVALID_REGION ((NTSTATUS)0xC0140010L) #define STATUS_ACPI_INVALID_ACCESS_SIZE ((NTSTATUS)0xC0140011L) #define STATUS_ACPI_ACQUIRE_GLOBAL_LOCK ((NTSTATUS)0xC0140012L) #define STATUS_ACPI_ALREADY_INITIALIZED ((NTSTATUS)0xC0140013L) #define STATUS_ACPI_NOT_INITIALIZED ((NTSTATUS)0xC0140014L) #define STATUS_ACPI_INVALID_MUTEX_LEVEL ((NTSTATUS)0xC0140015L) #define STATUS_ACPI_MUTEX_NOT_OWNED ((NTSTATUS)0xC0140016L) #define STATUS_ACPI_MUTEX_NOT_OWNER ((NTSTATUS)0xC0140017L) #define STATUS_ACPI_RS_ACCESS ((NTSTATUS)0xC0140018L) #define STATUS_ACPI_INVALID_TABLE ((NTSTATUS)0xC0140019L) #define STATUS_ACPI_REG_HANDLER_FAILED ((NTSTATUS)0xC0140020L) #define STATUS_ACPI_POWER_REQUEST_FAILED ((NTSTATUS)0xC0140021L) #define STATUS_CTX_WINSTATION_NAME_INVALID ((NTSTATUS)0xC00A0001) #define STATUS_CTX_INVALID_PD ((NTSTATUS)0xC00A0002) #define STATUS_CTX_PD_NOT_FOUND ((NTSTATUS)0xC00A0003) #define STATUS_CTX_CLOSE_PENDING ((NTSTATUS)0xC00A0006) #define STATUS_CTX_NO_OUTBUF ((NTSTATUS)0xC00A0007) #define STATUS_CTX_MODEM_INF_NOT_FOUND ((NTSTATUS)0xC00A0008) #define STATUS_CTX_INVALID_MODEMNAME ((NTSTATUS)0xC00A0009) #define STATUS_CTX_RESPONSE_ERROR ((NTSTATUS)0xC00A000A) #define STATUS_CTX_MODEM_RESPONSE_TIMEOUT ((NTSTATUS)0xC00A000B) #define STATUS_CTX_MODEM_RESPONSE_NO_CARRIER ((NTSTATUS)0xC00A000C) #define STATUS_CTX_MODEM_RESPONSE_NO_DIALTONE ((NTSTATUS)0xC00A000D) #define STATUS_CTX_MODEM_RESPONSE_BUSY ((NTSTATUS)0xC00A000E) #define STATUS_CTX_MODEM_RESPONSE_VOICE ((NTSTATUS)0xC00A000F) #define STATUS_CTX_TD_ERROR 
((NTSTATUS)0xC00A0010) #define STATUS_CTX_LICENSE_CLIENT_INVALID ((NTSTATUS)0xC00A0012) #define STATUS_CTX_LICENSE_NOT_AVAILABLE ((NTSTATUS)0xC00A0013) #define STATUS_CTX_LICENSE_EXPIRED ((NTSTATUS)0xC00A0014) #define STATUS_CTX_WINSTATION_NOT_FOUND ((NTSTATUS)0xC00A0015) #define STATUS_CTX_WINSTATION_NAME_COLLISION ((NTSTATUS)0xC00A0016) #define STATUS_CTX_WINSTATION_BUSY ((NTSTATUS)0xC00A0017) #define STATUS_CTX_BAD_VIDEO_MODE ((NTSTATUS)0xC00A0018) #define STATUS_CTX_GRAPHICS_INVALID ((NTSTATUS)0xC00A0022) #define STATUS_CTX_NOT_CONSOLE ((NTSTATUS)0xC00A0024) #define STATUS_CTX_CLIENT_QUERY_TIMEOUT ((NTSTATUS)0xC00A0026) #define STATUS_CTX_CONSOLE_DISCONNECT ((NTSTATUS)0xC00A0027) #define STATUS_CTX_CONSOLE_CONNECT ((NTSTATUS)0xC00A0028) #define STATUS_CTX_SHADOW_DENIED ((NTSTATUS)0xC00A002A) #define STATUS_CTX_WINSTATION_ACCESS_DENIED ((NTSTATUS)0xC00A002B) #define STATUS_CTX_INVALID_WD ((NTSTATUS)0xC00A002E) #define STATUS_CTX_WD_NOT_FOUND ((NTSTATUS)0xC00A002F) #define STATUS_CTX_SHADOW_INVALID ((NTSTATUS)0xC00A0030) #define STATUS_CTX_SHADOW_DISABLED ((NTSTATUS)0xC00A0031) #define STATUS_RDP_PROTOCOL_ERROR ((NTSTATUS)0xC00A0032) #define STATUS_CTX_CLIENT_LICENSE_NOT_SET ((NTSTATUS)0xC00A0033) #define STATUS_CTX_CLIENT_LICENSE_IN_USE ((NTSTATUS)0xC00A0034) #define STATUS_CTX_SHADOW_ENDED_BY_MODE_CHANGE ((NTSTATUS)0xC00A0035) #define STATUS_CTX_SHADOW_NOT_RUNNING ((NTSTATUS)0xC00A0036) #define STATUS_CLUSTER_INVALID_NODE ((NTSTATUS)0xC0130001) #define STATUS_CLUSTER_NODE_EXISTS ((NTSTATUS)0xC0130002) #define STATUS_CLUSTER_JOIN_IN_PROGRESS ((NTSTATUS)0xC0130003) #define STATUS_CLUSTER_NODE_NOT_FOUND ((NTSTATUS)0xC0130004) #define STATUS_CLUSTER_LOCAL_NODE_NOT_FOUND ((NTSTATUS)0xC0130005) #define STATUS_CLUSTER_NETWORK_EXISTS ((NTSTATUS)0xC0130006) #define STATUS_CLUSTER_NETWORK_NOT_FOUND ((NTSTATUS)0xC0130007) #define STATUS_CLUSTER_NETINTERFACE_EXISTS ((NTSTATUS)0xC0130008) #define STATUS_CLUSTER_NETINTERFACE_NOT_FOUND ((NTSTATUS)0xC0130009) #define 
STATUS_CLUSTER_INVALID_REQUEST ((NTSTATUS)0xC013000A) #define STATUS_CLUSTER_INVALID_NETWORK_PROVIDER ((NTSTATUS)0xC013000B) #define STATUS_CLUSTER_NODE_DOWN ((NTSTATUS)0xC013000C) #define STATUS_CLUSTER_NODE_UNREACHABLE ((NTSTATUS)0xC013000D) #define STATUS_CLUSTER_NODE_NOT_MEMBER ((NTSTATUS)0xC013000E) #define STATUS_CLUSTER_JOIN_NOT_IN_PROGRESS ((NTSTATUS)0xC013000F) #define STATUS_CLUSTER_INVALID_NETWORK ((NTSTATUS)0xC0130010) #define STATUS_CLUSTER_NO_NET_ADAPTERS ((NTSTATUS)0xC0130011) #define STATUS_CLUSTER_NODE_UP ((NTSTATUS)0xC0130012) #define STATUS_CLUSTER_NODE_PAUSED ((NTSTATUS)0xC0130013) #define STATUS_CLUSTER_NODE_NOT_PAUSED ((NTSTATUS)0xC0130014) #define STATUS_CLUSTER_NO_SECURITY_CONTEXT ((NTSTATUS)0xC0130015) #define STATUS_CLUSTER_NETWORK_NOT_INTERNAL ((NTSTATUS)0xC0130016) #define STATUS_CLUSTER_POISONED ((NTSTATUS)0xC0130017) #define STATUS_SXS_SECTION_NOT_FOUND ((NTSTATUS)0xC0150001) #define STATUS_SXS_CANT_GEN_ACTCTX ((NTSTATUS)0xC0150002) #define STATUS_SXS_INVALID_ACTCTXDATA_FORMAT ((NTSTATUS)0xC0150003) #define STATUS_SXS_ASSEMBLY_NOT_FOUND ((NTSTATUS)0xC0150004) #define STATUS_SXS_MANIFEST_FORMAT_ERROR ((NTSTATUS)0xC0150005) #define STATUS_SXS_MANIFEST_PARSE_ERROR ((NTSTATUS)0xC0150006) #define STATUS_SXS_ACTIVATION_CONTEXT_DISABLED ((NTSTATUS)0xC0150007) #define STATUS_SXS_KEY_NOT_FOUND ((NTSTATUS)0xC0150008) #define STATUS_SXS_VERSION_CONFLICT ((NTSTATUS)0xC0150009) #define STATUS_SXS_WRONG_SECTION_TYPE ((NTSTATUS)0xC015000A) #define STATUS_SXS_THREAD_QUERIES_DISABLED ((NTSTATUS)0xC015000B) #define STATUS_SXS_ASSEMBLY_MISSING ((NTSTATUS)0xC015000C) #define STATUS_SXS_PROCESS_DEFAULT_ALREADY_SET ((NTSTATUS)0xC015000E) //#define STATUS_SXS_EARLY_DEACTIVATION ((NTSTATUS)0xC015000F) //#define STATUS_SXS_INVALID_DEACTIVATION ((NTSTATUS)0xC0150010) #define STATUS_SXS_MULTIPLE_DEACTIVATION ((NTSTATUS)0xC0150011) #define STATUS_SXS_SYSTEM_DEFAULT_ACTIVATION_CONTEXT_EMPTY ((NTSTATUS)0xC0150012) #define STATUS_SXS_PROCESS_TERMINATION_REQUESTED 
((NTSTATUS)0xC0150013) #define STATUS_SXS_CORRUPT_ACTIVATION_STACK ((NTSTATUS)0xC0150014) #define STATUS_SXS_CORRUPTION ((NTSTATUS)0xC0150015) #define STATUS_SXS_INVALID_IDENTITY_ATTRIBUTE_VALUE ((NTSTATUS)0xC0150016) #define STATUS_SXS_INVALID_IDENTITY_ATTRIBUTE_NAME ((NTSTATUS)0xC0150017) #define STATUS_SXS_IDENTITY_DUPLICATE_ATTRIBUTE ((NTSTATUS)0xC0150018) #define STATUS_SXS_IDENTITY_PARSE_ERROR ((NTSTATUS)0xC0150019) #define STATUS_SXS_COMPONENT_STORE_CORRUPT ((NTSTATUS)0xC015001A) #define STATUS_SXS_FILE_HASH_MISMATCH ((NTSTATUS)0xC015001B) #define STATUS_SXS_MANIFEST_IDENTITY_SAME_BUT_CONTENTS_DIFFERENT ((NTSTATUS)0xC015001C) #define STATUS_SXS_IDENTITIES_DIFFERENT ((NTSTATUS)0xC015001D) #define STATUS_SXS_ASSEMBLY_IS_NOT_A_DEPLOYMENT ((NTSTATUS)0xC015001E) #define STATUS_SXS_FILE_NOT_PART_OF_ASSEMBLY ((NTSTATUS)0xC015001F) #define STATUS_ADVANCED_INSTALLER_FAILED ((NTSTATUS)0xC0150020) #define STATUS_XML_ENCODING_MISMATCH ((NTSTATUS)0xC0150021) #define STATUS_SXS_MANIFEST_TOO_BIG ((NTSTATUS)0xC0150022) #define STATUS_SXS_SETTING_NOT_REGISTERED ((NTSTATUS)0xC0150023) #define STATUS_SXS_TRANSACTION_CLOSURE_INCOMPLETE ((NTSTATUS)0xC0150024) #define STATUS_SXS_PRIMITIVE_INSTALLER_FAILED ((NTSTATUS)0xC0150025) #define STATUS_GENERIC_COMMAND_FAILED ((NTSTATUS)0xC0150026) #define STATUS_SXS_FILE_HASH_MISSING ((NTSTATUS)0xC0150027) /* Defined in winternl.h, always define since we do not include this header */ /* defined in ntstatus.h */ #if !defined(NTSTATUS_FROM_WIN32) && !defined(INLINE_NTSTATUS_FROM_WIN32) static INLINE NTSTATUS NTSTATUS_FROM_WIN32(long x) { return x <= 0 ? (NTSTATUS)x : (NTSTATUS)(((x) & 0x0000FFFF) | (0x7 << 16) | 0xC0000000); } #endif #ifdef _WIN32 /** * winternl.h contains an incomplete definition of enum FILE_INFORMATION_CLASS * avoid conflict by prefixing the winternl.h definition by _WINTERNL_ and then * make a complete definition of enum FILE_INFORMATION_CLASS ourselves. 
* * For more information, refer to [MS-FSCC]: File System Control Codes: * http://msdn.microsoft.com/en-us/library/cc231987.aspx */ #define FILE_INFORMATION_CLASS _WINTERNL_FILE_INFORMATION_CLASS #define _FILE_INFORMATION_CLASS _WINTERNL__FILE_INFORMATION_CLASS #define FileDirectoryInformation _WINTERNL_FileDirectoryInformation #include <winternl.h> #undef FILE_INFORMATION_CLASS #undef _FILE_INFORMATION_CLASS #undef FileDirectoryInformation #endif typedef enum _FILE_INFORMATION_CLASS { FileDirectoryInformation = 1, FileFullDirectoryInformation, FileBothDirectoryInformation, FileBasicInformation, FileStandardInformation, FileInternalInformation, FileEaInformation, FileAccessInformation, FileNameInformation, FileRenameInformation, FileLinkInformation, FileNamesInformation, FileDispositionInformation, FilePositionInformation, FileFullEaInformation, FileModeInformation, FileAlignmentInformation, FileAllInformation, FileAllocationInformation, FileEndOfFileInformation, FileAlternateNameInformation, FileStreamInformation, FilePipeInformation, FilePipeLocalInformation, FilePipeRemoteInformation, FileMailslotQueryInformation, FileMailslotSetInformation, FileCompressionInformation, FileObjectIdInformation, FileUnknownInformation1, FileMoveClusterInformation, FileQuotaInformation, FileReparsePointInformation, FileNetworkOpenInformation, FileAttributeTagInformation, FileTrackingInformation, FileIdBothDirectoryInformation, FileIdFullDirectoryInformation, FileValidDataLengthInformation, FileShortNameInformation } FILE_INFORMATION_CLASS; #if !defined(_WIN32) || defined(_UWP) #define FILE_SUPERSEDE 0x00000000 #define FILE_OPEN 0x00000001 #define FILE_CREATE 0x00000002 #define FILE_OPEN_IF 0x00000003 #define FILE_OVERWRITE 0x00000004 #define FILE_OVERWRITE_IF 0x00000005 #define FILE_MAXIMUM_DISPOSITION 0x00000005 #define FILE_DIRECTORY_FILE 0x00000001 #define FILE_WRITE_THROUGH 0x00000002 #define FILE_SEQUENTIAL_ONLY 0x00000004 #define FILE_NO_INTERMEDIATE_BUFFERING 0x00000008 
#define FILE_SYNCHRONOUS_IO_ALERT 0x00000010 #define FILE_SYNCHRONOUS_IO_NONALERT 0x00000020 #define FILE_NON_DIRECTORY_FILE 0x00000040 #define FILE_CREATE_TREE_CONNECTION 0x00000080 #define FILE_COMPLETE_IF_OPLOCKED 0x00000100 #define FILE_NO_EA_KNOWLEDGE 0x00000200 #define FILE_OPEN_REMOTE_INSTANCE 0x00000400 #define FILE_RANDOM_ACCESS 0x00000800 #define FILE_DELETE_ON_CLOSE 0x00001000 #define FILE_OPEN_BY_FILE_ID 0x00002000 #define FILE_OPEN_FOR_BACKUP_INTENT 0x00004000 #define FILE_NO_COMPRESSION 0x00008000 #define FILE_OPEN_REQUIRING_OPLOCK 0x00010000 #define FILE_RESERVE_OPFILTER 0x00100000 #define FILE_OPEN_REPARSE_POINT 0x00200000 #define FILE_OPEN_NO_RECALL 0x00400000 #define FILE_OPEN_FOR_FREE_SPACE_QUERY 0x00800000 #define FILE_VALID_OPTION_FLAGS 0x00FFFFFF #define FILE_VALID_PIPE_OPTION_FLAGS 0x00000032 #define FILE_VALID_MAILSLOT_OPTION_FLAGS 0x00000032 #define FILE_VALID_SET_FLAGS 0x00000036 #define FILE_SUPERSEDED 0x00000000 #define FILE_OPENED 0x00000001 #define FILE_CREATED 0x00000002 #define FILE_OVERWRITTEN 0x00000003 #define FILE_EXISTS 0x00000004 #define FILE_DOES_NOT_EXIST 0x00000005 typedef CONST char* PCSZ; typedef struct _STRING { USHORT Length; USHORT MaximumLength; PCHAR Buffer; } STRING; typedef STRING* PSTRING; typedef STRING ANSI_STRING; typedef PSTRING PANSI_STRING; typedef PSTRING PCANSI_STRING; typedef STRING OEM_STRING; typedef PSTRING POEM_STRING; typedef CONST STRING* PCOEM_STRING; typedef struct _LSA_UNICODE_STRING { USHORT Length; USHORT MaximumLength; PWSTR Buffer; } LSA_UNICODE_STRING, *PLSA_UNICODE_STRING, UNICODE_STRING, *PUNICODE_STRING; #define OBJ_INHERIT 0x00000002L #define OBJ_PERMANENT 0x00000010L #define OBJ_EXCLUSIVE 0x00000020L #define OBJ_CASE_INSENSITIVE 0x00000040L #define OBJ_OPENIF 0x00000080L #define OBJ_OPENLINK 0x00000100L #define OBJ_KERNEL_HANDLE 0x00000200L #define OBJ_FORCE_ACCESS_CHECK 0x00000400L #define OBJ_VALID_ATTRIBUTES 0x000007F2L typedef struct _OBJECT_ATTRIBUTES { ULONG Length; HANDLE 
RootDirectory; PUNICODE_STRING ObjectName; ULONG Attributes; PVOID SecurityDescriptor; PVOID SecurityQualityOfService; } OBJECT_ATTRIBUTES; typedef OBJECT_ATTRIBUTES* POBJECT_ATTRIBUTES; typedef struct _IO_STATUS_BLOCK { union { #ifdef _WIN32 NTSTATUS Status; #else NTSTATUS status; #endif PVOID Pointer; }; ULONG_PTR Information; } IO_STATUS_BLOCK, *PIO_STATUS_BLOCK; typedef VOID (*PIO_APC_ROUTINE)(PVOID ApcContext, PIO_STATUS_BLOCK IoStatusBlock, ULONG Reserved); #endif #if !defined(_WIN32) typedef struct _PEB PEB; typedef struct _PEB* PPEB; typedef struct _TEB TEB; typedef struct _TEB* PTEB; /** * Process Environment Block */ struct _THREAD_BLOCK_ID { DWORD ThreadId; TEB* ThreadEnvironmentBlock; }; typedef struct _THREAD_BLOCK_ID THREAD_BLOCK_ID; struct _PEB { DWORD ThreadCount; DWORD ThreadArraySize; THREAD_BLOCK_ID* Threads; }; /* * Thread Environment Block */ struct _TEB { PEB* ProcessEnvironmentBlock; DWORD LastErrorValue; PVOID TlsSlots[64]; }; #define GENERIC_READ 0x80000000 #define GENERIC_WRITE 0x40000000 #define GENERIC_EXECUTE 0x20000000 #define GENERIC_ALL 0x10000000 #define DELETE 0x00010000 #define READ_CONTROL 0x00020000 #define WRITE_DAC 0x00040000 #define WRITE_OWNER 0x00080000 #define SYNCHRONIZE 0x00100000 #define STANDARD_RIGHTS_REQUIRED 0x000F0000 #define STANDARD_RIGHTS_READ 0x00020000 #define STANDARD_RIGHTS_WRITE 0x00020000 #define STANDARD_RIGHTS_EXECUTE 0x00020000 #define STANDARD_RIGHTS_ALL 0x001F0000 #define SPECIFIC_RIGHTS_ALL 0x0000FFFF #define ACCESS_SYSTEM_SECURITY 0x01000000 #define MAXIMUM_ALLOWED 0x02000000 #define FILE_READ_DATA 0x0001 #define FILE_LIST_DIRECTORY 0x0001 #define FILE_WRITE_DATA 0x0002 #define FILE_ADD_FILE 0x0002 #define FILE_APPEND_DATA 0x0004 #define FILE_ADD_SUBDIRECTORY 0x0004 #define FILE_CREATE_PIPE_INSTANCE 0x0004 #define FILE_READ_EA 0x0008 #define FILE_WRITE_EA 0x0010 #define FILE_EXECUTE 0x0020 #define FILE_TRAVERSE 0x0020 #define FILE_DELETE_CHILD 0x0040 #define FILE_READ_ATTRIBUTES 0x0080 #define 
FILE_WRITE_ATTRIBUTES 0x0100 #define FILE_ALL_ACCESS (STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 0x1FF) #define FILE_GENERIC_READ (STANDARD_RIGHTS_READ | FILE_READ_DATA | FILE_READ_ATTRIBUTES | FILE_READ_EA | SYNCHRONIZE) #define FILE_GENERIC_WRITE (STANDARD_RIGHTS_WRITE | FILE_WRITE_DATA | FILE_WRITE_ATTRIBUTES | FILE_WRITE_EA | FILE_APPEND_DATA | SYNCHRONIZE) #define FILE_GENERIC_EXECUTE (STANDARD_RIGHTS_EXECUTE | FILE_READ_ATTRIBUTES | FILE_EXECUTE | SYNCHRONIZE) #define FILE_SHARE_READ 0x00000001 #define FILE_SHARE_WRITE 0x00000002 #define FILE_SHARE_DELETE 0x00000004 typedef DWORD ACCESS_MASK; typedef ACCESS_MASK* PACCESS_MASK; #ifdef __cplusplus extern "C" { #endif WINPR_API PTEB NtCurrentTeb(void); #ifdef __cplusplus } #endif #endif #ifdef __cplusplus extern "C" { #endif WINPR_API VOID _RtlInitAnsiString(PANSI_STRING DestinationString, PCSZ SourceString); WINPR_API VOID _RtlInitUnicodeString(PUNICODE_STRING DestinationString, PCWSTR SourceString); WINPR_API NTSTATUS _RtlAnsiStringToUnicodeString(PUNICODE_STRING DestinationString, PCANSI_STRING SourceString, BOOLEAN AllocateDestinationString); WINPR_API VOID _RtlFreeUnicodeString(PUNICODE_STRING UnicodeString); WINPR_API ULONG _RtlNtStatusToDosError(NTSTATUS status); WINPR_API VOID _InitializeObjectAttributes(POBJECT_ATTRIBUTES InitializedAttributes, PUNICODE_STRING ObjectName, ULONG Attributes, HANDLE RootDirectory, PSECURITY_DESCRIPTOR SecurityDescriptor); WINPR_API NTSTATUS _NtCreateFile(PHANDLE FileHandle, ACCESS_MASK DesiredAccess, POBJECT_ATTRIBUTES ObjectAttributes, PIO_STATUS_BLOCK IoStatusBlock, PLARGE_INTEGER AllocationSize, ULONG FileAttributes, ULONG ShareAccess, ULONG CreateDisposition, ULONG CreateOptions, PVOID EaBuffer, ULONG EaLength); WINPR_API NTSTATUS _NtOpenFile(PHANDLE FileHandle, ACCESS_MASK DesiredAccess, POBJECT_ATTRIBUTES ObjectAttributes, PIO_STATUS_BLOCK IoStatusBlock, ULONG ShareAccess, ULONG OpenOptions); WINPR_API NTSTATUS _NtReadFile(HANDLE FileHandle, HANDLE Event, 
PIO_APC_ROUTINE ApcRoutine, PVOID ApcContext, PIO_STATUS_BLOCK IoStatusBlock, PVOID Buffer, ULONG Length, PLARGE_INTEGER ByteOffset, PULONG Key); WINPR_API NTSTATUS _NtWriteFile(HANDLE FileHandle, HANDLE Event, PIO_APC_ROUTINE ApcRoutine, PVOID ApcContext, PIO_STATUS_BLOCK IoStatusBlock, PVOID Buffer, ULONG Length, PLARGE_INTEGER ByteOffset, PULONG Key); WINPR_API NTSTATUS _NtDeviceIoControlFile(HANDLE FileHandle, HANDLE Event, PIO_APC_ROUTINE ApcRoutine, PVOID ApcContext, PIO_STATUS_BLOCK IoStatusBlock, ULONG IoControlCode, PVOID InputBuffer, ULONG InputBufferLength, PVOID OutputBuffer, ULONG OutputBufferLength); WINPR_API NTSTATUS _NtClose(HANDLE Handle); WINPR_API NTSTATUS _NtWaitForSingleObject(HANDLE Handle, BOOLEAN Alertable, PLARGE_INTEGER Timeout); #ifdef __cplusplus } #endif #endif /* WINPR_NT_H */
{ "content_hash": "8e97f44e3c4efecd9d141f06f3bb2010", "timestamp": "", "source": "github", "line_count": 1578, "max_line_length": 141, "avg_line_length": 53.11216730038023, "alnum_prop": 0.7557480521649903, "repo_name": "oshogbo/FreeRDP", "id": "d7ba795902453be7541ac4dcf282ff18972abc67", "size": "84513", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "winpr/include/winpr/nt.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "11859984" }, { "name": "C#", "bytes": "9809" }, { "name": "C++", "bytes": "169689" }, { "name": "CMake", "bytes": "719537" }, { "name": "CSS", "bytes": "5696" }, { "name": "HTML", "bytes": "99139" }, { "name": "Java", "bytes": "419683" }, { "name": "Makefile", "bytes": "1585" }, { "name": "Objective-C", "bytes": "1139060" }, { "name": "Perl", "bytes": "8044" }, { "name": "Python", "bytes": "3318" }, { "name": "Roff", "bytes": "3708" }, { "name": "Shell", "bytes": "26450" } ], "symlink_target": "" }
<?php
namespace TYPO3\CMS\Extensionmanager\Exception;

/**
 * Exception raised when a TYPO3 dependency of an extension
 * cannot be resolved.
 *
 * Marker subclass of UnresolvedDependencyException; carries no
 * additional state or behavior of its own.
 */
class UnresolvedTypo3DependencyException extends UnresolvedDependencyException
{
}
{ "content_hash": "7f4ade67f5abdf886b4576571f2a32de", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 78, "avg_line_length": 18.09090909090909, "alnum_prop": 0.8090452261306532, "repo_name": "ahmedRguei/blogTypo", "id": "d9ea66dae755b6b5a7bb763cfa1dc81b90fa4448", "size": "619", "binary": false, "copies": "15", "ref": "refs/heads/master", "path": "typo3/sysext/extensionmanager/Classes/Exception/UnresolvedTypo3DependencyException.php", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ApacheConf", "bytes": "3425" }, { "name": "CSS", "bytes": "1591892" }, { "name": "HTML", "bytes": "617019" }, { "name": "JavaScript", "bytes": "3011598" }, { "name": "PHP", "bytes": "25967799" }, { "name": "PLpgSQL", "bytes": "3957" }, { "name": "Shell", "bytes": "7244" }, { "name": "Smarty", "bytes": "302" }, { "name": "TypeScript", "bytes": "122147" }, { "name": "XSLT", "bytes": "27654" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8" standalone="no"?> <profiles version="2"> <profile kind="CleanUpProfile" name="soap2jms" version="2"> <setting id="cleanup.qualify_static_method_accesses_with_declaring_class" value="false"/> <setting id="cleanup.always_use_this_for_non_static_method_access" value="false"/> <setting id="cleanup.organize_imports" value="true"/> <setting id="cleanup.remove_trailing_whitespaces_ignore_empty" value="false"/> <setting id="cleanup.format_source_code_changes_only" value="false"/> <setting id="cleanup.qualify_static_field_accesses_with_declaring_class" value="false"/> <setting id="cleanup.add_generated_serial_version_id" value="true"/> <setting id="cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class" value="true"/> <setting id="cleanup.remove_redundant_type_arguments" value="true"/> <setting id="cleanup.remove_unused_imports" value="true"/> <setting id="cleanup.insert_inferred_type_arguments" value="false"/> <setting id="cleanup.make_private_fields_final" value="true"/> <setting id="cleanup.use_lambda" value="true"/> <setting id="cleanup.always_use_blocks" value="true"/> <setting id="cleanup.use_this_for_non_static_field_access_only_if_necessary" value="false"/> <setting id="cleanup.sort_members_all" value="true"/> <setting id="cleanup.remove_trailing_whitespaces_all" value="true"/> <setting id="cleanup.add_missing_annotations" value="true"/> <setting id="cleanup.always_use_this_for_non_static_field_access" value="true"/> <setting id="cleanup.make_parameters_final" value="true"/> <setting id="cleanup.sort_members" value="true"/> <setting id="cleanup.remove_private_constructors" value="true"/> <setting id="cleanup.always_use_parentheses_in_expressions" value="false"/> <setting id="cleanup.remove_unused_local_variables" value="true"/> <setting id="cleanup.convert_to_enhanced_for_loop" value="true"/> <setting id="cleanup.remove_unused_private_fields" value="true"/> <setting id="cleanup.never_use_blocks" 
value="false"/> <setting id="cleanup.add_missing_deprecated_annotations" value="true"/> <setting id="cleanup.use_this_for_non_static_field_access" value="true"/> <setting id="cleanup.remove_unnecessary_nls_tags" value="true"/> <setting id="cleanup.qualify_static_member_accesses_through_instances_with_declaring_class" value="true"/> <setting id="cleanup.add_missing_nls_tags" value="false"/> <setting id="cleanup.remove_unnecessary_casts" value="true"/> <setting id="cleanup.use_blocks_only_for_return_and_throw" value="false"/> <setting id="cleanup.format_source_code" value="true"/> <setting id="cleanup.convert_functional_interfaces" value="true"/> <setting id="cleanup.add_default_serial_version_id" value="false"/> <setting id="cleanup.remove_unused_private_methods" value="true"/> <setting id="cleanup.remove_trailing_whitespaces" value="true"/> <setting id="cleanup.make_type_abstract_if_missing_method" value="false"/> <setting id="cleanup.add_serial_version_id" value="true"/> <setting id="cleanup.use_this_for_non_static_method_access" value="false"/> <setting id="cleanup.use_this_for_non_static_method_access_only_if_necessary" value="true"/> <setting id="cleanup.use_anonymous_class_creation" value="false"/> <setting id="cleanup.add_missing_override_annotations_interface_methods" value="true"/> <setting id="cleanup.remove_unused_private_members" value="false"/> <setting id="cleanup.make_local_variable_final" value="true"/> <setting id="cleanup.add_missing_methods" value="false"/> <setting id="cleanup.never_use_parentheses_in_expressions" value="true"/> <setting id="cleanup.qualify_static_member_accesses_with_declaring_class" value="true"/> <setting id="cleanup.use_parentheses_in_expressions" value="false"/> <setting id="cleanup.add_missing_override_annotations" value="true"/> <setting id="cleanup.use_blocks" value="true"/> <setting id="cleanup.make_variable_declarations_final" value="true"/> <setting id="cleanup.correct_indentation" value="true"/> <setting 
id="cleanup.remove_unused_private_types" value="true"/> </profile> </profiles>
{ "content_hash": "d20ff19de2ad15beebf80b12ef7315d1", "timestamp": "", "source": "github", "line_count": 61, "max_line_length": 106, "avg_line_length": 66.42622950819673, "alnum_prop": 0.7576505429417572, "repo_name": "jbricks/soap2jms", "id": "efa946ee23ceee8edbbd83519c2707e48551caeb", "size": "4052", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "eclipse_cleanup_settings.xml", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "873" }, { "name": "HTML", "bytes": "1675" }, { "name": "Java", "bytes": "167798" } ], "symlink_target": "" }
import { FULLY_QUALIFIED, MINIMALLY_QUALIFIED, Qualifier, UNQUALIFIED } from 'emojibase';

/**
 * Map a CLDR emoji qualification status string to the matching
 * `Qualifier` constant from emojibase.
 *
 * @param type - Qualification status as it appears in the emoji data
 *   (`'fully-qualified'`, `'minimally-qualified'`, or `'unqualified'`).
 * @returns The corresponding qualifier constant, or `null` when the
 *   string is not one of the three recognized statuses.
 */
export function extractQualifier(type: string): Qualifier | null {
  switch (type) {
    case 'fully-qualified':
      return FULLY_QUALIFIED;
    case 'minimally-qualified':
      return MINIMALLY_QUALIFIED;
    case 'unqualified':
      return UNQUALIFIED;
    default:
      return null;
  }
}
{ "content_hash": "ea54953ecedd7f11e142662f25b0b5dc", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 89, "avg_line_length": 21.705882352941178, "alnum_prop": 0.6937669376693767, "repo_name": "milesj/emojibase", "id": "c17055cfb15230042c4743348c81cef6a8499940", "size": "369", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "packages/generator/src/parsers/extractQualifier.ts", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "3009" }, { "name": "JavaScript", "bytes": "8368" }, { "name": "TypeScript", "bytes": "418433" } ], "symlink_target": "" }
 #pragma once #include <aws/drs/Drs_EXPORTS.h> #include <aws/drs/model/ReplicationConfigurationDataPlaneRouting.h> #include <aws/drs/model/ReplicationConfigurationDefaultLargeStagingDiskType.h> #include <aws/drs/model/ReplicationConfigurationEbsEncryption.h> #include <aws/core/utils/memory/stl/AWSString.h> #include <aws/core/utils/memory/stl/AWSVector.h> #include <aws/core/utils/memory/stl/AWSMap.h> #include <aws/drs/model/PITPolicyRule.h> #include <aws/drs/model/ReplicationConfigurationReplicatedDisk.h> #include <utility> namespace Aws { template<typename RESULT_TYPE> class AmazonWebServiceResult; namespace Utils { namespace Json { class JsonValue; } // namespace Json } // namespace Utils namespace drs { namespace Model { class AWS_DRS_API UpdateReplicationConfigurationResult { public: UpdateReplicationConfigurationResult(); UpdateReplicationConfigurationResult(const Aws::AmazonWebServiceResult<Aws::Utils::Json::JsonValue>& result); UpdateReplicationConfigurationResult& operator=(const Aws::AmazonWebServiceResult<Aws::Utils::Json::JsonValue>& result); /** * <p>Whether to associate the default Elastic Disaster Recovery Security group * with the Replication Configuration.</p> */ inline bool GetAssociateDefaultSecurityGroup() const{ return m_associateDefaultSecurityGroup; } /** * <p>Whether to associate the default Elastic Disaster Recovery Security group * with the Replication Configuration.</p> */ inline void SetAssociateDefaultSecurityGroup(bool value) { m_associateDefaultSecurityGroup = value; } /** * <p>Whether to associate the default Elastic Disaster Recovery Security group * with the Replication Configuration.</p> */ inline UpdateReplicationConfigurationResult& WithAssociateDefaultSecurityGroup(bool value) { SetAssociateDefaultSecurityGroup(value); return *this;} /** * <p>Configure bandwidth throttling for the outbound data transfer rate of the * Source Server in Mbps.</p> */ inline long long GetBandwidthThrottling() const{ return m_bandwidthThrottling; } /** 
* <p>Configure bandwidth throttling for the outbound data transfer rate of the * Source Server in Mbps.</p> */ inline void SetBandwidthThrottling(long long value) { m_bandwidthThrottling = value; } /** * <p>Configure bandwidth throttling for the outbound data transfer rate of the * Source Server in Mbps.</p> */ inline UpdateReplicationConfigurationResult& WithBandwidthThrottling(long long value) { SetBandwidthThrottling(value); return *this;} /** * <p>Whether to create a Public IP for the Recovery Instance by default.</p> */ inline bool GetCreatePublicIP() const{ return m_createPublicIP; } /** * <p>Whether to create a Public IP for the Recovery Instance by default.</p> */ inline void SetCreatePublicIP(bool value) { m_createPublicIP = value; } /** * <p>Whether to create a Public IP for the Recovery Instance by default.</p> */ inline UpdateReplicationConfigurationResult& WithCreatePublicIP(bool value) { SetCreatePublicIP(value); return *this;} /** * <p>The data plane routing mechanism that will be used for replication.</p> */ inline const ReplicationConfigurationDataPlaneRouting& GetDataPlaneRouting() const{ return m_dataPlaneRouting; } /** * <p>The data plane routing mechanism that will be used for replication.</p> */ inline void SetDataPlaneRouting(const ReplicationConfigurationDataPlaneRouting& value) { m_dataPlaneRouting = value; } /** * <p>The data plane routing mechanism that will be used for replication.</p> */ inline void SetDataPlaneRouting(ReplicationConfigurationDataPlaneRouting&& value) { m_dataPlaneRouting = std::move(value); } /** * <p>The data plane routing mechanism that will be used for replication.</p> */ inline UpdateReplicationConfigurationResult& WithDataPlaneRouting(const ReplicationConfigurationDataPlaneRouting& value) { SetDataPlaneRouting(value); return *this;} /** * <p>The data plane routing mechanism that will be used for replication.</p> */ inline UpdateReplicationConfigurationResult& 
WithDataPlaneRouting(ReplicationConfigurationDataPlaneRouting&& value) { SetDataPlaneRouting(std::move(value)); return *this;} /** * <p>The Staging Disk EBS volume type to be used during replication.</p> */ inline const ReplicationConfigurationDefaultLargeStagingDiskType& GetDefaultLargeStagingDiskType() const{ return m_defaultLargeStagingDiskType; } /** * <p>The Staging Disk EBS volume type to be used during replication.</p> */ inline void SetDefaultLargeStagingDiskType(const ReplicationConfigurationDefaultLargeStagingDiskType& value) { m_defaultLargeStagingDiskType = value; } /** * <p>The Staging Disk EBS volume type to be used during replication.</p> */ inline void SetDefaultLargeStagingDiskType(ReplicationConfigurationDefaultLargeStagingDiskType&& value) { m_defaultLargeStagingDiskType = std::move(value); } /** * <p>The Staging Disk EBS volume type to be used during replication.</p> */ inline UpdateReplicationConfigurationResult& WithDefaultLargeStagingDiskType(const ReplicationConfigurationDefaultLargeStagingDiskType& value) { SetDefaultLargeStagingDiskType(value); return *this;} /** * <p>The Staging Disk EBS volume type to be used during replication.</p> */ inline UpdateReplicationConfigurationResult& WithDefaultLargeStagingDiskType(ReplicationConfigurationDefaultLargeStagingDiskType&& value) { SetDefaultLargeStagingDiskType(std::move(value)); return *this;} /** * <p>The type of EBS encryption to be used during replication.</p> */ inline const ReplicationConfigurationEbsEncryption& GetEbsEncryption() const{ return m_ebsEncryption; } /** * <p>The type of EBS encryption to be used during replication.</p> */ inline void SetEbsEncryption(const ReplicationConfigurationEbsEncryption& value) { m_ebsEncryption = value; } /** * <p>The type of EBS encryption to be used during replication.</p> */ inline void SetEbsEncryption(ReplicationConfigurationEbsEncryption&& value) { m_ebsEncryption = std::move(value); } /** * <p>The type of EBS encryption to be used during 
replication.</p> */ inline UpdateReplicationConfigurationResult& WithEbsEncryption(const ReplicationConfigurationEbsEncryption& value) { SetEbsEncryption(value); return *this;} /** * <p>The type of EBS encryption to be used during replication.</p> */ inline UpdateReplicationConfigurationResult& WithEbsEncryption(ReplicationConfigurationEbsEncryption&& value) { SetEbsEncryption(std::move(value)); return *this;} /** * <p>The ARN of the EBS encryption key to be used during replication.</p> */ inline const Aws::String& GetEbsEncryptionKeyArn() const{ return m_ebsEncryptionKeyArn; } /** * <p>The ARN of the EBS encryption key to be used during replication.</p> */ inline void SetEbsEncryptionKeyArn(const Aws::String& value) { m_ebsEncryptionKeyArn = value; } /** * <p>The ARN of the EBS encryption key to be used during replication.</p> */ inline void SetEbsEncryptionKeyArn(Aws::String&& value) { m_ebsEncryptionKeyArn = std::move(value); } /** * <p>The ARN of the EBS encryption key to be used during replication.</p> */ inline void SetEbsEncryptionKeyArn(const char* value) { m_ebsEncryptionKeyArn.assign(value); } /** * <p>The ARN of the EBS encryption key to be used during replication.</p> */ inline UpdateReplicationConfigurationResult& WithEbsEncryptionKeyArn(const Aws::String& value) { SetEbsEncryptionKeyArn(value); return *this;} /** * <p>The ARN of the EBS encryption key to be used during replication.</p> */ inline UpdateReplicationConfigurationResult& WithEbsEncryptionKeyArn(Aws::String&& value) { SetEbsEncryptionKeyArn(std::move(value)); return *this;} /** * <p>The ARN of the EBS encryption key to be used during replication.</p> */ inline UpdateReplicationConfigurationResult& WithEbsEncryptionKeyArn(const char* value) { SetEbsEncryptionKeyArn(value); return *this;} /** * <p>The name of the Replication Configuration.</p> */ inline const Aws::String& GetName() const{ return m_name; } /** * <p>The name of the Replication Configuration.</p> */ inline void SetName(const 
Aws::String& value) { m_name = value; } /** * <p>The name of the Replication Configuration.</p> */ inline void SetName(Aws::String&& value) { m_name = std::move(value); } /** * <p>The name of the Replication Configuration.</p> */ inline void SetName(const char* value) { m_name.assign(value); } /** * <p>The name of the Replication Configuration.</p> */ inline UpdateReplicationConfigurationResult& WithName(const Aws::String& value) { SetName(value); return *this;} /** * <p>The name of the Replication Configuration.</p> */ inline UpdateReplicationConfigurationResult& WithName(Aws::String&& value) { SetName(std::move(value)); return *this;} /** * <p>The name of the Replication Configuration.</p> */ inline UpdateReplicationConfigurationResult& WithName(const char* value) { SetName(value); return *this;} /** * <p>The Point in time (PIT) policy to manage snapshots taken during * replication.</p> */ inline const Aws::Vector<PITPolicyRule>& GetPitPolicy() const{ return m_pitPolicy; } /** * <p>The Point in time (PIT) policy to manage snapshots taken during * replication.</p> */ inline void SetPitPolicy(const Aws::Vector<PITPolicyRule>& value) { m_pitPolicy = value; } /** * <p>The Point in time (PIT) policy to manage snapshots taken during * replication.</p> */ inline void SetPitPolicy(Aws::Vector<PITPolicyRule>&& value) { m_pitPolicy = std::move(value); } /** * <p>The Point in time (PIT) policy to manage snapshots taken during * replication.</p> */ inline UpdateReplicationConfigurationResult& WithPitPolicy(const Aws::Vector<PITPolicyRule>& value) { SetPitPolicy(value); return *this;} /** * <p>The Point in time (PIT) policy to manage snapshots taken during * replication.</p> */ inline UpdateReplicationConfigurationResult& WithPitPolicy(Aws::Vector<PITPolicyRule>&& value) { SetPitPolicy(std::move(value)); return *this;} /** * <p>The Point in time (PIT) policy to manage snapshots taken during * replication.</p> */ inline UpdateReplicationConfigurationResult& AddPitPolicy(const 
PITPolicyRule& value) { m_pitPolicy.push_back(value); return *this; } /** * <p>The Point in time (PIT) policy to manage snapshots taken during * replication.</p> */ inline UpdateReplicationConfigurationResult& AddPitPolicy(PITPolicyRule&& value) { m_pitPolicy.push_back(std::move(value)); return *this; } /** * <p>The configuration of the disks of the Source Server to be replicated.</p> */ inline const Aws::Vector<ReplicationConfigurationReplicatedDisk>& GetReplicatedDisks() const{ return m_replicatedDisks; } /** * <p>The configuration of the disks of the Source Server to be replicated.</p> */ inline void SetReplicatedDisks(const Aws::Vector<ReplicationConfigurationReplicatedDisk>& value) { m_replicatedDisks = value; } /** * <p>The configuration of the disks of the Source Server to be replicated.</p> */ inline void SetReplicatedDisks(Aws::Vector<ReplicationConfigurationReplicatedDisk>&& value) { m_replicatedDisks = std::move(value); } /** * <p>The configuration of the disks of the Source Server to be replicated.</p> */ inline UpdateReplicationConfigurationResult& WithReplicatedDisks(const Aws::Vector<ReplicationConfigurationReplicatedDisk>& value) { SetReplicatedDisks(value); return *this;} /** * <p>The configuration of the disks of the Source Server to be replicated.</p> */ inline UpdateReplicationConfigurationResult& WithReplicatedDisks(Aws::Vector<ReplicationConfigurationReplicatedDisk>&& value) { SetReplicatedDisks(std::move(value)); return *this;} /** * <p>The configuration of the disks of the Source Server to be replicated.</p> */ inline UpdateReplicationConfigurationResult& AddReplicatedDisks(const ReplicationConfigurationReplicatedDisk& value) { m_replicatedDisks.push_back(value); return *this; } /** * <p>The configuration of the disks of the Source Server to be replicated.</p> */ inline UpdateReplicationConfigurationResult& AddReplicatedDisks(ReplicationConfigurationReplicatedDisk&& value) { m_replicatedDisks.push_back(std::move(value)); return *this; } /** 
* <p>The instance type to be used for the replication server.</p> */ inline const Aws::String& GetReplicationServerInstanceType() const{ return m_replicationServerInstanceType; } /** * <p>The instance type to be used for the replication server.</p> */ inline void SetReplicationServerInstanceType(const Aws::String& value) { m_replicationServerInstanceType = value; } /** * <p>The instance type to be used for the replication server.</p> */ inline void SetReplicationServerInstanceType(Aws::String&& value) { m_replicationServerInstanceType = std::move(value); } /** * <p>The instance type to be used for the replication server.</p> */ inline void SetReplicationServerInstanceType(const char* value) { m_replicationServerInstanceType.assign(value); } /** * <p>The instance type to be used for the replication server.</p> */ inline UpdateReplicationConfigurationResult& WithReplicationServerInstanceType(const Aws::String& value) { SetReplicationServerInstanceType(value); return *this;} /** * <p>The instance type to be used for the replication server.</p> */ inline UpdateReplicationConfigurationResult& WithReplicationServerInstanceType(Aws::String&& value) { SetReplicationServerInstanceType(std::move(value)); return *this;} /** * <p>The instance type to be used for the replication server.</p> */ inline UpdateReplicationConfigurationResult& WithReplicationServerInstanceType(const char* value) { SetReplicationServerInstanceType(value); return *this;} /** * <p>The security group IDs that will be used by the replication server.</p> */ inline const Aws::Vector<Aws::String>& GetReplicationServersSecurityGroupsIDs() const{ return m_replicationServersSecurityGroupsIDs; } /** * <p>The security group IDs that will be used by the replication server.</p> */ inline void SetReplicationServersSecurityGroupsIDs(const Aws::Vector<Aws::String>& value) { m_replicationServersSecurityGroupsIDs = value; } /** * <p>The security group IDs that will be used by the replication server.</p> */ inline void 
SetReplicationServersSecurityGroupsIDs(Aws::Vector<Aws::String>&& value) { m_replicationServersSecurityGroupsIDs = std::move(value); } /** * <p>The security group IDs that will be used by the replication server.</p> */ inline UpdateReplicationConfigurationResult& WithReplicationServersSecurityGroupsIDs(const Aws::Vector<Aws::String>& value) { SetReplicationServersSecurityGroupsIDs(value); return *this;} /** * <p>The security group IDs that will be used by the replication server.</p> */ inline UpdateReplicationConfigurationResult& WithReplicationServersSecurityGroupsIDs(Aws::Vector<Aws::String>&& value) { SetReplicationServersSecurityGroupsIDs(std::move(value)); return *this;} /** * <p>The security group IDs that will be used by the replication server.</p> */ inline UpdateReplicationConfigurationResult& AddReplicationServersSecurityGroupsIDs(const Aws::String& value) { m_replicationServersSecurityGroupsIDs.push_back(value); return *this; } /** * <p>The security group IDs that will be used by the replication server.</p> */ inline UpdateReplicationConfigurationResult& AddReplicationServersSecurityGroupsIDs(Aws::String&& value) { m_replicationServersSecurityGroupsIDs.push_back(std::move(value)); return *this; } /** * <p>The security group IDs that will be used by the replication server.</p> */ inline UpdateReplicationConfigurationResult& AddReplicationServersSecurityGroupsIDs(const char* value) { m_replicationServersSecurityGroupsIDs.push_back(value); return *this; } /** * <p>The ID of the Source Server for this Replication Configuration.</p> */ inline const Aws::String& GetSourceServerID() const{ return m_sourceServerID; } /** * <p>The ID of the Source Server for this Replication Configuration.</p> */ inline void SetSourceServerID(const Aws::String& value) { m_sourceServerID = value; } /** * <p>The ID of the Source Server for this Replication Configuration.</p> */ inline void SetSourceServerID(Aws::String&& value) { m_sourceServerID = std::move(value); } /** * <p>The 
ID of the Source Server for this Replication Configuration.</p> */ inline void SetSourceServerID(const char* value) { m_sourceServerID.assign(value); } /** * <p>The ID of the Source Server for this Replication Configuration.</p> */ inline UpdateReplicationConfigurationResult& WithSourceServerID(const Aws::String& value) { SetSourceServerID(value); return *this;} /** * <p>The ID of the Source Server for this Replication Configuration.</p> */ inline UpdateReplicationConfigurationResult& WithSourceServerID(Aws::String&& value) { SetSourceServerID(std::move(value)); return *this;} /** * <p>The ID of the Source Server for this Replication Configuration.</p> */ inline UpdateReplicationConfigurationResult& WithSourceServerID(const char* value) { SetSourceServerID(value); return *this;} /** * <p>The subnet to be used by the replication staging area.</p> */ inline const Aws::String& GetStagingAreaSubnetId() const{ return m_stagingAreaSubnetId; } /** * <p>The subnet to be used by the replication staging area.</p> */ inline void SetStagingAreaSubnetId(const Aws::String& value) { m_stagingAreaSubnetId = value; } /** * <p>The subnet to be used by the replication staging area.</p> */ inline void SetStagingAreaSubnetId(Aws::String&& value) { m_stagingAreaSubnetId = std::move(value); } /** * <p>The subnet to be used by the replication staging area.</p> */ inline void SetStagingAreaSubnetId(const char* value) { m_stagingAreaSubnetId.assign(value); } /** * <p>The subnet to be used by the replication staging area.</p> */ inline UpdateReplicationConfigurationResult& WithStagingAreaSubnetId(const Aws::String& value) { SetStagingAreaSubnetId(value); return *this;} /** * <p>The subnet to be used by the replication staging area.</p> */ inline UpdateReplicationConfigurationResult& WithStagingAreaSubnetId(Aws::String&& value) { SetStagingAreaSubnetId(std::move(value)); return *this;} /** * <p>The subnet to be used by the replication staging area.</p> */ inline 
UpdateReplicationConfigurationResult& WithStagingAreaSubnetId(const char* value) { SetStagingAreaSubnetId(value); return *this;} /** * <p>A set of tags to be associated with all resources created in the replication * staging area: EC2 replication server, EBS volumes, EBS snapshots, etc.</p> */ inline const Aws::Map<Aws::String, Aws::String>& GetStagingAreaTags() const{ return m_stagingAreaTags; } /** * <p>A set of tags to be associated with all resources created in the replication * staging area: EC2 replication server, EBS volumes, EBS snapshots, etc.</p> */ inline void SetStagingAreaTags(const Aws::Map<Aws::String, Aws::String>& value) { m_stagingAreaTags = value; } /** * <p>A set of tags to be associated with all resources created in the replication * staging area: EC2 replication server, EBS volumes, EBS snapshots, etc.</p> */ inline void SetStagingAreaTags(Aws::Map<Aws::String, Aws::String>&& value) { m_stagingAreaTags = std::move(value); } /** * <p>A set of tags to be associated with all resources created in the replication * staging area: EC2 replication server, EBS volumes, EBS snapshots, etc.</p> */ inline UpdateReplicationConfigurationResult& WithStagingAreaTags(const Aws::Map<Aws::String, Aws::String>& value) { SetStagingAreaTags(value); return *this;} /** * <p>A set of tags to be associated with all resources created in the replication * staging area: EC2 replication server, EBS volumes, EBS snapshots, etc.</p> */ inline UpdateReplicationConfigurationResult& WithStagingAreaTags(Aws::Map<Aws::String, Aws::String>&& value) { SetStagingAreaTags(std::move(value)); return *this;} /** * <p>A set of tags to be associated with all resources created in the replication * staging area: EC2 replication server, EBS volumes, EBS snapshots, etc.</p> */ inline UpdateReplicationConfigurationResult& AddStagingAreaTags(const Aws::String& key, const Aws::String& value) { m_stagingAreaTags.emplace(key, value); return *this; } /** * <p>A set of tags to be associated with all 
resources created in the replication * staging area: EC2 replication server, EBS volumes, EBS snapshots, etc.</p> */ inline UpdateReplicationConfigurationResult& AddStagingAreaTags(Aws::String&& key, const Aws::String& value) { m_stagingAreaTags.emplace(std::move(key), value); return *this; } /** * <p>A set of tags to be associated with all resources created in the replication * staging area: EC2 replication server, EBS volumes, EBS snapshots, etc.</p> */ inline UpdateReplicationConfigurationResult& AddStagingAreaTags(const Aws::String& key, Aws::String&& value) { m_stagingAreaTags.emplace(key, std::move(value)); return *this; } /** * <p>A set of tags to be associated with all resources created in the replication * staging area: EC2 replication server, EBS volumes, EBS snapshots, etc.</p> */ inline UpdateReplicationConfigurationResult& AddStagingAreaTags(Aws::String&& key, Aws::String&& value) { m_stagingAreaTags.emplace(std::move(key), std::move(value)); return *this; } /** * <p>A set of tags to be associated with all resources created in the replication * staging area: EC2 replication server, EBS volumes, EBS snapshots, etc.</p> */ inline UpdateReplicationConfigurationResult& AddStagingAreaTags(const char* key, Aws::String&& value) { m_stagingAreaTags.emplace(key, std::move(value)); return *this; } /** * <p>A set of tags to be associated with all resources created in the replication * staging area: EC2 replication server, EBS volumes, EBS snapshots, etc.</p> */ inline UpdateReplicationConfigurationResult& AddStagingAreaTags(Aws::String&& key, const char* value) { m_stagingAreaTags.emplace(std::move(key), value); return *this; } /** * <p>A set of tags to be associated with all resources created in the replication * staging area: EC2 replication server, EBS volumes, EBS snapshots, etc.</p> */ inline UpdateReplicationConfigurationResult& AddStagingAreaTags(const char* key, const char* value) { m_stagingAreaTags.emplace(key, value); return *this; } /** * <p>Whether 
to use a dedicated Replication Server in the replication staging * area.</p> */ inline bool GetUseDedicatedReplicationServer() const{ return m_useDedicatedReplicationServer; } /** * <p>Whether to use a dedicated Replication Server in the replication staging * area.</p> */ inline void SetUseDedicatedReplicationServer(bool value) { m_useDedicatedReplicationServer = value; } /** * <p>Whether to use a dedicated Replication Server in the replication staging * area.</p> */ inline UpdateReplicationConfigurationResult& WithUseDedicatedReplicationServer(bool value) { SetUseDedicatedReplicationServer(value); return *this;} private: bool m_associateDefaultSecurityGroup; long long m_bandwidthThrottling; bool m_createPublicIP; ReplicationConfigurationDataPlaneRouting m_dataPlaneRouting; ReplicationConfigurationDefaultLargeStagingDiskType m_defaultLargeStagingDiskType; ReplicationConfigurationEbsEncryption m_ebsEncryption; Aws::String m_ebsEncryptionKeyArn; Aws::String m_name; Aws::Vector<PITPolicyRule> m_pitPolicy; Aws::Vector<ReplicationConfigurationReplicatedDisk> m_replicatedDisks; Aws::String m_replicationServerInstanceType; Aws::Vector<Aws::String> m_replicationServersSecurityGroupsIDs; Aws::String m_sourceServerID; Aws::String m_stagingAreaSubnetId; Aws::Map<Aws::String, Aws::String> m_stagingAreaTags; bool m_useDedicatedReplicationServer; }; } // namespace Model } // namespace drs } // namespace Aws
{ "content_hash": "112060c6c457ff5ebbc7a978404878ea", "timestamp": "", "source": "github", "line_count": 599, "max_line_length": 208, "avg_line_length": 42.40734557595993, "alnum_prop": 0.7107314384694119, "repo_name": "aws/aws-sdk-cpp", "id": "76f88fc9236dc5900b310a4f328d3f29350ca888", "size": "25521", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "aws-cpp-sdk-drs/include/aws/drs/model/UpdateReplicationConfigurationResult.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "309797" }, { "name": "C++", "bytes": "476866144" }, { "name": "CMake", "bytes": "1245180" }, { "name": "Dockerfile", "bytes": "11688" }, { "name": "HTML", "bytes": "8056" }, { "name": "Java", "bytes": "413602" }, { "name": "Python", "bytes": "79245" }, { "name": "Shell", "bytes": "9246" } ], "symlink_target": "" }
package org.apache.hadoop.hbase.hbql;

import org.apache.hadoop.hbase.hbql.client.HBqlException;
import org.apache.hadoop.hbase.hbql.impl.Utils;
import org.apache.hadoop.hbase.hbql.statement.HBqlStatement;
import org.apache.hadoop.hbase.hbql.util.TestSupport;
import org.junit.Test;

import java.util.Random;

/**
 * Parser micro-benchmark tests: each test parses an HBql statement a fixed
 * number of times and fails if the whole run exceeds a wall-clock budget.
 */
public class ParseTest extends TestSupport {

    // NOTE(review): not read anywhere in this class; retained as-is to keep
    // the class surface unchanged.
    static Random randomVal = new Random();

    /**
     * Parses {@code sql} {@code reps} times, asserting each parse yields a
     * non-null statement, then asserts the whole run took under
     * {@code maxTime} milliseconds.
     *
     * NOTE(review): the elapsed-time assertion is wall-clock based and can be
     * flaky on a loaded machine.
     */
    public static void parseSQL(final String sql, final int reps, final long maxTime) throws HBqlException {
        final long startMillis = System.currentTimeMillis();
        for (int rep = 0; rep < reps; rep++) {
            final HBqlStatement parsed = Utils.parseHBqlStatement(sql);
            assertTrue(parsed != null);
        }
        final long endMillis = System.currentTimeMillis();
        assertTrue((endMillis - startMillis) < maxTime);
    }

    /** Parses a CREATE TEMP MAPPING statement 1000 times within a 1s budget. */
    @Test
    public void parseTest1() throws HBqlException {
        parseSQL("CREATE TEMP MAPPING tab2 FOR TABLE table2"
                 + "("
                 + "keyval key, "
                 + "f1 ("
                 + " val1 string alias val1, "
                 + " val2 string alias val2, "
                 + " val3 string alias notdefinedval "
                 + "), "
                 + "f2 ("
                 + " val1 date alias val3, "
                 + " val2 date alias val4 "
                 + "), "
                 + "f3 ("
                 + " val1 int alias val5, "
                 + " val2 int alias val6, "
                 + " val3 int alias val7, "
                 + " val4 int[] alias val8, "
                 + " mapval1 object alias f3mapval1, "
                 + " mapval2 object alias f3mapval2 "
                 + "))",
                 1000, 1000);
    }

    /** Parses a minimal CREATE QUERY EXECUTOR POOL statement 1000 times within a 1s budget. */
    @Test
    public void parseTest2() throws HBqlException {
        parseSQL("CREATE QUERY EXECUTOR POOL pool1", 1000, 1000);
    }

    /** Parses a CREATE QUERY EXECUTOR POOL statement with options 1000 times within a 1s budget. */
    @Test
    public void parseTest3() throws HBqlException {
        parseSQL("CREATE QUERY EXECUTOR POOL pool1 (MAX_EXECUTOR_POOL_SIZE: 100, MAX_THREAD_COUNT: 20, THREADS_READ_RESULTS: TRUE, COMPLETION_QUEUE_SIZE: 12 ) ", 1000, 1000);
    }

    /**
     * Builds a SELECT with 1000 key ranges and parses it 100 times within a
     * 7.5s budget.
     */
    @Test
    public void parseTest4() throws HBqlException {
        final StringBuilder query = new StringBuilder("select * from tab3 WITH KEYS ");
        for (int range = 0; range < 1000; range++) {
            // Comma-separate every range after the first.
            if (range > 0)
                query.append(", ");
            query.append("'0000000001'TO '0000000009' ");
        }
        query.append("SERVER FILTER where val1+'ss' BETWEEN '11ss' AND '13ss' ");
        parseSQL(query.toString(), 100, 7500);
    }
}
{ "content_hash": "9b4e12254307ae81d2272ecbc23a49cb", "timestamp": "", "source": "github", "line_count": 86, "max_line_length": 174, "avg_line_length": 30.697674418604652, "alnum_prop": 0.5473484848484849, "repo_name": "pambrose/HBql", "id": "74447e07d671991fa6f3e042d2435009dd444bd0", "size": "3503", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/test/java/org/apache/hadoop/hbase/hbql/ParseTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "1441081" }, { "name": "Shell", "bytes": "2185" } ], "symlink_target": "" }
#import "NSObject.h"

// Not exported (class-dump style declaration; no implementation visible here).
@interface CHBLegend : NSObject
{
}

// NOTE(review): semantics inferred from names only — presumably deserializes
// legend data from Excel chart structures; confirm against the implementation.
+ (id)readCHDLegendEntryFrom:(const struct XlChartCustomLegend *)arg1 state:(id)arg2;
+ (id)readFrom:(struct XlChartLegendFrame *)arg1 state:(id)arg2;

@end
{ "content_hash": "0cadd7d6898e53a256c8d9fd90e454f6", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 85, "avg_line_length": 16.785714285714285, "alnum_prop": 0.7361702127659574, "repo_name": "matthewsot/CocoaSharp", "id": "b2bc6e7fcb681b4d71cfe0fd27dc55df97802736", "size": "375", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Headers/PrivateFrameworks/OfficeImport/CHBLegend.h", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "259784" }, { "name": "C#", "bytes": "2789005" }, { "name": "C++", "bytes": "252504" }, { "name": "Objective-C", "bytes": "24301417" }, { "name": "Smalltalk", "bytes": "167909" } ], "symlink_target": "" }
 /* This file contains classes for GTK+ widget. */ //////////////////////////////////////////////////////////////////////////////// // internal header //////////////////////////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////////////////////////// //internal // _os_widget class _os_widget { public: explicit _os_widget(GtkWidget* w = NULL) noexcept : m_w(w) { } _os_widget(const _os_widget& src) noexcept : m_w(src.m_w) { } _os_widget& operator=(GtkWidget* w) noexcept { m_w = w; return *this; } _os_widget& operator=(const _os_widget& src) noexcept { m_w = src.m_w; return *this; } GtkWidget* GetHandle() const noexcept { return m_w; } bool IsValid() const noexcept { return m_w != NULL; } void Destroy() noexcept { if ( m_w != NULL ) { ::gtk_widget_destroy(m_w); m_w = NULL; } } gboolean IsInDestruction() const noexcept { assert( IsValid() ); return ::gtk_widget_in_destrcution(m_w); } void Show() noexcept { assert( IsValid() ); ::gtk_widget_show(m_w); } void Hide() noexcept { assert( IsValid() ); ::gtk_widget_hide(m_w); } void ShowAll() noexcept { assert( IsValid() ); ::gtk_widget_show_all(m_w); } void Draw(cairo_t* cr) noexcept { assert( IsValid() ); ::gtk_widget_draw(m_w, cr); } void QueueDraw() noexcept { assert( IsValid() ); ::gtk_widget_queue_draw(m_w); } GdkFrameClock* GetFrameClock() const noexcept { assert( IsValid() ); return ::gtk_widget_get_frame_clock(m_w); } gint GetScaleFactor() const noexcept { assert( IsValid() ); return ::gtk_widget_get_scale_factor(m_w); } guint AddTickCallback(GtkTickCallback callback, gpointer user_data, GDestroyNotify notify) noexcept { assert( IsValid() ); return ::gtk_widget_add_tick_callback(m_w, callback, user_data, notify); } void RemoveTickCallback(guint id) noexcept { assert( IsValid() ); ::gtk_widget_remove_tick_callback(m_w, id); } void SizeAllocate(GtkAllocation* allocation) noexcept { assert( IsValid() ); ::gtk_widget_size_allocate(m_w, allocation); } void 
SizeAllocateWithBaseline(GtkAllocation* allocation, gint baseline) noexcept { assert( IsValid() ); ::gtk_widget_size_allocate_with_baseline(m_w, allocation, baseline); } void AddAccelerator(const gchar* accel_signal, GtkAccelGroup* accel_group, guint accel_key, GdkModifierType accel_mods, GtkAccelFlags accel_flags) noexcept { assert( IsValid() ); ::gtk_widget_add_accelerator(m_w, accel_signal, accel_group, accel_key, accel_mods, accel_flags); } gboolean RemoveAccelerator(GtkAccelGroup* accel_group, guint accel_key, GdkModifierType accel_mods) noexcept { assert( IsValid() ); return ::gtk_widget_remove_accelerator(m_w, accel_group, accel_key, accel_mods); } void SetAccelPath(const gchar* accel_path, GtkAccelGroup* accel_group) noexcept { assert( IsValid() ); ::gtk_widget_set_accel_path(m_w, accel_path, accel_group); } GList* ListAccelClosures() noexcept { assert( IsValid() ); return ::gtk_widget_list_accel_closures(m_w); } gboolean CanActivateAccel(guint signal_id) const noexcept { assert( IsValid() ); return ::gtk_widget_can_activate_accel(m_w, signal_id); } gboolean Activate() noexcept { assert( IsValid() ); return ::gtk_widget_activate(m_w); } gboolean Intersect(const GtkRectangle* area, GtkRectangle* intersection) noexcept { assert( IsValid() ); return ::gtk_widget_intersect(m_w, area, intersection); } gboolean IsFocus() const noexcept { assert( IsValid() ); return ::gtk_widget_is_focus(m_w); } void GrabFocus() noexcept { assert( IsValid() ); ::gtk_widget_grab_focus(m_w); } void GrabDefault() noexcept { assert( IsValid() ); ::gtk_widget_grab_default(m_w); } void SetName(const gchar* name) noexcept { assert( IsValid() ); ::gtk_widget_set_name(m_w, name); } const gchar* GetName() const noexcept { assert( IsValid() ); return ::gtk_widget_get_name(m_w); } void SetSensitive(gboolean sensitive) noexcept { assert( IsValid() ); ::gtk_widget_set_sensitive(m_w, sensitive); } void SetParentWindow(GdkWindow* parent_window) noexcept { assert( IsValid() ); 
::gtk_widget_set_parent_window(m_w, parent_window); } GdkWindow* GetParentWindow() const noexcept { assert( IsValid() ); return ::gtk_widget_get_parent_window(m_w); } void SetEvents(gint events) noexcept { assert( IsValid() ); ::gtk_widget_set_events(m_w, events); } gint GetEvents() const noexcept { assert( IsValid() ); return ::gtk_widget_get_events(m_w); } void AddEvents(gint events) noexcept { assert( IsValid() ); ::gtk_widget_add_events(m_w, events); } void SetDeviceEvents(GdkDevice* device, GdkEventMask events) noexcept { assert( IsValid() ); ::gtk_widget_set_device_events(m_w, device, events); } GdkEventMask GetDeviceEvents(GdkDevice* device) const noexcept { assert( IsValid() ); return ::gtk_widget_get_device_events(m_w, device); } void AddDeviceEvents(GdkDevice* device, GdkEventMask events) noexcept { assert( IsValid() ); ::gtk_widget_add_device_events(m_w, device, events); } void SetDeviceEnabled(GdkDevice* device, gboolean enabled) noexcept { assert( IsValid() ); ::gtk_widget_set_device_enabled(m_w, device, enabled); } gboolean GetDeviceEnabled(GdkDevice* device) const noexcept { assert( IsValid() ); return ::gtk_widget_get_device_enabled(m_w, device); } GtkWidget* GetTopLevel() const noexcept { assert( IsValid() ); return ::gtk_widget_get_toplevel(m_w); } GtkWidget* GetAncestor(GType widget_type) const noexcept { assert( IsValid() ); return ::gtk_widget_get_ancestor(m_w, widget_type); } GdkVisual* GetVisual() const noexcept { assert( IsValid() ); return ::gtk_widget_get_visual(m_w); } void SetVisual(GdkVisual* visual) noexcept { assert( IsValid() ); ::gtk_widget_set_visual(m_w, visual); } gboolean IsAncestor(GtkWidget* ancestor) const noexcept { assert( IsValid() ); return ::gtk_widget_is_ancestor(m_w, ancestor); } gboolean TranslateCoordinates(GtkWidget* dest_widget, gint src_x, gint src_y, gint* dest_x, gint* dest_y) const noexcept { assert( IsValid() ); return ::gtk_widget_translate_coordinates(m_w, dest_widget, src_x, src_y, dest_x, dest_y); } void 
SetDirection(GtkTextDirection dir) noexcept { assert( IsValid() ); ::gtk_widget_set_direction(m_w, dir); } GtkTextDirection GetDirection() const noexcept { assert( IsValid() ); return ::gtk_widget_get_direction(m_w); } static void SetDefaultDirection(GtkTextDirection dir) noexcept { ::gtk_widget_set_default_direction(dir); } static GtkTextDirection GetDefaultDirection() noexcept { return ::gtk_widget_get_default_direction(); } void ShapeCombineRegion(cairo_region_t* region) noexcept { assert( IsValid() ); ::gtk_widget_shape_combine_region(m_w, region); } void InputShapeCombineRegion(cairo_region_t* region) noexcept { assert( IsValid() ); ::gtk_widget_input_shape_combine_region(m_w, region); } PangoContext* CreatePangoContext() noexcept { assert( IsValid() ); return ::gtk_widget_create_pango_context(m_w); } PangoContext* GetPangoContext() const noexcept { assert( IsValid() ); return ::gtk_widget_get_pango_context(m_w); } void SetFontOptions(const cairo_font_options_t* options) noexcept { assert( IsValid() ); ::gtk_widget_set_font_options(m_w, options); } const cairo_font_options_t* GetFontOptions() const noexcept { assert( IsValid() ); return ::gtk_widget_get_font_options(m_w); } void SetFontMap(PangoFontMap* font_map) noexcept { assert( IsValid() ); ::gtk_widget_set_font_map(m_w, font_map); } PangoFontMap* GetFontMap() const noexcept { assert( IsValid() ); return ::gtk_widget_get_font_map(m_w); } PangoLayout* CreatePangoLayout(const gchar* text) noexcept { assert( IsValid() ); return ::gtk_widget_create_pango_layout(m_w, text); } void QueueDrawArea(gint x, gint y, gint width, gint height) noexcept { assert( IsValid() ); ::gtk_widget_queue_draw_area(m_w, x, y, width, height); } void QueueDrawRegion(const cairo_region_t* region) noexcept { assert( IsValid() ); ::gtk_widget_queue_draw_region(m_w, region); } void SetAppPaintable(gboolean app_paintable) noexcept { assert( IsValid() ); ::gtk_widget_set_app_paintable(m_w, app_paintable); } void 
SetRedrawOnAllocate(gboolean redraw_on_allocate) noexcept { assert( IsValid() ); ::gtk_widget_set_redraw_on_allocate(m_w, redraw_on_allocate); } gboolean MnemonicActivate(gboolean group_cycling) noexcept { assert( IsValid() ); return ::gtk_widget_mnemonic_activate(m_w, group_cycling); } void StyleGetProerty(const gchar* property_name, GValue* value) const noexcept { assert( IsValid() ); ::gtk_widget_style_get_property(m_w, property_name, value); } AtkObject* GetAccessible() const noexcept { assert( IsValid() ); return ::gtk_widget_get_acessible(m_w); } void ChildNotify(const gchar* child_property) noexcept { assert( IsValid() ); ::gtk_widget_child_notify(m_w, child_property); } void FreezeChildNotify() noexcept { assert( IsValid() ); ::gtk_widget_freeze_child_notify(m_w); } GtkWidget* GetParnet() const noexcept { assert( IsValid() ); return ::gtk_widget_get_parent(m_w); } GtkSettings* GetSettings() const noexcept { assert( IsValid() ); return ::gtk_widget_get_settings(m_w); } GtkClipboard* GetClipboard(GdkAtom selection) const noexcept { assert( IsValid() ); return ::gtk_widget_get_clipboard(m_w, selection); } GdkDisplay* GetDisplay() const noexcept { assert( IsValid() ); return ::gtk_widget_get_display(m_w); } GdkScreen* GetScreen() const noexcept { assert( IsValid() ); return ::gtk_widget_get_screen(m_w); } gboolean HasScreen() const noexcept { assert( IsValid() ); return ::gtk_widget_has_screen(m_w); } void GetSizeRequest(gint* width, gint* height) const noexcept { assert( IsValid() ); ::gtk_widget_get_size_request(m_w, width, height); } void SetSizeRequest(gint width, gint height) noexcept { assert( IsValid() ); ::gtk_widget_set_size_request(m_w, width, height); } void ThawChildNotify() noexcept { assert( IsValid() ); ::gtk_widget_thaw_child_notify(m_w); } void SetNoShowAll(gboolean no_show_all) noexcept { assert( IsValid() ); ::gtk_widget_set_no_show_all(m_w, no_show_all); } gboolean GetNoShowAll() const noexcept { assert( IsValid() ); return 
::gtk_widget_get_no_show_all(m_w); } GList* ListMnemonicLabels() const noexcept { assert( IsValid() ); return ::gtk_widget_list_mnemonic_labels(m_w); } void AddMnemonicLabel(GtkWidget* label) noexcept { assert( IsValid() ); ::gtk_widget_add_mnemonic_label(m_w, label); } void RemoveMnemonicLabel(GtkWidget* label) noexcept { assert( IsValid() ); ::gtk_widget_remove_mnemonic_label(m_w, label); } void ErrorBell() noexcept { assert( IsValid() ); ::gtk_widget_error_bell(m_w); } gboolean KeynavFailed(GtkDirectionType direction) noexcept { assert( IsValid() ); return ::gtk_widget_keynav_failed(m_w, direction); } gchar* GetTooltipMarkup() const noexcept { assert( IsValid() ); return ::gtk_widget_get_tooltip_markup(m_w); } void SetTooltipMarkup(const gchar* markup) noexcept { assert( IsValid() ); ::gtk_widget_set_tooltip_markup(m_w, markup); } gchar* GetTooltipText() const noexcept { assert( IsValid() ); return ::gtk_widget_get_tooltip_text(m_w); } void SetTooltipText(const gchar* text) noexcept { assert( IsValid() ); ::gtk_widget_set_tooltip_text(m_w, text); } GtkWindow* GetTooltipWindow() const noexcept { assert( IsValid() ); return ::gtk_widget_get_tooltip_window(m_w); } void SetTooltipWindow(GtkWindow* custom_window) noexcept { assert( IsValid() ); ::gtk_widget_set_tooltip_window(m_w, custom_window); } gboolean GetHasTooltip() const noexcept { assert( IsValid() ); return ::gtk_widget_get_has_tooltip(m_w); } void SetHasTooltip(gboolean has_tooltip) noexcept { assert( IsValid() ); ::gtk_widget_set_has_tooltip(m_w, has_tooltip); } void TriggerTooltipQuery() noexcept { assert( IsValid() ); ::gtk_widget_trigger_tooltip_query(m_w); } GdkWindow* GetWindow() const noexcept { assert( IsValid() ); return ::gtk_widget_get_window(m_w); } void RegisterWindow(GdkWindow* window) noexcept { assert( IsValid() ); ::gtk_widget_register_window(m_w, window); } void UnregisterWindow(GdkWindow* window) noexcept { assert( IsValid() ); ::gtk_widget_unregister_window(m_w, window); } int 
GetAllocatedWidth() const noexcept { assert( IsValid() ); return ::gtk_widget_get_allocated_width(m_w); } int GetAllocatedHeight() const noexcept { assert( IsValid() ); return ::gtk_widget_get_allocated_height(m_w); } void GetAllocation(GtkAllocation* allocation) const noexcept { assert( IsValid() ); ::gtk_widget_get_allocation(m_w, allocation); } void SetAllocation(const GtkAllocation* allocation) noexcept { assert( IsValid() ); ::gtk_widget_set_allocation(m_w, allocation); } int GetAllocatedBaseline() const noexcept { assert( IsValid() ); return ::gtk_widget_get_allocated_baseline(m_w); } void GetAllocatedSize(GtkAllocation* allocation, int* baseline) const noexcept { assert( IsValid() ); ::gtk_widget_get_allocated_size(m_w, allocation, baseline); } void GetClip(GtkAllocation* clip) const noexcept { assert( IsValid() ); ::gtk_widget_get_clip(m_w, clip); } void SetClip(const GtkAllocation* clip) noexcept { assert( IsValid() ); ::gtk_widget_set_clip(m_w, clip); } gboolean GetAppPaintable() const noexcept { assert( IsValid() ); return ::gtk_widget_get_app_paintable(m_w); } gboolean GetCanDefault() const noexcept { assert( IsValid() ); return ::gtk_widget_get_can_default(m_w); } void SetCanDefault(gboolean can_default) noexcept { assert( IsValid() ); ::gtk_widget_set_can_default(m_w, can_default); } gboolean GetCanFocus() const noexcept { assert( IsValid() ); return ::gtk_widget_get_can_focus(m_w); } void SetCanFocus(gboolean can_focus) noexcept { assert( IsValid() ); ::gtk_widget_set_can_focus(m_w, can_focus); } gboolean GetFocusOnClick() const noexcept { assert( IsValid() ); return ::gtk_widget_get_focus_on_click(m_w); } void SetFocusOnClick(gboolean focus_on_click) noexcept { assert( IsValid() ); ::gtk_widget_set_focus_on_click(m_w, focus_on_click); } gboolean GetHasWindow() const noexcept { assert( IsValid() ); return ::gtk_widget_get_has_window(m_w); } gboolean GetSensitive() const noexcept { assert( IsValid() ); return ::gtk_widget_get_sensitive(m_w); } 
gboolean IsSensitive() const noexcept { assert( IsValid() ); return ::gtk_widget_is_sensitive(m_w); } gboolean GetVisible() const noexcept { assert( IsValid() ); return ::gtk_widget_get_visible(m_w); } gboolean IsVisible() const noexcept { assert( IsValid() ); return ::gtk_widget_is_visible(m_w); } void SetVisible(gboolean visible) noexcept { assert( IsValid() ); ::gtk_widget_set_visible(m_w, visible); } GtkStateFlags GetStateFlags() const noexcept { assert( IsValid() ); return ::gtk_widget_get_state_flags(m_w); } gboolean HasDefault() const noexcept { assert( IsValid() ); return ::gtk_widget_has_default(m_w); } gboolean HasFocus() const noexcept { assert( IsValid() ); return ::gtk_widget_has_focus(m_w); } gboolean HasVisibleFocus() const noexcept { assert( IsValid() ); return ::gtk_widget_has_visible_focus(m_w); } gboolean HasGrab() const noexcept { assert( IsValid() ); return ::gtk_widget_has_grab(m_w); } gboolean IsDrawable() const noexcept { assert( IsValid() ); return ::gtk_widget_is_drawable(m_w); } gboolean IsTopLevel() const noexcept { assert( IsValid() ); return ::gtk_widget_is_toplevel(m_w); } void SetWindow(GdkWindow* window) noexcept { assert( IsValid() ); ::gtk_widget_set_window(m_w, window); } void SetReceivesDefault(gboolean receives_default) noexcept { assert( IsValid() ); ::gtk_widget_set_receives_default(m_w, receives_default); } gboolean GetReceivesDefault() const noexcept { assert( IsValid() ); return ::gtk_widget_get_receives_default(m_w); } void SetSupportMultidevice(gboolean support_multidevice) noexcept { assert( IsValid() ); ::gtk_widget_set_support_multidevice(m_w, support_multidevice); } gboolean GetSupportMultidevice() const noexcept { assert( IsValid() ); return ::gtk_widget_get_support_multidevice(m_w); } gboolean GetRealized() const noexcept { assert( IsValid() ); return ::gtk_widget_get_realized(m_w); } gboolean GetMapped() const noexcept { assert( IsValid() ); return ::gtk_widget_get_mapped(m_w); } gboolean 
DeviceIsShadowed(GdkDevice* device) const noexcept { assert( IsValid() ); return ::gtk_widget_device_is_shadowed(m_w, device); } GdkModifierType GetModifierMask(GdkModifierIntent intent) const noexcept { assert( IsValid() ); return ::gtk_widget_get_modifier_mask(m_w, intent); } void InsertActionGroup(const gchar* name, GActionGroup* group) noexcept { assert( IsValid() ); ::gtk_widget_insert_action_group(m_w, name, group); } double GetOpacity() const noexcept { assert( IsValid() ); return ::gtk_widget_get_opacity(m_w); } void SetOpacity(double opacity) noexcept { assert( IsValid() ); ::gtk_widget_set_opacity(m_w, opacity); } const gchar** ListActionPrefixes() const noexcept { assert( IsValid() ); return ::gtk_widget_list_action_prefixes(m_w); } GActionGroup* GetActionGroup(const gchar* prefix) const noexcept { assert( IsValid() ); return ::gtk_widget_get_action_group(m_w, prefix); } GtkWidgetPath* GetPath() const noexcept { assert( IsValid() ); return ::gtk_widget_get_path(m_w); } GtkStyleContext* GetStyleContext() const noexcept { assert( IsValid() ); return ::gtk_widget_get_style_context(m_w); } void ResetStyle() noexcept { assert( IsValid() ); ::gtk_widget_reset_style(m_w); } void GetPreferredHeight(gint* minimum_height, gint* natural_height) const noexcept { assert( IsValid() ); ::gtk_widget_get_preferred_height(m_w, minimum_height, natural_height); } void GetPreferredWidth(gint* minimum_width, gint* natural_width) const noexcept { assert( IsValid() ); ::gtk_widget_get_preferred_width(m_w, minimum_width, natural_width); } void GetPreferredHeightForWidth(gint width, gint* minimum_height, gint* natural_height) const noexcept { assert( IsValid() ); ::gtk_widget_get_preferred_height_for_width(m_w, width, minimum_height, natural_height); } void GetPreferredWidthForHeight(gint height, gint* minimum_width, gint* natural_width) const noexcept { assert( IsValid() ); ::gtk_widget_get_preferred_width_for_height(m_w, height, minimum_width, natural_width); } void 
GetPreferredHeightAndBaselineForWidth(gint width, gint* minimum_height, gint* natural_height, gint* minimum_baseline, gint* natural_baseline) const noexcept { assert( IsValid() ); ::gtk_widget_get_preferred_height_and_baseline_for_width(m_w, width, minimum_height, natural_height, minimum_baseline, natural_baseline); } GtkSizeRequestMode GetRequestMode() const noexcept { assert( IsValid() ); return ::gtk_widget_get_request_mode(m_w); } void GetPreferredSize(GtkRequisition* minimum_size, GtkRequisition* natural_size) const noexcept { assert( IsValid() ); ::gtk_widget_get_preferred_size(m_w, minimum_size, natural_size); } GtkAlign GetHalign() const noexcept { assert( IsValid() ); return ::gtk_widget_get_halign(m_w); } void SetHalign(GtkAlign align) noexcept { assert( IsValid() ); ::gtk_widget_set_halign(m_w, align); } GtkAlign GetValign() const noexcept { assert( IsValid() ); return ::gtk_widget_get_valign(m_w); } GtkAlign GetValignWithBaseline() const noexcept { assert( IsValid() ); return ::gtk_widget_get_valign_with_baseline(m_w); } void SetValign(GtkAlign align) noexcept { assert( IsValid() ); ::gtk_widget_set_valign(m_w, align); } gint GetMarginStart() const noexcept { assert( IsValid() ); return ::gtk_widget_get_margin_start(m_w); } void SetMarginStart(gint margin) noexcept { assert( IsValid() ); ::gtk_widget_set_margin_start(m_w, margin); } gint GetMarginEnd() const noexcept { assert( IsValid() ); return ::gtk_widget_get_margin_end(m_w); } void SetMarginEnd(gint margin) noexcept { assert( IsValid() ); ::gtk_widget_set_margin_end(m_w, margin); } gint GetMarginTop() const noexcept { assert( IsValid() ); return ::gtk_widget_get_margin_top(m_w); } void SetMarginTop(gint margin) noexcept { assert( IsValid() ); ::gtk_widget_set_margin_top(m_w, margin); } gint GetMarginBottom() const noexcept { assert( IsValid() ); return ::gtk_widget_get_margin_bottom(m_w); } void SetMarginBottom(gint margin) noexcept { assert( IsValid() ); ::gtk_widget_set_margin_bottom(m_w, 
margin); } gboolean GetHexpand() const noexcept { assert( IsValid() ); return ::gtk_widget_get_hexpand(m_w); } void SetHexpand(gboolean expand) noexcept { assert( IsValid() ); ::gtk_widget_set_hexpand(m_w, expand); } gboolean GetHexpandSet() const noexcept { assert( IsValid() ); return ::gtk_widget_get_hexpand_set(m_w); } void SetHexpandSet(gboolean set) noexcept { assert( IsValid() ); ::gtk_widget_set_hexpand_set(m_w, set); } gboolean GetVexpand() const noexcept { assert( IsValid() ); return ::gtk_widget_get_vexpand(m_w); } void SetVexpand(gboolean expand) noexcept { assert( IsValid() ); ::gtk_widget_set_vexpand(m_w, expand); } gboolean GetVexpandSet() const noexcept { assert( IsValid() ); return ::gtk_widget_get_vexpand_set(m_w); } void SetVexpandSet(gboolean set) noexcept { assert( IsValid() ); ::gtk_widget_set_vexpand_set(m_w, set); } void QueueComputeExpand() noexcept { assert( IsValid() ); ::gtk_widget_queue_compute_expand(m_w); } gboolean ComputeExpand(GtkOrientation orientation) const noexcept { assert( IsValid() ); return ::gtk_widget_compute_expand(m_w, orientation); } private: GtkWidget* m_w; }; ////////////////////////////////////////////////////////////////////////////////
{ "content_hash": "81f381c9dffd459d1b8266dd9e1609d5", "timestamp": "", "source": "github", "line_count": 886, "max_line_length": 162, "avg_line_length": 26.146726862302483, "alnum_prop": 0.6525511525511526, "repo_name": "ZJUGKC/GKC", "id": "ab48bf0e648cf246acbeac0da0c113fc7740aa75", "size": "23460", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "references/public/include/ui/system/Linux/_os_widget.h", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Batchfile", "bytes": "2342" }, { "name": "C", "bytes": "175003" }, { "name": "C++", "bytes": "2162268" }, { "name": "CMake", "bytes": "23132" }, { "name": "Rich Text Format", "bytes": "371530" }, { "name": "Shell", "bytes": "20824" }, { "name": "VBScript", "bytes": "21358" } ], "symlink_target": "" }
layout: page title: Lee - Higgins Wedding date: 2016-05-24 author: Raymond Mullen tags: weekly links, java status: published summary: Donec ut purus id magna dictum. banner: images/banner/leisure-04.jpg booking: startDate: 05/11/2018 endDate: 05/15/2018 ctyhocn: ELMNYHX groupCode: LHW published: true --- Quisque auctor mi quis orci facilisis, ac elementum nulla aliquam. Praesent ultricies facilisis massa. Ut ex purus, ullamcorper ac lobortis et, blandit non ex. Nam ac elementum tellus. Vivamus molestie enim sem, at ornare mauris rutrum quis. Cras quis faucibus quam. Proin fringilla velit ac turpis auctor venenatis et a odio. Nulla facilisi. Proin efficitur, ligula non aliquam sodales, nisl magna pharetra tortor, vel faucibus nisi urna nec felis. Nunc eget tempor ligula. Nunc a massa porta, porta risus at, sollicitudin orci. Maecenas turpis augue, dapibus nec nulla vitae, feugiat commodo risus. Phasellus elementum velit sed molestie luctus. Sed fringilla congue elit, vel finibus est volutpat non. Pellentesque in eros ac sapien dapibus tincidunt. Pellentesque ac sem tellus. Vestibulum congue pellentesque enim at semper. Morbi bibendum vulputate lacinia. Fusce finibus eu nunc non volutpat. Nam mollis ipsum at sem imperdiet aliquet. Phasellus eu varius turpis. Praesent at viverra ligula. Aliquam at quam ipsum. Aenean nec efficitur dui. Aenean gravida, diam vel sagittis dignissim, mauris nisi porta dui, vel consectetur felis ex et augue. Pellentesque non sem ut nibh vehicula euismod. Suspendisse potenti. * Nullam elementum tellus non lobortis sodales * Suspendisse ut elit volutpat, cursus felis ut, commodo dolor. Sed vitae accumsan ante, vel molestie sapien. Fusce a convallis dui. Integer aliquam vel est varius dignissim. Donec ac viverra augue, sit amet rhoncus arcu. Mauris urna purus, pellentesque in ante feugiat, scelerisque mollis dui. Sed aliquet aliquet accumsan. Morbi pellentesque augue molestie, sollicitudin sapien at, tincidunt neque. 
Curabitur ullamcorper augue dolor, id luctus nunc aliquet nec.
{ "content_hash": "83734e1b3dcef706f279b6af5ee982b4", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 647, "avg_line_length": 93, "alnum_prop": 0.8059628543499511, "repo_name": "KlishGroup/prose-pogs", "id": "9444f837b5f350b9681055fab7a1f600d7dc4d34", "size": "2050", "binary": false, "copies": "1", "ref": "refs/heads/gh-pages", "path": "pogs/E/ELMNYHX/LHW/index.md", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
--- layout: post title: Custom Validators in Angular imageUrl: /images/custom-validators-bg.jpeg date: 2016-03-14T00:00:00.000Z update_date: 2016-12-18T00:00:00.000Z summary: >- Often, we need to add custom validation capabilities to our application's form. In this article we're going to explore how to implement custom validators. categories: - angular tags: - angular2 - forms topic: forms author: pascal_precht related_posts: - Validators Pipeline in Angular 1.3 - Two-way Data Binding in Angular - Custom Form Controls in Angular - Reactive Forms in Angular - Template-driven Forms in Angular - RxJS Master Class and courseware updates related_videos: - '189792758' - '189785428' - '175255006' - '193524896' - '175218351' - '189618526' --- Forms are part of almost every web application out there. Angular strives for making working with forms a breeze. While there are a couple of built-in validators provided by the framework, we often need to add some custom validation capabilities to our application's form, in order to fulfill our needs. We can easily extend the browser vocabulary with additional custom validators and in this article we are going to explore how to do that. <div class="thtrm-toc is-sticky" markdown="1"> ### TABLE OF CONTENTS {:.no_toc} * TOC {:toc} </div> ## Built-in Validators Angular comes with a subset of built-in validators out of the box. We can apply them either declaratively as directives on elements in our DOM, in case we're building a <strong>template-driven</strong> form, or imperatively using the `FormControl` and `FormGroup` or `FormBuilder` APIs, in case we're building a <strong>reactive</strong> forms. If you don't know what it's all about with template-driven and reactive forms, don't worry, we have an articles about both topics [here](/angular/2016/03/21/template-driven-forms-in-angular-2.html) and [here](/angular/2016/06/22/model-driven-forms-in-angular-2.html). 
The supported built-in validators, at the time of writing this article, are: - **required** - Requires a form control to have a non-empty value - **minlength** - Requires a form control to have a value of a minimum length - **maxlength** - Requires a form control to have a value of a maximum length - **pattern** - Requires a form control's value to match a given regex As mentioned earlier, validators can be applied by simply using their corresponding directives. To make these directives available, we need to import Angular's `FormsModule` to our application module first: {% highlight js %} {% raw %} import { NgModule } from '@angular/core'; import { BrowserModule } from '@angular/platform-browser'; import { FormsModule } from '@angular/forms'; @NgModule({ imports: [BrowserModule, FormsModule], // we add FormsModule here declarations: [AppComponent], bootstrap: [AppComponent] }) export class AppModule {} {% endraw %} {% endhighlight %} Once this is done, we can use all directives provided by this module in our application. The following form shows how the built-in validators are applied to dedicated form controls: {% highlight html %} {% raw %} <form novalidate> <input type="text" name="name" ngModel required> <input type="text" name="street" ngModel minlength="3"> <input type="text" name="city" ngModel maxlength="10"> <input type="text" name="zip" ngModel pattern="[A-Za-z]{5}"> </form> {% endraw %} {% endhighlight %} Or, if we had a reactive form, we'd need to import the `ReactiveFormsModule` first: {% highlight js %} {% raw %} import { ReactiveFormsModule } from '@angular/forms'; @NgModule({ imports: [BrowserModule, ReactiveFormsModule], ... 
}) export class AppModule {} {% endraw %} {% endhighlight %} And can then build our form either using `FormControl` and `FormGroup` APIs: {% highlight js %} {% raw %} @Component() class Cmp { form: FormGroup; ngOnInit() { this.form = new FormGroup({ name: new FormControl('', Validators.required)), street: new FormControl('', Validators.minLength(3)), city: new FormControl('', Validators.maxLength(10)), zip: new FormControl('', Validators.pattern('[A-Za-z]{5}')) }); } } {% endraw %} {% endhighlight %} Or use the less verbose `FormBuilder` API that does the same work for us: {% highlight js %} {% raw %} @Component() class Cmp { constructor(private fb: FormBuilder) {} ngOnInit() { this.form = this.fb.group({ name: ['', Validators.required], street: ['', Validators.minLength(3)], city: ['', Validators.maxLength(10)], zip: ['', Validators.pattern('[A-Za-z]{5}')] }); } } {% endraw %} {% endhighlight %} We would still need to associate a form model with a form in the DOM using the `[formGroup]` directive likes this: {% highlight html %} {% raw %} <form novalidate [formGroup]="form"> ... </form> {% endraw %} {% endhighlight %} Observing these two to three different methods of creating a form, we might wonder how it is done that we can use the validator methods imperatively in our component code, and apply them as directives to input controls declaratively in our HTML code. It turns out there's really not such a big magic involved, so let's build our own custom email validator. ## Building a custom validator In it's simplest form, a validator is really just a function that takes a `Control` and returns either `null` when it's valid, or and error object if it's not. A TypeScript interface for such a validator looks something like this: {% highlight js %} {% raw %} interface Validator<T extends FormControl> { (c:T): {[error: string]:any}; } {% endraw %} {% endhighlight %} Let's implement a validator function `validateEmail` which implements that interface. 
All we need to do is to define a function that takes a `FormControl`, checks if it's value matches the regex of an email address, and if not, returns an error object, or `null` in case the value is valid. Here's what such an implementation could look like: {% highlight js %} {% raw %} import { FormControl } from '@angular/forms'; function validateEmail(c: FormControl) { let EMAIL_REGEXP = ... return EMAIL_REGEXP.test(c.value) ? null : { validateEmail: { valid: false } }; } {% endraw %} {% endhighlight %} Pretty straight forward right? We import `FormControl` from `@angular/forms` to have the type information the function's signature and simply test a regular expression with the `FormControl`'s value. That's it. That's a validator. But how do we apply them to other form controls? Well, we've seen how `Validators.required` and the other validators are added to the `new FormControl()` calls. `FormControl()` takes an initial value, a synchronous validator and an asynchronous validator. Which means, we do exactly the same with our custom validators. {% highlight js %} {% raw %} ngOnInit() { this.form = new FormGroup({ ... email: new FormControl('', validateEmail) }); } {% endraw %} {% endhighlight %} Don't forget to import `validateEmail` accordinlgy, if necessary. Okay cool, now we know how to add our custom validator to a form control. However, what if we want to combine multiple validators on a single control? Let's say our email field is `required` **and** needs to match the shape of an email address. `FormControl`s takes a single synchronous and a single asynchronous validator, or, a collection of synchronous and asynchronous validators. Here's what it looks like if we'd combine the `required` validator with our custom one: {% highlight js %} {% raw %} ngOnInit() { this.form = new FormGroup({ ... 
email: new FormControl('', [ Validators.required, validateEmail ]) }); } {% endraw %} {% endhighlight %} ## Building custom validator directives Now that we're able to add our custom validator to our form controls imperatively when building model-driven forms, we might also enable our validator to be used in template driven forms. In other words: We need a directive. The validator should be usable like this: {% highlight html %} {% raw %} <form novalidate> ... <input type="email" name="email" ngModel validateEmail> </form> {% endraw %} {% endhighlight %} `validateEmail` is applied as an attribute to the `<input>` DOM element, which already gives us an idea what we need to do. We need to build a directive with a matching selector so it will be executed on all input controls where the directive is applied. Let's start off with that. {% highlight js %} {% raw %} import { Directive } from '@angular/core'; @Directive({ selector: '[validateEmail][ngModel]' }) export class EmailValidator {} {% endraw %} {% endhighlight %} We import the `@Directive` decorator form `@angular/core` and use it on a new `EmailValidator` class. If you're familiar with the `@Component` decorator that this is probably not new to you. In fact, `@Directive` is a superset of `@Component` which is why most of the configuration properties are available. Okay, technically we could already make this directive execute in our app, all we need to do is to add it to our module's `declarations`: {% highlight js %} {% raw %} import { EmailValidator } from './email.validator'; @NgModule({ ... declarations: [AppComponent, EmailValidator], }) export class AppModule {} {% endraw %} {% endhighlight %} Even though this works, there's nothing our directive does at the moment. What we want to do is to make sure that our custom validator is executed when Angular compiles this directive. How do we get there? Angular has an internal mechanism to execute validators on a form control. 
It maintains a **multi provider** for a dependency token called `NG_VALIDATORS`. If you've read our article on [multi providers in Angular](/angular2/2015/11/23/multi-providers-in-angular-2.html), you know that Angular injects multiple values for a single token that is used for a multi provider. If you haven't, we highly recommend checking it out as the rest of this article is based on it. It turns out that **all** built-in validators are already added to the `NG_VALIDATORS` token. So whenever Angular instantiates a form control and performs validation, it basically injects the dependency for the `NG_VALIDATORS` token, which is a list of all validators, and executes them one by one on that form control. Since multi providers can be extended by adding more multi providers to a token, we can consider `NG_VALIDATORS` as a hook to add our own validators. Let's add our validator to the `NG_VALIDATORS` via our new directive: {% highlight js %} {% raw %} import { Directive } from '@angular/core'; import { NG_VALIDATORS } from '@angular/forms'; @Directive({ selector: '[validateEmail][ngModel]', providers: [ { provide: NG_VALIDATORS, useValue: validateEmail, multi: true } ] }) class EmailValidator {} {% endraw %} {% endhighlight %} Again, if you've read our article on multi providers, this should look very familiar to you. We basically add a new value to the `NG_VALIDATORS` token by taking advantage of multi providers. Angular will pick our validator up by injecting what it gets for `NG_VALIDATORS`, and performs validation on a form control. Awesome, we can now use our validator for reactive**and** for template-driven forms! ## Custom Validators with dependencies Sometimes, a custom validator has dependencies so we need a way to inject them. 
Let's say our email validator needs an `EmailBlackList` service, to check if the given control value is not only a valid email address but also not on our email black list (in an ideal world, we'd build a separate validator for checking against an email black list, but we use that as a motivation for now to have a dependency). **The not-so-nice way** One way to handle this is to create a factory function that returns our `validateEmail` function, which then uses an instance of `EmailBlackList` service. Here's what such a factory function could look like: {% highlight js %} {% raw %} import { FormControl } from '@angular/forms'; function validateEmailFactory(emailBlackList: EmailBlackList) { return (c: FormControl) => { let EMAIL_REGEXP = ... let isValid = /* check validation with emailBlackList */ return isValid ? null : { validateEmail: { valid: false } }; }; } {% endraw %} {% endhighlight %} This would allow us to register our custom validator via dependency injection like this: {% highlight js %} {% raw %} @Directive({ ... providers: [ { provide: NG_VALIDATORS, useFactory: (emailBlackList) => { return validateEmailFactory(emailBlackList); }, deps: [EmailBlackList] multi: true } ] }) class EmailValidator {} {% endraw %} {% endhighlight %} We can't use `useValue` as provider recipe anymore, because we don't want to return the factory function, but rather what the factory function **returns**. And since our factory function has a dependency itself, we need to have access to dependency tokens, which is why we use `useFactory` and `deps`. If this is entirely new to you, you might want to read our article on [Dependency Injection in Angular](/angular/2015/05/18/dependency-injection-in-angular-2.html) before we move on. Even though this would work, it's quite a lot of work and also very verbose. We can do better here. **The better way** Wouldn't it be nice if we could use constructor injection as we're used to it in Angular? Yes, and guess what, Angular has us covered. 
It turns out that a validator can also be a **class** as long as it implements a `validate(c: FormControl)` method. Why is that nice? Well, we can inject our dependency using constructor injection and don't have to setup a provider factory as we did before. Here's what our `EmailValidator` class would look like when we apply this pattern to it: {% highlight js %} {% raw %} @Directive({ ... }) class EmailValidator { validator: Function; constructor(emailBlackList: EmailBlackList) { this.validator = validateEmailFactory(emailBlackList); } validate(c: FormControl) { return this.validator(c); } } {% endraw %} {% endhighlight %} However, we now need to adjust the provider for `NG_VALIDATORS`, because we want to use an instance of `EmailValidator` to be used for validation, not the factory function. This seems easy to fix, because we know that we create instances of classes for dependency injection using the `useClass` recipe. However, we already added `EmailValidator` to the `directives` property of our component, which is a provider with the `useClass` recipe. We want to make sure that we get the exact same instance of `EmailValidator` on our form control, even though, we define a new provider for it. Luckily we have the `useExisting` recipe for that. `useExisting` defines an alias token for but returns the same instance as the original token: {% highlight js %} {% raw %} @Directive({ ... providers: [ { provide: NG_VALIDATORS, useExisting: EmailValidator, multi: true } ] }) class EmailValidator { ... } {% endraw %} {% endhighlight %} **Yikes! This won't work** . We're referencing a token (`EmailValidator`) which is undefined at the point we're using it because the class definition itself happens later in the code. That's where `forwardRef()` comes into play. {% highlight js %} {% raw %} import { forwardRef } from '@angular/core'; @Directive({ ... 
providers: [ { provide: NG_VALIDATORS, useExisting: forwardRef(() => EmailValidator), multi: true } ] }) class EmailValidator { ... } {% endraw %} {% endhighlight %} If you don't know what `forwardRef()` does, you might want to read our article on [Forward References in Angular](/angular/2015/09/03/forward-references-in-angular-2.html). Here's the full code for our custom email validator: {% highlight js %} {% raw %} import { Directive, forwardRef } from '@angular/core'; import { NG_VALIDATORS, FormControl } from '@angular/forms'; function validateEmailFactory(emailBlackList: EmailBlackList) { return (c: FormControl) => { let EMAIL_REGEXP = /^[a-z0-9!#$%&'*+\/=?^_`{|}~.-]+@[a-z0-9]([a-z0-9-]*[a-z0-9])?(\.[a-z0-9]([a-z0-9-]*[a-z0-9])?)*$/i; return EMAIL_REGEXP.test(c.value) ? null : { validateEmail: { valid: false } }; }; } @Directive({ selector: '[validateEmail][ngModel],[validateEmail][formControl]', providers: [ { provide: NG_VALIDATORS, useExisting: forwardRef(() => EmailValidator), multi: true } ] }) export class EmailValidator { validator: Function; constructor(emailBlackList: EmailBlackList) { this.validator = validateEmailFactory(emailBlackList); } validate(c: FormControl) { return this.validator(c); } } {% endraw %} {% endhighlight %} You might notice that we've extended the selector, so that our validator not only works with `ngModel` but also with `formControl` directives. If you're interested in more articles on forms in Angular, we've written a couple about [template-driven forms](/angular/2016/03/21/template-driven-forms-in-angular-2.html) and [reactive forms](http://blog.thoughtram.io/angular/2016/06/22/model-driven-forms-in-angular-2.html).
{ "content_hash": "49c0bdfad6fee7a6bda592463fb1bfef", "timestamp": "", "source": "github", "line_count": 452, "max_line_length": 612, "avg_line_length": 38.057522123893804, "alnum_prop": 0.7209626787582839, "repo_name": "thoughtram/blog", "id": "81ee120c8a710b1b60c1e3dc65d572c4cfa4af64", "size": "17202", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "_posts/2016-03-14-custom-validators-in-angular-2.md", "mode": "33188", "license": "mit", "language": [ { "name": "Dockerfile", "bytes": "520" }, { "name": "HTML", "bytes": "64742" }, { "name": "JavaScript", "bytes": "62722" }, { "name": "SCSS", "bytes": "149841" }, { "name": "Shell", "bytes": "840" } ], "symlink_target": "" }
<?xml version="1.0" encoding="utf-8"?> <RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android" android:orientation="vertical" android:layout_width="match_parent" android:layout_height="match_parent" android:layout_margin="6dp" android:background="@drawable/card_add_collection"> <LinearLayout android:layout_centerInParent="true" android:layout_width="match_parent" android:layout_height="match_parent" android:orientation="vertical" android:gravity="center"> <TextView android:background="@drawable/round_button" android:layout_width="36dp" android:layout_height="36dp" android:gravity="center" android:text="+" android:textColor="#FFFFFF" android:textSize="24sp" android:id="@+id/button"/> <TextView android:layout_marginTop="8dp" android:layout_width="wrap_content" android:layout_height="wrap_content" android:text="컬렉션 만들기" android:textSize="14sp" android:textStyle="bold" android:textColor="#000" android:alpha="0.54"/> </LinearLayout> </RelativeLayout>
{ "content_hash": "a6f991ce381ac69644b6a061a727c0b4", "timestamp": "", "source": "github", "line_count": 37, "max_line_length": 62, "avg_line_length": 34.08108108108108, "alnum_prop": 0.6082474226804123, "repo_name": "shygiants/fit_android", "id": "ba452582aac30f67036abd2eb61102ef28971981", "size": "1273", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/res/layout/card_add_collection.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "158863" } ], "symlink_target": "" }
import { Component, Input } from '@angular/core'; import { DragulaService } from 'ng2-dragula/ng2-dragula'; import { Monitoring } from '../model/monitoring'; import { QuestionBase } from '../model/question-base'; import { DropdownQuestion, DropdownOption } from '../model/question-dropdown'; import { TextboxQuestion } from '../model/question-textbox'; import { LongTextQuestion } from '../model/question-longtext'; @Component({ selector: 'sowp-questions', template: require('./questions.component.html'), }) export class QuestionsComponent { @Input() monitoring: Monitoring; constructor( private _dragula: DragulaService) { this._dragula.setOptions('questions', { moves: function(el, container, handle:HTMLElement) { return handle.classList.contains('dragula-handle'); } }); } addDropdownQuestion() { this.monitoring.questions = [...this.monitoring.questions, new DropdownQuestion()]; } addTextBoxQuestion() { this.monitoring.questions = [...this.monitoring.questions, new TextboxQuestion()]; } addLongTextQuestion() { this.monitoring.questions = [...this.monitoring.questions, new LongTextQuestion()]; } removeQuestion(question: QuestionBase<any>) { var index = this.monitoring.questions.indexOf(question); if(index >= 0) { this.monitoring.questions = [ ...this.monitoring.questions.slice(0, index), ...this.monitoring.questions.slice(index + 1), ]; } } moveQuestion(question: QuestionBase<any>, change: number, ev: MouseEvent) { let index = this.monitoring.questions.indexOf(question); let questions = this.monitoring.questions.slice(); questions[index] = questions[index + change]; questions[index + change] = question; this.monitoring.questions = questions; ev.preventDefault(); } }
{ "content_hash": "2257605608f7cadea7643d250e6584b0", "timestamp": "", "source": "github", "line_count": 59, "max_line_length": 91, "avg_line_length": 33.40677966101695, "alnum_prop": 0.6423135464231354, "repo_name": "watchdogpolska/bliski_publikator", "id": "f435d620fbd418f44b8a072537d512142521eea3", "size": "1971", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "bliski_publikator/angular2/src/app/editor/questions.component.ts", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "38778" }, { "name": "HTML", "bytes": "109977" }, { "name": "JavaScript", "bytes": "14459" }, { "name": "Python", "bytes": "184036" }, { "name": "TypeScript", "bytes": "38566" } ], "symlink_target": "" }
<?php namespace stlrnz\json\converter\annotation; use Doctrine\Common\Annotations\Annotation\Attribute; use Doctrine\Common\Annotations\Annotation\Attributes; use stlrnz\json\converter\exception\AnnotationException; /** * JsonDate annotation * * @author Stefan Lorenz * @license Apache License 2.0 * @license http://www.apache.org/licenses/LICENSE-2.0 * * @package stlrnz\json\converter\annotation * * * @Annotation * @Target({"PROPERTY"}) * @Attributes({ * @Attribute("format", type="string") * }) */ class JsonDate { /** * @var string */ protected $format = null; /** * @param array $values * @throws AnnotationException */ public function __construct($values) { if(isset($values['format'])) { $this->format = $values['format']; } else { throw new AnnotationException('Annotation JsonDate must define parameter \'format\''); } } /** * @return string */ public function getFormat() { return $this->format; } }
{ "content_hash": "cef8d8817e3c7998d3714d49d924ea75", "timestamp": "", "source": "github", "line_count": 58, "max_line_length": 89, "avg_line_length": 16.879310344827587, "alnum_prop": 0.6700715015321757, "repo_name": "stlrnz/PHPJsonConverter", "id": "3588b665300e4b8942ab8cf9c057c720ae1ccd60", "size": "1573", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/stlrnz/json/converter/annotation/JsonDate.php", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "PHP", "bytes": "139295" } ], "symlink_target": "" }
'use strict'; var fs = require('graceful-fs'); var jasminePit = require('jasmine-pit'); var JasmineReporter = require('./JasmineReporter'); var path = require('path'); var utils = require('../lib/utils'); var JASMINE_PATH = require.resolve('../../vendor/jasmine/jasmine-1.3.0'); var jasmineFileContent = fs.readFileSync(require.resolve(JASMINE_PATH), 'utf8'); var JASMINE_ONLY_ROOT = path.dirname(require.resolve('jasmine-only')); var POTENTIALLY_PRECOMPILED_FILE = path.join( JASMINE_ONLY_ROOT, 'app', 'js', 'jasmine_only.js' ); var COFFEE_SCRIPT_FILE = path.join( JASMINE_ONLY_ROOT, 'app', 'js', 'jasmine_only.coffee' ); var jasmineOnlyContent = fs.existsSync(POTENTIALLY_PRECOMPILED_FILE) ? fs.readFileSync(POTENTIALLY_PRECOMPILED_FILE, 'utf8') : require('coffee-script').compile( fs.readFileSync(COFFEE_SCRIPT_FILE, 'utf8') ); function jasmineTestRunner(config, environment, moduleLoader, testPath) { // Jasmine does stuff with timers that affect running the tests. However, we // also mock out all the timer APIs (to make them test-controllable). // // To account for this conflict, we set up jasmine in an environment with real // timers (instead of mock timers). environment.fakeTimers.runWithRealTimers(function() { // Execute jasmine's main code environment.runSourceText(jasmineFileContent, JASMINE_PATH); // Install jasmine-pit -- because it's amazing jasminePit.install(environment.global); // Install jasmine-only environment.runSourceText(jasmineOnlyContent); // Node must have been run with --harmony in order for WeakMap to be // available prior to version 0.12 if (typeof WeakMap !== 'function') { throw new Error( 'Please run node with the --harmony flag! jest requires WeakMap ' + 'which is only available with the --harmony flag in node < v0.12' ); } // Mainline Jasmine sets __Jasmine_been_here_before__ on each object to // detect cycles, but that doesn't work on frozen objects so we use a // WeakMap instead. 
var _comparedObjects = new WeakMap(); environment.global.jasmine.Env.prototype.compareObjects_ = function(a, b, mismatchKeys, mismatchValues) { if (_comparedObjects.get(a) === b && _comparedObjects.get(b) === a) { return true; } var areArrays = environment.global.jasmine.isArray_(a) && environment.global.jasmine.isArray_(b); _comparedObjects.set(a, b); _comparedObjects.set(b, a); var hasKey = function(obj, keyName) { return ( obj !== null && obj !== undefined && obj[keyName] !== environment.global.jasmine.undefined ); }; for (var property in b) { if (areArrays && typeof b[property] === 'function') { continue; } if (!hasKey(a, property) && hasKey(b, property)) { mismatchKeys.push( 'expected has key \'' + property + '\', but missing from actual.' ); } } for (property in a) { if (areArrays && typeof a[property] === 'function') { continue; } if (!hasKey(b, property) && hasKey(a, property)) { mismatchKeys.push( 'expected missing key \'' + property + '\', but present in ' + 'actual.' ); } } for (property in b) { // The only different implementation from the original jasmine if (areArrays && (typeof a[property] === 'function' || typeof b[property] === 'function')) { continue; } var areEqual = this.equals_( a[property], b[property], mismatchKeys, mismatchValues ); if (!areEqual) { var aprop; var bprop; if (!a[property]) { aprop = a[property]; } else if (a[property].toString) { aprop = environment.global.jasmine.util.htmlEscape( a[property].toString() ); } else { aprop = Object.prototype.toString.call(a[property]); } if (!b[property]) { bprop = b[property]; } else if (b[property].toString) { bprop = environment.global.jasmine.util.htmlEscape( b[property].toString() ); } else { bprop = Object.prototype.toString.call(b[property]); } mismatchValues.push( '\'' + property + '\' was \'' + bprop + '\' in expected, but was \'' + aprop + '\' in actual.' 
); } } if (areArrays && a.length !== b.length) { mismatchValues.push('arrays were not the same length'); } _comparedObjects.delete(a); _comparedObjects.delete(b); return (mismatchKeys.length === 0 && mismatchValues.length === 0); }; if (config.setupTestFrameworkScriptFile) { var setupScriptContent = utils.readAndPreprocessFileContent( config.setupTestFrameworkScriptFile, config ); utils.runContentWithLocalBindings( environment.runSourceText.bind(environment), setupScriptContent, config.setupTestFrameworkScriptFile, { __dirname: path.dirname(config.setupTestFrameworkScriptFile), __filename: config.setupTestFrameworkScriptFile, require: moduleLoader.constructBoundRequire( config.setupTestFrameworkScriptFile ), jest: moduleLoader.getJestRuntime(config.setupTestFrameworkScriptFile) } ); } }); var jasmine = environment.global.jasmine; jasmine.getEnv().beforeEach(function() { this.addMatchers({ toBeCalled: function() { if (this.actual.mock === undefined) { throw Error('toBeCalled() should be used on a mock function'); } return this.actual.mock.calls.length !== 0; }, lastCalledWith: function() { if (this.actual.mock === undefined) { throw Error('lastCalledWith() should be used on a mock function'); } var calls = this.actual.mock.calls; var args = Array.prototype.slice.call(arguments); this.env.currentSpec.expect(calls[calls.length - 1]).toEqual(args); return true; }, toBeCalledWith: function() { if (this.actual.mock === undefined) { throw Error('toBeCalledWith() should be used on a mock function'); } var calls = this.actual.mock.calls; var args = Array.prototype.slice.call(arguments); // Often toBeCalledWith is called on a mock that only has one call, so // we can give a better error message in this case. 
if (calls.length === 1) { var expect = this.env.currentSpec.expect(calls[0]); if (this.isNot) { expect = expect.not; } expect.toEqual(args); return !this.isNot; } return calls.some(function(call) { return this.env.equals_(call, args); }, this); } }); if (!config.persistModuleRegistryBetweenSpecs) { moduleLoader.requireModule( __filename, 'jest-runtime' ).resetModuleRegistry(); } }); var jasmineReporter = new JasmineReporter({ noHighlight: config.noHighlight, }); jasmine.getEnv().addReporter(jasmineReporter); // Run the test by require()ing it moduleLoader.requireModule(testPath, './' + path.basename(testPath)); jasmine.getEnv().execute(); return jasmineReporter.getResults(); } module.exports = jasmineTestRunner;
{ "content_hash": "27889f40d22933f207de2a4073d7f96a", "timestamp": "", "source": "github", "line_count": 245, "max_line_length": 80, "avg_line_length": 32.08163265306123, "alnum_prop": 0.5821882951653944, "repo_name": "WildDylan/ReactNativeDemo", "id": "701771b420a381086b75d8eab8996259a9805693", "size": "8157", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "ReactNativeDemo/ReactNativeDemo/Pods/React/node_modules/jest-cli/src/jasmineTestRunner/jasmineTestRunner.js", "mode": "33188", "license": "mit", "language": [ { "name": "AppleScript", "bytes": "1171" }, { "name": "C", "bytes": "36792" }, { "name": "CSS", "bytes": "1645" }, { "name": "HTML", "bytes": "4770" }, { "name": "JavaScript", "bytes": "783162" }, { "name": "Objective-C", "bytes": "617722" }, { "name": "Shell", "bytes": "5439" } ], "symlink_target": "" }
<?php require_once('SecurityException.php'); /** * * @author Tobias Sarnowski */ class SecurityConfigurationException extends SecurityException { }
{ "content_hash": "c9eecfbd34f76fd0e9414452bb9f76d5", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 64, "avg_line_length": 12.076923076923077, "alnum_prop": 0.7261146496815286, "repo_name": "sarnowski/TypeSafe-security", "id": "d273d7ee2c5b85dc852e600d248ec61505900771", "size": "761", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/php/TypeSafe/security/SecurityConfigurationException.php", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "PHP", "bytes": "21361" } ], "symlink_target": "" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (1.8.0_60-ea) on Tue Aug 16 17:15:35 EDT 2016 --> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <title>Uses of Package org.wildfly.swarm.netflix.hystrix (Public javadocs 2016.8.1 API)</title> <meta name="date" content="2016-08-16"> <link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Package org.wildfly.swarm.netflix.hystrix (Public javadocs 2016.8.1 API)"; } } catch(err) { } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="package-summary.html">Package</a></li> <li>Class</li> <li class="navBarCell1Rev">Use</li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> <div class="aboutLanguage">WildFly Swarm API, 2016.8.1</div> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?org/wildfly/swarm/netflix/hystrix/package-use.html" target="_top">Frames</a></li> <li><a href="package-use.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul 
class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h1 title="Uses of Package org.wildfly.swarm.netflix.hystrix" class="title">Uses of Package<br>org.wildfly.swarm.netflix.hystrix</h1> </div> <div class="contentContainer">No usage of org.wildfly.swarm.netflix.hystrix</div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.bottom.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="package-summary.html">Package</a></li> <li>Class</li> <li class="navBarCell1Rev">Use</li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> <div class="aboutLanguage">WildFly Swarm API, 2016.8.1</div> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?org/wildfly/swarm/netflix/hystrix/package-use.html" target="_top">Frames</a></li> <li><a href="package-use.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = 
document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &#169; 2016 <a href="http://www.jboss.org">JBoss by Red Hat</a>. All rights reserved.</small></p> </body> </html>
{ "content_hash": "87a225e1f95dbbf14f851fc85d34fdb7", "timestamp": "", "source": "github", "line_count": 128, "max_line_length": 135, "avg_line_length": 35.1484375, "alnum_prop": 0.6159146476994888, "repo_name": "wildfly-swarm/wildfly-swarm-javadocs", "id": "cd4beae50675cf9fc8896811aa92eceb125f1158", "size": "4499", "binary": false, "copies": "1", "ref": "refs/heads/gh-pages", "path": "2016.8.1/apidocs/org/wildfly/swarm/netflix/hystrix/package-use.html", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
from typing import Tuple from CommonServerPython import * def get_default_from_date(date_range: str) -> str: """ Gets a range string (eg. 30 days) and return a date string in the relevant Demisto query format. :param date_range: string Range (eg. 2 months) to create the date string from :return: string Date string in the relevant Demisto query format, e.g: 2016-01-02T15:04:05Z. """ from_date, _ = parse_date_range(date_range=date_range) str_from_date = from_date.strftime('%Y-%m-%dT%H:%M:%SZ') return str_from_date def get_relevant_incidents(email_to, email_from, from_date) -> Tuple[int, int]: """ Gets a email to and from addresses, and a date from string. :param email_to: string email to address :param email_from: string email from address :param from_date: string Date string in the relevant Demisto query format :return: int, int number of relevant to and from incidents """ resp = demisto.executeCommand("getIncidents", {"query": f"email_to:{email_to} --status:Closed fromdate: {from_date}"}) if isError(resp[0]): raise Exception(resp) email_to_total = demisto.get(resp[0], "Contents.total") resp = demisto.executeCommand("getIncidents", {"query": f"email_from:{email_from} --status:Closed fromdate: {from_date}"}) if isError(resp[0]): raise Exception(resp) email_from_total = demisto.get(resp[0], "Contents.total") return email_to_total, email_from_total def create_widget_entry(email_to, email_from, email_to_total, email_from_total) -> dict: """ Gets a email to and from addresses, and a to and from total incidents number. 
:param email_to: string email to address :param email_from: string email from address :param email_to_total: int email to relevant total incidents :param email_from_total: int email from relevant total incidents :return: data the relevant bar table """ data = { "Type": 17, "ContentsFormat": "bar", "Contents": { "stats": [ { "data": [ email_to_total ], "groups": None, "name": str(email_to), "label": f"To: {str(email_to)}", "color": "rgb(255, 23, 68)" }, { "data": [ email_from_total ], "groups": None, "name": str(email_from), "label": f"From: {str(email_from)}", "color": "rgb(255, 144, 0)" } ], "params": { "layout": "vertical" } } } return data def main(): try: # Get current incident data email_to = demisto.get(demisto.incidents()[0], 'CustomFields.email_to') email_from = demisto.get(demisto.incidents()[0], 'CustomFields.email_from') if not (email_to and email_from): demisto.results("None") else: default_from_date = get_default_from_date('30 days') email_to_total, email_from_total = get_relevant_incidents(email_to, email_from, default_from_date) data = create_widget_entry(email_to, email_from, email_to_total, email_from_total) demisto.results(data) except Exception as err: return_error(str(err)) if __name__ in ('__main__', '__builtin__', 'builtins'): main()
{ "content_hash": "4b919a81629f882f287b53046f72e907", "timestamp": "", "source": "github", "line_count": 110, "max_line_length": 110, "avg_line_length": 33.63636363636363, "alnum_prop": 0.5445945945945946, "repo_name": "VirusTotal/content", "id": "9aba07903344f0a0f197e3adefe8393f63f43786", "size": "3700", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Packs/CommonScripts/Scripts/NumberOfPhishingAttemptPerUser/NumberOfPhishingAttemptPerUser.py", "mode": "33188", "license": "mit", "language": [ { "name": "Dockerfile", "bytes": "2146" }, { "name": "HTML", "bytes": "205901" }, { "name": "JavaScript", "bytes": "1584075" }, { "name": "PowerShell", "bytes": "442288" }, { "name": "Python", "bytes": "47594464" }, { "name": "Rich Text Format", "bytes": "480911" }, { "name": "Shell", "bytes": "108066" }, { "name": "YARA", "bytes": "1185" } ], "symlink_target": "" }
<?php require_once('lib/bootstrap.php'); class RoleTest extends KirbyTestCase { protected $role; public function setUp(): void { $this->role = new Role(array( 'id' => 'testrole', 'name' => 'Test role', 'permissions' => array( '*' => true, 'panel.site.update' => false, 'panel.user.*' => false, 'panel.user.error' => 'this is invalid', 'panel.user.update' => function($user) { switch($user) { case 'testuser': return true; case 'failuser1': return false; case 'failuser2': return 'This user should fail.'; case 'returnerror': return 123; default: throw new Error('Invalid user.'); } } ) )); } public function testMeta() { $this->assertEquals('testrole', $this->role->id()); $this->assertEquals('Test role', $this->role->name()); $this->assertFalse($this->role->isDefault()); $this->role->default = true; $this->assertTrue($this->role->isDefault()); } public function testPermission() { $result = $this->role->permission('testpermission'); $this->assertTrue($result->status()); $this->assertNull($result->message()); $result = $this->role->permission('panel.site.update'); $this->assertFalse($result->status()); $this->assertNull($result->message()); $result = $this->role->permission('panel.user.test'); $this->assertFalse($result->status()); $this->assertNull($result->message()); $result = $this->role->permission('panel.user.update', 'testuser'); $this->assertTrue($result->status()); $this->assertNull($result->message()); $result = $this->role->permission('panel.user.update', 'failuser1'); $this->assertFalse($result->status()); $this->assertNull($result->message()); $result = $this->role->permission('panel.user.update', 'failuser2'); $this->assertFalse($result->status()); $this->assertEquals('This user should fail.', $result->message()); $event = new Kirby\Event('panel.user.update'); $result = $this->role->permission($event, 'failuser2'); $this->assertFalse($result->status()); $this->assertEquals('This user should fail.', $result->message()); } public function 
testPermissionInvalidEvent() { $this->expectException('Error'); $this->expectExceptionMessage('Invalid event'); $this->role->permission(new Obj()); } public function testPermissionValueError() { $this->expectException('Error'); $this->expectExceptionMessage('Permission panel.user.error of role testrole is invalid.'); $this->role->permission('panel.user.error'); } public function testPermissionCallbackError() { $this->expectException('Error'); $this->expectExceptionMessage('Invalid user.'); $this->role->permission('panel.user.update', 'someotheruser'); } public function testPermissionCallbackReturnError() { $this->expectException('Error'); $this->expectExceptionMessage('Permission panel.user.update of role testrole must return a boolean or error string.'); $this->role->permission('panel.user.update', 'returnerror'); } public function testCan() { $result = $this->role->can('testpermission'); $this->assertTrue($result); $result = $this->role->can('panel.site.update'); $this->assertFalse($result); $result = $this->role->can('panel.user.update', 'testuser'); $this->assertTrue($result); $result = $this->role->can('panel.user.update', 'failuser1'); $this->assertFalse($result); $result = $this->role->can('panel.user.update', 'failuser2'); $this->assertFalse($result); } public function testCannot() { $result = $this->role->cannot('testpermission'); $this->assertFalse($result); $result = $this->role->cannot('panel.site.update'); $this->assertTrue($result); $result = $this->role->cannot('panel.user.update', 'testuser'); $this->assertFalse($result); $result = $this->role->cannot('panel.user.update', 'failuser1'); $this->assertTrue($result); $result = $this->role->cannot('panel.user.update', 'failuser2'); $this->assertTrue($result); } }
{ "content_hash": "00a18fa4592cd585d1b535b391a3d29a", "timestamp": "", "source": "github", "line_count": 136, "max_line_length": 122, "avg_line_length": 31.63235294117647, "alnum_prop": 0.6111111111111112, "repo_name": "AhoyLemon/TheFPlus", "id": "3791d1c4f3b7adbe7996c8e9ba736045d3f96e02", "size": "4302", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "kirby/test/RoleTest.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "160036" }, { "name": "HTML", "bytes": "49599" }, { "name": "Hack", "bytes": "2356" }, { "name": "JavaScript", "bytes": "472236" }, { "name": "PHP", "bytes": "920421" }, { "name": "Pug", "bytes": "23223" }, { "name": "SCSS", "bytes": "158484" }, { "name": "XSLT", "bytes": "116687" } ], "symlink_target": "" }
package org.elasticsearch.script.expression; import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.queries.function.docvalues.DoubleDocValues; import org.elasticsearch.index.fielddata.AtomicNumericFieldData; import org.elasticsearch.index.fielddata.DoubleValues; /** * A {@link org.apache.lucene.queries.function.FunctionValues} which wrap field data. */ class FieldDataFunctionValues extends DoubleDocValues { DoubleValues dataAccessor; FieldDataFunctionValues(ValueSource parent, AtomicNumericFieldData d) { super(parent); dataAccessor = d.getDoubleValues(); } @Override public double doubleVal(int i) { int numValues = dataAccessor.setDocument(i); if (numValues == 0) { // sparse fields get a value of 0 when the field doesn't exist return 0.0; } return dataAccessor.nextValue(); } }
{ "content_hash": "8b94f39c64fdd4bf5ffb26783d6107ef", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 85, "avg_line_length": 30.566666666666666, "alnum_prop": 0.7175572519083969, "repo_name": "peschlowp/elasticsearch", "id": "ed374908cd6c3c046f28237a7ea38c5d3afd0285", "size": "1705", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/org/elasticsearch/script/expression/FieldDataFunctionValues.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Groovy", "bytes": "9" }, { "name": "Java", "bytes": "24768875" }, { "name": "Perl", "bytes": "5805" }, { "name": "Python", "bytes": "30533" }, { "name": "Ruby", "bytes": "31602" }, { "name": "Shell", "bytes": "28251" } ], "symlink_target": "" }
#pragma once // ibmxlc_long_double.hpp: nonconstexpr implementation of IEEE-754 long double manipulators // // Copyright (C) 2017-2021 Stillwater Supercomputing, Inc. // // This file is part of the universal numbers project, which is released under an MIT Open Source license. #if defined(__IBMC__) || defined(__IBMCPP__) /* IBM XL C/C++. -------------------------------------------- */ namespace sw { namespace universal { //////////////////////////////////////////////////////////////////////// // numerical helpers ///////////////////////////////////////////////////////////////////////////////////////////////////////// // compiler specific long double IEEE floating point // generate a binary string for a native long double precision IEEE floating point inline std::string to_hex(const long double& number) { return std::string("to_hex() not implemented for IBM compiler"); } // generate a binary string for a native long double precision IEEE floating point inline std::string to_binary(const long double& number, bool bNibbleMarker = false) { return std::string("to_binary() not implemented for IBM compiler"); } // return in triple form (+, scale, fraction) inline std::string to_triple(const long double& number) { return std::string("to_triple() not implemented for IBM compiler"); } inline void extract_fp_components(long double fp, bool& _sign, int& _exponent, float& _fr, uint32_t& _fraction) { std::cerr << "extract_fp_components not implemented for IBM compiler"); } }} // namespace sw::universal #endif // IBM XL C/C++.
{ "content_hash": "ed81b391643bcdc63ec21387bc942e12", "timestamp": "", "source": "github", "line_count": 41, "max_line_length": 113, "avg_line_length": 37.853658536585364, "alnum_prop": 0.625, "repo_name": "stillwater-sc/universal", "id": "90c535a293927793f772375ac68a6b235ed3f4df", "size": "1552", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "include/universal/native/nonconstexpr/ibmxlc_long_double.hpp", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "106542" }, { "name": "C++", "bytes": "10972688" }, { "name": "CMake", "bytes": "104848" }, { "name": "Dockerfile", "bytes": "4577" }, { "name": "Ruby", "bytes": "9935" }, { "name": "Shell", "bytes": "8297" } ], "symlink_target": "" }
SecurityTools ============= Tools for WebPenetration PHP ============= ## 1. AdminFinder.php To Find web administrator zone ## 2. MakeFiles.php To create files ## 3. SimpleUploader.php To upload files ## 4. BypassSafeMode.php To Bypass Safe Mode ## 5. PhpShell.php To exec shell commands ## 6. SymlinkV2.php To make a symlink ## 7. PhpMailer.php Boomber Mailer
{ "content_hash": "6b5e101c556849f283202079c6a159fd", "timestamp": "", "source": "github", "line_count": 36, "max_line_length": 32, "avg_line_length": 10.88888888888889, "alnum_prop": 0.6479591836734694, "repo_name": "ralphcorleone/SecurityTools", "id": "21edddd785f7b9093f462dc4fbee01cea949446f", "size": "392", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "25671" } ], "symlink_target": "" }
.PHONY: r d p sh cr cd cp csh lr ld lp lsh config all install clean \ distclean all: r lr lsh ## Load Previous Configuration #################################################################### -include config.mk ## Configurable options ########################################################################### # Directory to store object files, libraries, executables, and dependencies: BUILD_DIR ?= build # Include debug-symbols in release builds MINISATP_RELSYM ?= -g # Sets of compile flags for different build types MINISATP_REL ?= -O3 -D NDEBUG MINISATP_DEB ?= -O0 -D DEBUG MINISATP_PRF ?= -O3 -D NDEBUG MINISATP_FPIC ?= -fpic # GNU Standard Install Variables exec_prefix ?= $(prefix) includedir ?= $(prefix)/include bindir ?= $(exec_prefix)/bin libdir ?= $(exec_prefix)/lib datarootdir ?= $(prefix)/share mandir ?= $(datarootdir)/man # Dependencies MINISAT_INCLUDE?=-I$(includedir) MINISAT_LIB ?=-L$(libdir) -lminisat ## Write Configuration ########################################################################### config: @( echo 'BUILD_DIR?=$(BUILD_DIR)' ; \ echo 'MINISATP_RELSYM?=$(MINISATP_RELSYM)' ; \ echo 'MINISATP_REL?=$(MINISATP_REL)' ; \ echo 'MINISATP_DEB?=$(MINISATP_DEB)' ; \ echo 'MINISATP_PRF?=$(MINISATP_PRF)' ; \ echo 'MINISATP_FPIC?=$(MINISATP_FPIC)' ; \ echo 'MINISAT_INCLUDE?=$(MINISAT_INCLUDE)' ; \ echo 'MINISAT_LIB?=$(MINISAT_LIB)' ; \ echo 'MCL_INCLUDE?=$(MCL_INCLUDE)' ; \ echo 'MCL_LIB?=$(MCL_LIB)' ; \ echo 'prefix?=$(prefix)' ) > config.mk ## Configurable options end ####################################################################### INSTALL ?= install # Target file names MINISATP = minisatp# Name of MiniSat+ main executable. MINISATP_SLIB = libminisatp.a# Name of MiniSat+ static library. MINISATP_DLIB = libminisatp.so# Name of MiniSat+ shared library. # Shared Library Version SOMAJOR=1 SOMINOR=0 SORELEASE?=.0# Declare empty to leave out from library file name. 
MINISATP_CXXFLAGS = -IADTs -include Global.h -include Main.h -D_FILE_OFFSET_BITS=64 -D __STDC_LIMIT_MACROS -D __STDC_FORMAT_MACROS -Wall -Wno-parentheses -Wextra -Wno-unused-parameter -Wno-unused-function $(MCL_INCLUDE) $(MINISAT_INCLUDE) MINISATP_LDFLAGS = -Wall $(MCL_LIB) $(MINISAT_LIB) #-lz -lgmp ifeq ($(VERB),) ECHO=@ VERB=@ else ECHO=# VERB= endif SRCS = $(wildcard *.cc) $(wildcard ADTs/*.cc) HDRS = $(wildcard *.h) OBJS = $(filter-out %Main.o, $(SRCS:.cc=.o)) r: $(BUILD_DIR)/release/bin/$(MINISATP) d: $(BUILD_DIR)/debug/bin/$(MINISATP) p: $(BUILD_DIR)/profile/bin/$(MINISATP) sh: $(BUILD_DIR)/dynamic/bin/$(MINISATP) lr: $(BUILD_DIR)/release/lib/$(MINISATP_SLIB) ld: $(BUILD_DIR)/debug/lib/$(MINISATP_SLIB) lp: $(BUILD_DIR)/profile/lib/$(MINISATP_SLIB) lsh: $(BUILD_DIR)/dynamic/lib/$(MINISATP_DLIB).$(SOMAJOR).$(SOMINOR)$(SORELEASE) ## Build-type Compile-flags: $(BUILD_DIR)/release/%.o: MINISATP_CXXFLAGS +=$(MINISATP_REL) $(MINISATP_RELSYM) $(BUILD_DIR)/debug/%.o: MINISATP_CXXFLAGS +=$(MINISATP_DEB) -g $(BUILD_DIR)/profile/%.o: MINISATP_CXXFLAGS +=$(MINISATP_PRF) -pg $(BUILD_DIR)/dynamic/%.o: MINISATP_CXXFLAGS +=$(MINISATP_REL) $(MINISATP_FPIC) ## Build-type Link-flags: $(BUILD_DIR)/profile/bin/$(MINISATP): MINISATP_LDFLAGS += -pg $(BUILD_DIR)/release/bin/$(MINISATP): MINISATP_LDFLAGS += --static $(MINISATP_RELSYM) $(BUILD_DIR)/debug/bin/$(MINISATP): MINISATP_LDFLAGS += --static ## Executable dependencies $(BUILD_DIR)/release/bin/$(MINISATP): $(BUILD_DIR)/release/Main.o $(BUILD_DIR)/release/lib/$(MINISATP_SLIB) $(BUILD_DIR)/debug/bin/$(MINISATP): $(BUILD_DIR)/debug/Main.o $(BUILD_DIR)/debug/lib/$(MINISATP_SLIB) $(BUILD_DIR)/profile/bin/$(MINISATP): $(BUILD_DIR)/profile/Main.o $(BUILD_DIR)/profile/lib/$(MINISATP_SLIB) # need the main-file be compiled with fpic? 
$(BUILD_DIR)/dynamic/bin/$(MINISATP): $(BUILD_DIR)/dynamic/Main.o $(BUILD_DIR)/dynamic/lib/$(MINISATP_DLIB) ## Library dependencies $(BUILD_DIR)/release/lib/$(MINISATP_SLIB): $(foreach o,$(OBJS),$(BUILD_DIR)/release/$(o)) $(BUILD_DIR)/debug/lib/$(MINISATP_SLIB): $(foreach o,$(OBJS),$(BUILD_DIR)/debug/$(o)) $(BUILD_DIR)/profile/lib/$(MINISATP_SLIB): $(foreach o,$(OBJS),$(BUILD_DIR)/profile/$(o)) $(BUILD_DIR)/dynamic/lib/$(MINISATP_DLIB).$(SOMAJOR).$(SOMINOR)$(SORELEASE): $(foreach o,$(OBJS),$(BUILD_DIR)/dynamic/$(o)) ## Compile rules (these should be unified, buit I have not yet found a way which works in GNU Make) $(BUILD_DIR)/release/%.o: %.cc $(ECHO) echo Compiling: $@ $(VERB) mkdir -p $(dir $@) $(dir $(BUILD_DIR)/dep/$*.d) $(VERB) $(CXX) $(MINISATP_CXXFLAGS) $(CXXFLAGS) -c -o $@ $< -MMD -MF $(BUILD_DIR)/dep/$*.d $(BUILD_DIR)/profile/%.o: %.cc $(ECHO) echo Compiling: $@ $(VERB) mkdir -p $(dir $@) $(dir $(BUILD_DIR)/dep/$*.d) $(VERB) $(CXX) $(MINISATP_CXXFLAGS) $(CXXFLAGS) -c -o $@ $< -MMD -MF $(BUILD_DIR)/dep/$*.d $(BUILD_DIR)/debug/%.o: %.cc $(ECHO) echo Compiling: $@ $(VERB) mkdir -p $(dir $@) $(dir $(BUILD_DIR)/dep/$*.d) $(VERB) $(CXX) $(MINISATP_CXXFLAGS) $(CXXFLAGS) -c -o $@ $< -MMD -MF $(BUILD_DIR)/dep/$*.d $(BUILD_DIR)/dynamic/%.o: %.cc $(ECHO) echo Compiling: $@ $(VERB) mkdir -p $(dir $@) $(dir $(BUILD_DIR)/dep/$*.d) $(VERB) $(CXX) $(MINISATP_CXXFLAGS) $(CXXFLAGS) -c -o $@ $< -MMD -MF $(BUILD_DIR)/dep/$*.d ## Linking rule $(BUILD_DIR)/release/bin/$(MINISATP) $(BUILD_DIR)/debug/bin/$(MINISATP) $(BUILD_DIR)/profile/bin/$(MINISATP) $(BUILD_DIR)/dynamic/bin/$(MINISATP): $(ECHO) echo Linking Binary: $@ $(VERB) mkdir -p $(dir $@) $(VERB) $(CXX) $^ $(MINISATP_LDFLAGS) $(LDFLAGS) -o $@ ## Static Library rule %/lib/$(MINISATP_SLIB): $(ECHO) echo Linking Static Library: $@ $(VERB) mkdir -p $(dir $@) $(VERB) $(AR) rcs $@ $^ ## Shared Library rule $(BUILD_DIR)/dynamic/lib/$(MINISATP_DLIB).$(SOMAJOR).$(SOMINOR)$(SORELEASE)\ 
$(BUILD_DIR)/dynamic/lib/$(MINISATP_DLIB).$(SOMAJOR)\ $(BUILD_DIR)/dynamic/lib/$(MINISATP_DLIB): $(ECHO) echo Linking Shared Library: $@ $(VERB) mkdir -p $(dir $@) $(VERB) $(CXX) $(MINISATP_LDFLAGS) -o $@ -shared -Wl,-soname,$(MINISATP_DLIB).$(SOMAJOR) $^ $(VERB) ln -sf $(MINISATP_DLIB).$(SOMAJOR).$(SOMINOR)$(SORELEASE) $(BUILD_DIR)/dynamic/lib/$(MINISATP_DLIB).$(SOMAJOR) $(VERB) ln -sf $(MINISATP_DLIB).$(SOMAJOR) $(BUILD_DIR)/dynamic/lib/$(MINISATP_DLIB) install: install-bin install-bin: $(BUILD_DIR)/release/bin/$(MINISATP) $(INSTALL) -d $(DESTDIR)$(bindir) $(INSTALL) $(BUILD_DIR)/release/bin/$(MINISATP) $(DESTDIR)$(bindir)/ clean: rm -f $(foreach t, release debug profile dynamic, $(foreach o, $(SRCS:.cc=.o), $(BUILD_DIR)/$t/$o)) \ $(foreach d, $(SRCS:.cc=.d), $(BUILD_DIR)/dep/$d) \ $(foreach t, release debug profile dynamic, $(BUILD_DIR)/$t/bin/$(MINISATP)) \ $(foreach t, release debug profile, $(BUILD_DIR)/$t/lib/$(MINISATP_SLIB)) \ $(BUILD_DIR)/dynamic/lib/$(MINISATP_DLIB).$(SOMAJOR).$(SOMINOR)$(SORELEASE) distclean: clean rm -f config.mk ## Include generated dependencies ## NOTE: dependencies are assumed to be the same in all build modes at the moment! -include $(foreach s, $(SRCS:.cc=.d), $(BUILD_DIR)/dep/$s)
{ "content_hash": "49eb39d833b214db3f23423260bf69b8", "timestamp": "", "source": "github", "line_count": 174, "max_line_length": 238, "avg_line_length": 41.50574712643678, "alnum_prop": 0.6121572971476046, "repo_name": "meteor/minisatp", "id": "bf1dbed723b6649940f9308e26059a8189b03df3", "size": "7325", "binary": false, "copies": "1", "ref": "refs/heads/emscripten", "path": "Makefile", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "3952" }, { "name": "C++", "bytes": "192500" }, { "name": "JavaScript", "bytes": "3395" }, { "name": "Makefile", "bytes": "7594" }, { "name": "Shell", "bytes": "1588" } ], "symlink_target": "" }
package com.wzc.criminalintent.database; import android.content.Context; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteOpenHelper; import com.wzc.criminalintent.database.CrimeDbSchema.CrimeTable; /** * Created by wzc on 2017/7/26. */ public class CrimeBaseHelper extends SQLiteOpenHelper { private static final int VERSION = 1; private static final String DB_NAME = "crimeBase.db"; public CrimeBaseHelper(Context context) { super(context, DB_NAME, null, VERSION); } @Override public void onCreate(SQLiteDatabase db) { db.execSQL("create table " + CrimeTable.NAME + "(" + "_id integer primary key autoincrement, " + CrimeTable.Cols.UUID + ", " + CrimeTable.Cols.TITLE + ", " + CrimeTable.Cols.DATE + ", " + CrimeTable.Cols.SOLVED + ", " + CrimeTable.Cols.SUSPECT + ", " + CrimeTable.Cols.SUSPECTID + ")"); } @Override public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) { } }
{ "content_hash": "a3047df4f8691bd2d7be834bab1f452a", "timestamp": "", "source": "github", "line_count": 38, "max_line_length": 78, "avg_line_length": 29.68421052631579, "alnum_prop": 0.62322695035461, "repo_name": "jhwsx/AndroidProgramming", "id": "cc917201e59d6ef07fa5954c47bedd87ed0e9a2d", "size": "1128", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "criminalintent/src/main/java/com/wzc/criminalintent/database/CrimeBaseHelper.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "169304" } ], "symlink_target": "" }
/*! HTML5 Boilerplate v5.1.0 | MIT License | https://html5boilerplate.com/ */ /* * What follows is the result of much research on cross-browser styling. * Credit left inline and big thanks to Nicolas Gallagher, Jonathan Neal, * Kroc Camen, and the H5BP dev community and team. */ /* ========================================================================== Base styles: opinionated defaults ========================================================================== */ html { color: #222; font-size: 1em; line-height: 1.4; } /* * Remove text-shadow in selection highlight: * https://twitter.com/miketaylr/status/12228805301 * * These selection rule sets have to be separate. * Customize the background color to match your design. */ ::-moz-selection { background: #b3d4fc; text-shadow: none; } ::selection { background: #b3d4fc; text-shadow: none; } /* * A better looking default horizontal rule */ hr { display: block; height: 1px; border: 0; border-top: 1px solid #ccc; margin: 1em 0; padding: 0; } /* * Remove the gap between audio, canvas, iframes, * images, videos and the bottom of their containers: * https://github.com/h5bp/html5-boilerplate/issues/440 */ audio, canvas, iframe, img, svg, video { vertical-align: middle; } /* * Remove default fieldset styles. */ fieldset { border: 0; margin: 0; padding: 0; } /* * Allow only vertical resizing of textareas. 
*/ textarea { resize: vertical; } /* ========================================================================== Browser Upgrade Prompt ========================================================================== */ .browserupgrade { margin: 0.2em 0; background: #ccc; color: #000; padding: 0.2em 0; } /* ========================================================================== Author's custom styles ========================================================================== */ /*Font Import*/ @import url(http://fonts.googleapis.com/css?family=Open+Sans:300italic,400italic,600italic,700italic,800italic,400,300,600,700,800); h1 { font-family: 'Open Sans'; font-weight: 800; } p { font-family: 'Open Sans'; font-weight: 400; } /*layout and placement*/ .center { width: 80%; margin-left: auto; margin-right: auto; } @media only screen and (min-device-width : 320px) and (max-device-width : 568px){ h1 { font-family: 'Open Sans', sans-serif; font-weight: 800; } p { font-family: 'Open Sans', sans-serif; font-weight: 400; } } /* ========================================================================== Helper classes ========================================================================== */ /* * Hide visually and from screen readers: */ .hidden { display: none !important; } /* * Hide only visually, but have it available for screen readers: * http://snook.ca/archives/html_and_css/hiding-content-for-accessibility */ .visuallyhidden { border: 0; clip: rect(0 0 0 0); height: 1px; margin: -1px; overflow: hidden; padding: 0; position: absolute; width: 1px; } /* * Extends the .visuallyhidden class to allow the element * to be focusable when navigated to via the keyboard: * https://www.drupal.org/node/897638 */ .visuallyhidden.focusable:active, .visuallyhidden.focusable:focus { clip: auto; height: auto; margin: 0; overflow: visible; position: static; width: auto; } /* * Hide visually and from screen readers, but maintain layout */ .invisible { visibility: hidden; } /* * Clearfix: 
contain floats * * For modern browsers * 1. The space content is one way to avoid an Opera bug when the * `contenteditable` attribute is included anywhere else in the document. * Otherwise it causes space to appear at the top and bottom of elements * that receive the `clearfix` class. * 2. The use of `table` rather than `block` is only necessary if using * `:before` to contain the top-margins of child elements. */ .clearfix:before, .clearfix:after { content: " "; /* 1 */ display: table; /* 2 */ } .clearfix:after { clear: both; } /* ========================================================================== EXAMPLE Media Queries for Responsive Design. These examples override the primary ('mobile first') styles. Modify as content requires. ========================================================================== */ @media only screen and (min-width: 35em) { /* Style adjustments for viewports that meet the condition */ } @media print, (-webkit-min-device-pixel-ratio: 1.25), (min-resolution: 120dpi) { /* Style adjustments for high resolution devices */ } /* ========================================================================== Print styles. 
Inlined to avoid the additional HTTP request: http://www.phpied.com/delay-loading-your-print-css/ ========================================================================== */ @media print { *, *:before, *:after { background: transparent !important; color: #000 !important; /* Black prints faster: http://www.sanbeiji.com/archives/953 */ box-shadow: none !important; text-shadow: none !important; } a, a:visited { text-decoration: underline; } a[href]:after { content: " (" attr(href) ")"; } abbr[title]:after { content: " (" attr(title) ")"; } /* * Don't show links that are fragment identifiers, * or use the `javascript:` pseudo protocol */ a[href^="#"]:after, a[href^="javascript:"]:after { content: ""; } pre, blockquote { border: 1px solid #999; page-break-inside: avoid; } /* * Printing Tables: * http://css-discuss.incutio.com/wiki/Printing_Tables */ thead { display: table-header-group; } tr, img { page-break-inside: avoid; } img { max-width: 100% !important; } p, h2, h3 { orphans: 3; widows: 3; } h2, h3 { page-break-after: avoid; } }
{ "content_hash": "4ba88d5241371d870ab50e54cd732993", "timestamp": "", "source": "github", "line_count": 308, "max_line_length": 132, "avg_line_length": 20.58116883116883, "alnum_prop": 0.5220066256507335, "repo_name": "ProKno/prokno.github.io", "id": "325c65754f87b32b04a880971875eb9c1069b121", "size": "6339", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "css/main.css", "mode": "33261", "license": "mit", "language": [ { "name": "CSS", "bytes": "14442" }, { "name": "HTML", "bytes": "9429" }, { "name": "JavaScript", "bytes": "760" } ], "symlink_target": "" }
<?xml version="1.0" encoding="utf-8"?> <Deployment xmlns="http://schemas.microsoft.com/windowsphone/2012/deployment" AppPlatformVersion="8.0"> <DefaultLanguage xmlns="" code="en-US" /> <App xmlns="" ProductID="{cdc67ef6-fbf5-4e08-93b3-7ad2fd249c74}" Title="Booker" RuntimeType="Silverlight" Version="1.0.0.0" Genre="apps.normal" Author="Booker author" Description="A Small self of Books." Publisher="Booker" PublisherID="{a6e16f82-4649-457c-a45f-b06b9adabfe5}"> <IconPath IsRelative="true" IsResource="false">Assets\ApplicationIcon.png</IconPath> <Capabilities> <Capability Name="ID_CAP_NETWORKING" /> <Capability Name="ID_CAP_MEDIALIB_AUDIO" /> <Capability Name="ID_CAP_MEDIALIB_PLAYBACK" /> <Capability Name="ID_CAP_SENSORS" /> <Capability Name="ID_CAP_WEBBROWSERCOMPONENT" /> </Capabilities> <Tasks> <DefaultTask Name="_default" NavigationPage="MainPage.xaml" /> </Tasks> <Tokens> <PrimaryToken TokenID="BookerToken" TaskName="_default"> <TemplateFlip> <SmallImageURI IsRelative="true" IsResource="false">Assets\Tiles\FlipCycleTileSmall.png</SmallImageURI> <Count>0</Count> <BackgroundImageURI IsRelative="true" IsResource="false">Assets\Tiles\FlipCycleTileMedium.png</BackgroundImageURI> <Title>Booker</Title> <BackContent> </BackContent> <BackBackgroundImageURI> </BackBackgroundImageURI> <BackTitle> </BackTitle> <LargeBackgroundImageURI IsRelative="true" IsResource="false">336x336.png</LargeBackgroundImageURI> <LargeBackContent /> <LargeBackBackgroundImageURI IsRelative="true" IsResource="false"> </LargeBackBackgroundImageURI> <DeviceLockImageURI> </DeviceLockImageURI> <HasLarge>True</HasLarge> </TemplateFlip> </PrimaryToken> </Tokens> <ScreenResolutions> <ScreenResolution Name="ID_RESOLUTION_WVGA" /> <ScreenResolution Name="ID_RESOLUTION_WXGA" /> <ScreenResolution Name="ID_RESOLUTION_HD720P" /> </ScreenResolutions> </App> </Deployment>
{ "content_hash": "42af9a52c9eb36b93915df1c67a8788f", "timestamp": "", "source": "github", "line_count": 45, "max_line_length": 278, "avg_line_length": 47.733333333333334, "alnum_prop": 0.6727188081936686, "repo_name": "greenSyntax/BookerWP", "id": "738ee7b40e54c2417080be425e2bc3d9be8504e0", "size": "2150", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Booker/Booker/Properties/WMAppManifest.xml", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "526" }, { "name": "C#", "bytes": "149752" }, { "name": "C++", "bytes": "28065" } ], "symlink_target": "" }
<?php namespace Knob\Models; /** * For who can have images. * * @author José María Valera Reales */ abstract class Image extends ModelBase { /** * Return a list with the sizes of img to delete. */ protected abstract function getImageSizesToDelete(); /** * Set image * * @param string $keyImg Key from the image * @param file $imgFile The image * * @throws \Exception * * @return void|string */ protected function setImage($keyImg, $imgFile) { // If it's false or null we have to remove it from the server if (!$imgFile || is_null($imgFile)) { return $this->removeImage($keyImg); } if (strpos($imgFile['name'], '.php') !== false) { throw new \Exception('For security reasons, the extension ".php" cannot be in your file name.'); } $avatar = wp_handle_upload($_FILES[$keyImg], [ 'mimes' => [ 'jpg|jpeg|jpe' => 'image/jpeg', 'gif' => 'image/gif', 'png' => 'image/png', ], 'test_form' => false, 'unique_filename_callback' => function ($dir, $name, $ext) use ($keyImg) { $name = $base_name = sanitize_file_name($this->user_login . '_' . $keyImg); $number = 1; while (file_exists($dir . "/$name$ext")) { $name = $base_name . '_' . $number; $number++; } return $name . $ext; }, ]); // Remove the last image $this->removeImage($keyImg); $metaValue = []; $url_or_media_id = $avatar['url']; // Set the new image if (is_int($url_or_media_id)) { $metaValue['media_id'] = $url_or_media_id; $url_or_media_id = wp_get_attachment_url($url_or_media_id); } $metaValue['full'] = $url_or_media_id; return update_user_meta($this->ID, $keyImg, $metaValue); } /** * Get the image * * @param string $keyImg Key image * @param int $sizeW weight * @param int $sizeH height * @return string URL to the image */ protected function getImage($keyImg, $sizeW, $sizeH = false) { $sizeH = ($sizeH) ? 
$sizeH : $sizeW; // fetch local avatar from meta and make sure it's properly ste $local_avatars = get_user_meta($this->ID, $keyImg, true); if (empty($local_avatars['full'])) { return ''; } // generate a new size if (!array_key_exists($sizeW, $local_avatars)) { $local_avatars[$sizeW] = $local_avatars['full']; // just in case of failure elsewhere $upload_path = wp_upload_dir(); // get path for image by converting URL, unless its already been set, thanks to using // media library approach if (!isset($avatar_full_path)) { $avatar_full_path = str_replace($upload_path['baseurl'], $upload_path['basedir'], $local_avatars['full']); } // generate the new size $editor = wp_get_image_editor($avatar_full_path); if (!is_wp_error($editor)) { $resized = $editor->resize($sizeW, $sizeH, true); if (!is_wp_error($resized)) { $dest_file = $editor->generate_filename(); $saved = $editor->save($dest_file); if (!is_wp_error($saved)) { $local_avatars[$sizeW] = str_replace($upload_path['basedir'], $upload_path['baseurl'], $dest_file); } } } // save updated avatar sizes update_user_meta($user_id, $keyImg, $local_avatars); } if ('http' != substr($local_avatars[$sizeW], 0, 4)) { $local_avatars[$sizeW] = home_url($local_avatars[$sizeW]); } return esc_url($local_avatars[$sizeW]); } /** * Remove the image * * @param unknown $keyImg */ private function removeImage($keyImg) { // Save the path in one temporal var $getImagePath = $this->getImagePath($keyImg); $sizes = $this->getImageSizesToDelete(); foreach ($sizes as $size) { $imgPath = $getImagePath['virgen'] . "-{$size}x{$size}" . $getImagePath['ext']; if (file_exists($imgPath)) { unlink($imgPath); } } if (file_exists($getImagePath['base'])) { unlink($getImagePath['base']); } if (file_exists($getImagePath['current'])) { unlink($getImagePath['current']); } // remove his meta info return delete_user_meta($this->ID, $keyImg); } /** * Return the base name of the img and the name of the current img. 
* * for example [ * 'current' => 'Chemaclass_avatar-26x26.png', * 'base' => 'Chemaclass_avatar.png', * 'virgen' => 'Chemaclass_avatar', * 'ext' => '.png', * ]; * * @return array<string> List with the current name, base, virgen and extension of the image */ private function getImagePath($keyImg) { $upload_path = wp_upload_dir(); $img = $this->getImage($keyImg, User::AVATAR_SIZE_ICO); $path = str_replace($upload_path['baseurl'], $upload_path['basedir'], $img); $current = $base = basename($path); if (strpos($base, '-') !== false) { preg_match('/\.[^\.]+$/i', $current, $ext); $_base = substr($base, 0, strpos($base, "-")) . $ext[0]; $pathBase = str_replace($current, $_base, $path); } // and the virgen path $_base_virgen = substr($base, 0, strpos($base, "-")); $virgen = str_replace($current, $_base_virgen, $path); return [ 'current' => $path, 'base' => $pathBase, 'virgen' => $virgen, 'ext' => $ext[0], ]; } }
{ "content_hash": "689b9e67ec24d0ca4c3efe63d36fc02d", "timestamp": "", "source": "github", "line_count": 180, "max_line_length": 110, "avg_line_length": 34.266666666666666, "alnum_prop": 0.49238002594033725, "repo_name": "Chemaclass/knob-base", "id": "1c169465c12d1fa73f30ea07bbf67c636227c8ff", "size": "6414", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/models/Image.php", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "437" }, { "name": "PHP", "bytes": "133961" } ], "symlink_target": "" }
<?php namespace Proyecto\ExtensionBundle\Entity; use Doctrine\ORM\Mapping as ORM; /** * Proyecto\ExtensionBundle\Entity\Extension * * @ORM\Table() * @ORM\Entity(repositoryClass="Proyecto\ExtensionBundle\Entity\ExtensionRepository") */ class Extension { /** * @var integer $id * * @ORM\Column(name="id", type="integer") * @ORM\Id * @ORM\GeneratedValue(strategy="AUTO") */ private $id; /** * @ORM\ManyToOne(targetEntity="Proyecto", inversedBy="extensiones") * @ORM\JoinColumn(name="proyecto_id", referencedColumnName="id") * @return integer */ private $proyecto; /** * @ORM\ManyToOne(targetEntity="Periodo",cascade={"all"}) * @ORM\JoinColumn(name="periodo_id", referencedColumnName="id") * @return integer */ private $periodo; /** * @ORM\ManyToOne(targetEntity="Lugar") * @ORM\JoinColumn(name="lugar_id", referencedColumnName="id") * @return integer */ private $lugar; /** * Get id * * @return integer */ public function getId() { return $this->id; } /** * * @param \Proyecto\ExtensionBundle\Entity\Proyecto $proyecto */ public function setProyecto(\Proyecto\ExtensionBundle\Entity\Proyecto $proyecto) { $this->proyecto = $proyecto; } /** * * @return type */ public function getProyecto() { return $this->proyecto; } /** * * @param \Proyecto\ExtensionBundle\Entity\Periodo $periodo */ public function setPeriodo(\Proyecto\ExtensionBundle\Entity\Periodo $periodo) { $this->periodo = $periodo; } /** * * @return type */ public function getPeriodo() { return $this->periodo; } /** * * @param \Proyecto\ExtensionBundle\Entity\Lugar $lugar */ public function setLugar(\Proyecto\ExtensionBundle\Entity\Lugar $lugar) { $this->lugar = $lugar; } /** * * @return type */ public function getLugar() { return $this->lugar; } /** * * @return type */ public function getLink() { return $this->getProyecto()->getLink(); } }
{ "content_hash": "3432f50b0e6c8618912193cc7c21c14d", "timestamp": "", "source": "github", "line_count": 110, "max_line_length": 86, "avg_line_length": 20.427272727272726, "alnum_prop": 0.5683133066310636, "repo_name": "luchocapo1077/Proyecto", "id": "d70c4bdc9eefbc60f366cdb28f439e08185fa0d1", "size": "2247", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Proyecto/ExtensionBundle/Entity/Extension.php", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "125423" } ], "symlink_target": "" }
package org.sleuthkit.autopsy.modules.embeddedfileextractor; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.lang.IllegalArgumentException; import java.lang.IndexOutOfBoundsException; import java.lang.NullPointerException; import java.nio.file.Paths; import java.util.ArrayList; import java.util.List; import java.util.logging.Level; import org.apache.poi.POIXMLException; import org.apache.poi.hwpf.usermodel.Picture; import org.apache.poi.hslf.usermodel.HSLFPictureData; import org.apache.poi.hslf.usermodel.HSLFSlideShow; import org.apache.poi.hssf.record.RecordInputStream.LeftoverDataException; import org.apache.poi.hssf.usermodel.HSSFWorkbook; import org.apache.poi.hwpf.HWPFDocument; import org.apache.poi.hwpf.model.PicturesTable; import org.apache.poi.sl.usermodel.PictureData.PictureType; import org.apache.poi.ss.usermodel.Workbook; import org.apache.poi.util.RecordFormatException; import org.apache.poi.xslf.usermodel.XMLSlideShow; import org.apache.poi.xslf.usermodel.XSLFPictureData; import org.apache.poi.xssf.usermodel.XSSFWorkbook; import org.apache.poi.xwpf.usermodel.XWPFDocument; import org.apache.poi.xwpf.usermodel.XWPFPictureData; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.services.FileManager; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.ModuleContentEvent; import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.EncodedFileOutputStream; import org.sleuthkit.datamodel.ReadContentInputStream; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; class ImageExtractor { private final FileManager fileManager; private final IngestServices services; private static final Logger logger 
= Logger.getLogger(ImageExtractor.class.getName()); private final IngestJobContext context; private String parentFileName; private final String UNKNOWN_NAME_PREFIX = "image_"; //NON-NLS private final FileTypeDetector fileTypeDetector; private String moduleDirRelative; private String moduleDirAbsolute; /** * Enum of mimetypes which support image extraction */ enum SupportedImageExtractionFormats { DOC("application/msword"), //NON-NLS DOCX("application/vnd.openxmlformats-officedocument.wordprocessingml.document"), //NON-NLS PPT("application/vnd.ms-powerpoint"), //NON-NLS PPTX("application/vnd.openxmlformats-officedocument.presentationml.presentation"), //NON-NLS XLS("application/vnd.ms-excel"), //NON-NLS XLSX("application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"); //NON-NLS private final String mimeType; SupportedImageExtractionFormats(final String mimeType) { this.mimeType = mimeType; } @Override public String toString() { return this.mimeType; } // TODO Expand to support more formats } private SupportedImageExtractionFormats abstractFileExtractionFormat; ImageExtractor(IngestJobContext context, FileTypeDetector fileTypeDetector, String moduleDirRelative, String moduleDirAbsolute) { this.fileManager = Case.getCurrentCase().getServices().getFileManager(); this.services = IngestServices.getInstance(); this.context = context; this.fileTypeDetector = fileTypeDetector; this.moduleDirRelative = moduleDirRelative; this.moduleDirAbsolute = moduleDirAbsolute; } /** * This method returns true if the file format is currently supported. Else * it returns false. Performs only Apache Tika based detection. * * @param abstractFile The AbstractFilw whose mimetype is to be determined. * * @return This method returns true if the file format is currently * supported. Else it returns false. 
*/ boolean isImageExtractionSupported(AbstractFile abstractFile) { try { String abstractFileMimeType = fileTypeDetector.getFileType(abstractFile); for (SupportedImageExtractionFormats s : SupportedImageExtractionFormats.values()) { if (s.toString().equals(abstractFileMimeType)) { abstractFileExtractionFormat = s; return true; } } return false; } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error executing FileTypeDetector.getFileType()", ex); // NON-NLS return false; } } /** * This method selects the appropriate process of extracting images from * files using POI classes. Once the images have been extracted, the method * adds them to the DB and fires a ModuleContentEvent. ModuleContent Event * is not fired if the no images were extracted from the processed file. * * @param format * @param abstractFile The abstract file to be processed. */ void extractImage(AbstractFile abstractFile) { // // switchcase for different supported formats // process abstractFile according to the format by calling appropriate methods. 
List<ExtractedImage> listOfExtractedImages = null; List<AbstractFile> listOfExtractedImageAbstractFiles = null; this.parentFileName = EmbeddedFileExtractorIngestModule.getUniqueName(abstractFile); //check if already has derived files, skip try { if (abstractFile.hasChildren()) { //check if local unpacked dir exists if (new File(getOutputFolderPath(parentFileName)).exists()) { logger.log(Level.INFO, "File already has been processed as it has children and local unpacked file, skipping: {0}", abstractFile.getName()); //NON-NLS return; } } } catch (TskCoreException e) { logger.log(Level.SEVERE, String.format("Error checking if file already has been processed, skipping: %s", parentFileName), e); //NON-NLS return; } switch (abstractFileExtractionFormat) { case DOC: listOfExtractedImages = extractImagesFromDoc(abstractFile); break; case DOCX: listOfExtractedImages = extractImagesFromDocx(abstractFile); break; case PPT: listOfExtractedImages = extractImagesFromPpt(abstractFile); break; case PPTX: listOfExtractedImages = extractImagesFromPptx(abstractFile); break; case XLS: listOfExtractedImages = extractImagesFromXls(abstractFile); break; case XLSX: listOfExtractedImages = extractImagesFromXlsx(abstractFile); break; default: break; } if (listOfExtractedImages == null) { return; } // the common task of adding abstractFile to derivedfiles is performed. 
listOfExtractedImageAbstractFiles = new ArrayList<>(); for (ExtractedImage extractedImage : listOfExtractedImages) { try { listOfExtractedImageAbstractFiles.add(fileManager.addDerivedFile(extractedImage.getFileName(), extractedImage.getLocalPath(), extractedImage.getSize(), extractedImage.getCtime(), extractedImage.getCrtime(), extractedImage.getAtime(), extractedImage.getAtime(), true, abstractFile, null, EmbeddedFileExtractorModuleFactory.getModuleName(), null, null, TskData.EncodingType.XOR1)); } catch (TskCoreException ex) { logger.log(Level.SEVERE, NbBundle.getMessage(this.getClass(), "EmbeddedFileExtractorIngestModule.ImageExtractor.extractImage.addToDB.exception.msg"), ex); //NON-NLS } } if (!listOfExtractedImages.isEmpty()) { services.fireModuleContentEvent(new ModuleContentEvent(abstractFile)); context.addFilesToJob(listOfExtractedImageAbstractFiles); } } /** * Extract images from doc format files. * * @param af the file from which images are to be extracted. * * @return list of extracted images. Returns null in case no images were * extracted. */ private List<ExtractedImage> extractImagesFromDoc(AbstractFile af) { List<Picture> listOfAllPictures; try { HWPFDocument doc = new HWPFDocument(new ReadContentInputStream(af)); PicturesTable pictureTable = doc.getPicturesTable(); listOfAllPictures = pictureTable.getAllPictures(); } catch (IOException | IllegalArgumentException | IndexOutOfBoundsException | NullPointerException ex) { // IOException: // Thrown when the document has issues being read. // IllegalArgumentException: // This will catch OldFileFormatException, which is thrown when the // document's format is Word 95 or older. Alternatively, this is // thrown when attempting to load an RTF file as a DOC file. // However, our code verifies the file format before ever running it // through the ImageExtractor. This exception gets thrown in the // "IN10-0137.E01" image regardless. The reason is unknown. 
// IndexOutOfBoundsException: // NullPointerException: // These get thrown in certain images. The reason is unknown. It is // likely due to problems with the file formats that POI is poorly // handling. return null; } catch (Throwable ex) { // instantiating POI containers throw RuntimeExceptions logger.log(Level.SEVERE, NbBundle.getMessage(this.getClass(), "EmbeddedFileExtractorIngestModule.ImageExtractor.docContainer.init.err", af.getName()), ex); //NON-NLS return null; } String outputFolderPath; if (listOfAllPictures.isEmpty()) { return null; } else { outputFolderPath = getOutputFolderPath(this.parentFileName); } if (outputFolderPath == null) { return null; } List<ExtractedImage> listOfExtractedImages = new ArrayList<>(); byte[] data = null; for (Picture picture : listOfAllPictures) { String fileName = picture.suggestFullFileName(); try { data = picture.getContent(); } catch (Exception ex) { return null; } writeExtractedImage(Paths.get(outputFolderPath, fileName).toString(), data); // TODO Extract more info from the Picture viz ctime, crtime, atime, mtime listOfExtractedImages.add(new ExtractedImage(fileName, getFileRelativePath(fileName), picture.getSize(), af)); } return listOfExtractedImages; } /** * Extract images from docx format files. * * @param af the file from which images are to be extracted. * * @return list of extracted images. Returns null in case no images were * extracted. */ private List<ExtractedImage> extractImagesFromDocx(AbstractFile af) { List<XWPFPictureData> listOfAllPictures = null; try { XWPFDocument docx = new XWPFDocument(new ReadContentInputStream(af)); listOfAllPictures = docx.getAllPictures(); } catch (POIXMLException | IOException ex) { // POIXMLException: // Thrown when document fails to load // IOException: // Thrown when the document has issues being read. 
return null; } catch (Throwable ex) { // instantiating POI containers throw RuntimeExceptions logger.log(Level.SEVERE, NbBundle.getMessage(this.getClass(), "EmbeddedFileExtractorIngestModule.ImageExtractor.docxContainer.init.err", af.getName()), ex); //NON-NLS return null; } // if no images are extracted from the PPT, return null, else initialize // the output folder for image extraction. String outputFolderPath; if (listOfAllPictures.isEmpty()) { return null; } else { outputFolderPath = getOutputFolderPath(this.parentFileName); } if (outputFolderPath == null) { return null; } List<ExtractedImage> listOfExtractedImages = new ArrayList<>(); byte[] data = null; for (XWPFPictureData xwpfPicture : listOfAllPictures) { String fileName = xwpfPicture.getFileName(); try { data = xwpfPicture.getData(); } catch (Exception ex) { return null; } writeExtractedImage(Paths.get(outputFolderPath, fileName).toString(), data); listOfExtractedImages.add(new ExtractedImage(fileName, getFileRelativePath(fileName), xwpfPicture.getData().length, af)); } return listOfExtractedImages; } /** * Extract images from ppt format files. * * @param af the file from which images are to be extracted. * * @return list of extracted images. Returns null in case no images were * extracted. */ private List<ExtractedImage> extractImagesFromPpt(AbstractFile af) { List<HSLFPictureData> listOfAllPictures = null; try { HSLFSlideShow ppt = new HSLFSlideShow(new ReadContentInputStream(af)); listOfAllPictures = ppt.getPictureData(); } catch (IOException | IllegalArgumentException | IndexOutOfBoundsException ex) { // IllegalArgumentException: // This will catch OldFileFormatException, which is thrown when the // document version is unsupported. The IllegalArgumentException may // also get thrown for unknown reasons. // IOException: // Thrown when the document has issues being read. // IndexOutOfBoundsException: // This gets thrown in certain images. The reason is unknown. 
It is // likely due to problems with the file formats that POI is poorly // handling. return null; } catch (Throwable ex) { // instantiating POI containers throw RuntimeExceptions logger.log(Level.SEVERE, NbBundle.getMessage(this.getClass(), "EmbeddedFileExtractorIngestModule.ImageExtractor.pptContainer.init.err", af.getName()), ex); //NON-NLS return null; } // if no images are extracted from the PPT, return null, else initialize // the output folder for image extraction. String outputFolderPath; if (listOfAllPictures.isEmpty()) { return null; } else { outputFolderPath = getOutputFolderPath(this.parentFileName); } if (outputFolderPath == null) { return null; } // extract the images to the above initialized outputFolder. // extraction path - outputFolder/image_number.ext int i = 0; List<ExtractedImage> listOfExtractedImages = new ArrayList<>(); byte[] data = null; for (HSLFPictureData pictureData : listOfAllPictures) { // Get image extension, generate image name, write image to the module // output folder, add it to the listOfExtractedImageAbstractFiles PictureType type = pictureData.getType(); String ext; switch (type) { case JPEG: ext = ".jpg"; //NON-NLS break; case PNG: ext = ".png"; //NON-NLS break; case WMF: ext = ".wmf"; //NON-NLS break; case EMF: ext = ".emf"; //NON-NLS break; case PICT: ext = ".pict"; //NON-NLS break; default: continue; } String imageName = UNKNOWN_NAME_PREFIX + i + ext; //NON-NLS try { data = pictureData.getData(); } catch (Exception ex) { return null; } writeExtractedImage(Paths.get(outputFolderPath, imageName).toString(), data); listOfExtractedImages.add(new ExtractedImage(imageName, getFileRelativePath(imageName), pictureData.getData().length, af)); i++; } return listOfExtractedImages; } /** * Extract images from pptx format files. * * @param af the file from which images are to be extracted. * * @return list of extracted images. Returns null in case no images were * extracted. 
*/ private List<ExtractedImage> extractImagesFromPptx(AbstractFile af) { List<XSLFPictureData> listOfAllPictures = null; try { XMLSlideShow pptx = new XMLSlideShow(new ReadContentInputStream(af)); listOfAllPictures = pptx.getPictureData(); } catch (POIXMLException | IOException ex) { // POIXMLException: // Thrown when document fails to load. // IOException: // Thrown when the document has issues being read return null; } catch (Throwable ex) { // instantiating POI containers throw RuntimeExceptions logger.log(Level.SEVERE, NbBundle.getMessage(this.getClass(), "EmbeddedFileExtractorIngestModule.ImageExtractor.pptxContainer.init.err", af.getName()), ex); //NON-NLS return null; } // if no images are extracted from the PPT, return null, else initialize // the output folder for image extraction. String outputFolderPath; if (listOfAllPictures.isEmpty()) { return null; } else { outputFolderPath = getOutputFolderPath(this.parentFileName); } if (outputFolderPath == null) { return null; } List<ExtractedImage> listOfExtractedImages = new ArrayList<>(); byte[] data = null; for (XSLFPictureData xslsPicture : listOfAllPictures) { // get image file name, write it to the module outputFolder, and add // it to the listOfExtractedImageAbstractFiles. String fileName = xslsPicture.getFileName(); try { data = xslsPicture.getData(); } catch (Exception ex) { return null; } writeExtractedImage(Paths.get(outputFolderPath, fileName).toString(), data); listOfExtractedImages.add(new ExtractedImage(fileName, getFileRelativePath(fileName), xslsPicture.getData().length, af)); } return listOfExtractedImages; } /** * Extract images from xls format files. * * @param af the file from which images are to be extracted. * * @return list of extracted images. Returns null in case no images were * extracted. */ private List<ExtractedImage> extractImagesFromXls(AbstractFile af) { List<? 
extends org.apache.poi.ss.usermodel.PictureData> listOfAllPictures = null; try { Workbook xls = new HSSFWorkbook(new ReadContentInputStream(af)); listOfAllPictures = xls.getAllPictures(); } catch (IOException | LeftoverDataException | RecordFormatException | IllegalArgumentException | IndexOutOfBoundsException ex) { // IllegalArgumentException: // This will catch OldFileFormatException, which is thrown when the // document version is unsupported. The IllegalArgumentException may // also get thrown for unknown reasons. // IOException: // Thrown when the document has issues being read. // LeftoverDataException: // This is thrown for poorly formatted files that have more data // than expected. // RecordFormatException: // This is thrown for poorly formatted files that have less data // that expected. // IllegalArgumentException: // IndexOutOfBoundsException: // These get thrown in certain images. The reason is unknown. It is // likely due to problems with the file formats that POI is poorly // handling. return null; } catch (Throwable ex) { // instantiating POI containers throw RuntimeExceptions logger.log(Level.SEVERE, String.format("%s%s", NbBundle.getMessage(this.getClass(), "EmbeddedFileExtractorIngestModule.ImageExtractor.xlsContainer.init.err", af.getName()), af.getName()), ex); //NON-NLS return null; } // if no images are extracted from the PPT, return null, else initialize // the output folder for image extraction. String outputFolderPath; if (listOfAllPictures.isEmpty()) { return null; } else { outputFolderPath = getOutputFolderPath(this.parentFileName); } if (outputFolderPath == null) { return null; } int i = 0; List<ExtractedImage> listOfExtractedImages = new ArrayList<>(); byte[] data = null; for (org.apache.poi.ss.usermodel.PictureData pictureData : listOfAllPictures) { String imageName = UNKNOWN_NAME_PREFIX + i + "." 
+ pictureData.suggestFileExtension(); //NON-NLS try { data = pictureData.getData(); } catch (Exception ex) { return null; } writeExtractedImage(Paths.get(outputFolderPath, imageName).toString(), data); listOfExtractedImages.add(new ExtractedImage(imageName, getFileRelativePath(imageName), pictureData.getData().length, af)); i++; } return listOfExtractedImages; } /** * Extract images from xlsx format files. * * @param af the file from which images are to be extracted. * * @return list of extracted images. Returns null in case no images were * extracted. */ private List<ExtractedImage> extractImagesFromXlsx(AbstractFile af) { List<? extends org.apache.poi.ss.usermodel.PictureData> listOfAllPictures = null; try { Workbook xlsx = new XSSFWorkbook(new ReadContentInputStream(af)); listOfAllPictures = xlsx.getAllPictures(); } catch (POIXMLException | IOException ex) { // POIXMLException: // Thrown when document fails to load. // IOException: // Thrown when the document has issues being read return null; } catch (Throwable ex) { // instantiating POI containers throw RuntimeExceptions logger.log(Level.SEVERE, NbBundle.getMessage(this.getClass(), "EmbeddedFileExtractorIngestModule.ImageExtractor.xlsxContainer.init.err", af.getName()), ex); //NON-NLS return null; } // if no images are extracted from the PPT, return null, else initialize // the output folder for image extraction. String outputFolderPath; if (listOfAllPictures.isEmpty()) { return null; } else { outputFolderPath = getOutputFolderPath(this.parentFileName); } if (outputFolderPath == null) { return null; } int i = 0; List<ExtractedImage> listOfExtractedImages = new ArrayList<>(); byte[] data = null; for (org.apache.poi.ss.usermodel.PictureData pictureData : listOfAllPictures) { String imageName = UNKNOWN_NAME_PREFIX + i + "." 
+ pictureData.suggestFileExtension(); try { data = pictureData.getData(); } catch (Exception ex) { return null; } writeExtractedImage(Paths.get(outputFolderPath, imageName).toString(), data); listOfExtractedImages.add(new ExtractedImage(imageName, getFileRelativePath(imageName), pictureData.getData().length, af)); i++; } return listOfExtractedImages; } /** * Writes image to the module output location. * * @param outputPath Path where images is written * @param data byte representation of the data to be written to the * specified location. */ private void writeExtractedImage(String outputPath, byte[] data) { try (EncodedFileOutputStream fos = new EncodedFileOutputStream(new FileOutputStream(outputPath), TskData.EncodingType.XOR1)) { fos.write(data); } catch (IOException ex) { logger.log(Level.WARNING, "Could not write to the provided location: " + outputPath, ex); //NON-NLS } } /** * Gets path to the output folder for image extraction. If the path does not * exist, it is created. * * @param parentFileName name of the abstract file being processed for image * extraction. * * @return path to the image extraction folder for a given abstract file. */ private String getOutputFolderPath(String parentFileName) { String outputFolderPath = moduleDirAbsolute + File.separator + parentFileName; File outputFilePath = new File(outputFolderPath); if (!outputFilePath.exists()) { try { outputFilePath.mkdirs(); } catch (SecurityException ex) { logger.log(Level.WARNING, NbBundle.getMessage(this.getClass(), "EmbeddedFileExtractorIngestModule.ImageExtractor.getOutputFolderPath.exception.msg", parentFileName), ex); return null; } } return outputFolderPath; } /** * Gets the relative path to the file. The path is relative to the case * folder. * * @param fileName name of the the file for which the path is to be * generated. * * @return */ private String getFileRelativePath(String fileName) { // Used explicit FWD slashes to maintain DB consistency across operating systems. 
return "/" + moduleDirRelative + "/" + this.parentFileName + "/" + fileName; //NON-NLS } /** * Represents the image extracted using POI methods. Currently, POI is not * capable of extracting ctime, crtime, mtime, and atime; these values are * set to 0. */ private static class ExtractedImage { //String fileName, String localPath, long size, long ctime, long crtime, //long atime, long mtime, boolean isFile, AbstractFile parentFile, String rederiveDetails, String toolName, String toolVersion, String otherDetails private final String fileName; private final String localPath; private final long size; private final long ctime; private final long crtime; private final long atime; private final long mtime; private final AbstractFile parentFile; ExtractedImage(String fileName, String localPath, long size, AbstractFile parentFile) { this(fileName, localPath, size, 0, 0, 0, 0, parentFile); } ExtractedImage(String fileName, String localPath, long size, long ctime, long crtime, long atime, long mtime, AbstractFile parentFile) { this.fileName = fileName; this.localPath = localPath; this.size = size; this.ctime = ctime; this.crtime = crtime; this.atime = atime; this.mtime = mtime; this.parentFile = parentFile; } public String getFileName() { return fileName; } public String getLocalPath() { return localPath; } public long getSize() { return size; } public long getCtime() { return ctime; } public long getCrtime() { return crtime; } public long getAtime() { return atime; } public long getMtime() { return mtime; } public AbstractFile getParentFile() { return parentFile; } } }
{ "content_hash": "ba8fb27edd4e0eb7f509995741655ca7", "timestamp": "", "source": "github", "line_count": 715, "max_line_length": 214, "avg_line_length": 40.73006993006993, "alnum_prop": 0.6213515555250326, "repo_name": "narfindustries/autopsy", "id": "d58d935e976b58a7609642b92cc9f30e88ea7835", "size": "29808", "binary": false, "copies": "2", "ref": "refs/heads/develop", "path": "Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/ImageExtractor.java", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "4467" }, { "name": "HTML", "bytes": "9201" }, { "name": "Java", "bytes": "8824279" }, { "name": "Python", "bytes": "273829" } ], "symlink_target": "" }
// Copyright (c) 2009-2010 Satoshi Nakamoto // Copyright (c) 2009-2012 The Bitcoin developers // Distributed under the MIT/X11 software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. #ifndef BITCOIN_MAIN_H #define BITCOIN_MAIN_H #include "bignum.h" #include "sync.h" #include "net.h" #include "script.h" #include "scrypt.h" #include <list> class CWallet; class CBlock; class CBlockIndex; class CKeyItem; class CReserveKey; class CAddress; class CInv; class CNode; struct CBlockIndexWorkComparator; /** The maximum allowed size for a serialized block, in bytes (network rule) */ static const unsigned int MAX_BLOCK_SIZE = 1000000; // 1000KB block hard limit /** Obsolete: maximum size for mined blocks */ static const unsigned int MAX_BLOCK_SIZE_GEN = MAX_BLOCK_SIZE/4; // 250KB block soft limit /** Default for -blockmaxsize, maximum size for mined blocks **/ static const unsigned int DEFAULT_BLOCK_MAX_SIZE = 250000; /** Default for -blockprioritysize, maximum space for zero/low-fee transactions **/ static const unsigned int DEFAULT_BLOCK_PRIORITY_SIZE = 17000; /** The maximum size for transactions we're willing to relay/mine */ static const unsigned int MAX_STANDARD_TX_SIZE = 100000; /** The maximum allowed number of signature check operations in a block (network rule) */ static const unsigned int MAX_BLOCK_SIGOPS = MAX_BLOCK_SIZE/50; /** The maximum number of orphan transactions kept in memory */ static const unsigned int MAX_ORPHAN_TRANSACTIONS = MAX_BLOCK_SIZE/100; /** The maximum number of entries in an 'inv' protocol message */ static const unsigned int MAX_INV_SZ = 50000; /** The maximum size of a blk?????.dat file (since 0.8) */ static const unsigned int MAX_BLOCKFILE_SIZE = 0x8000000; // 128 MiB /** The pre-allocation chunk size for blk?????.dat files (since 0.8) */ static const unsigned int BLOCKFILE_CHUNK_SIZE = 0x1000000; // 16 MiB /** The pre-allocation chunk size for rev?????.dat files (since 0.8) */ static 
const unsigned int UNDOFILE_CHUNK_SIZE = 0x100000; // 1 MiB /** Fake height value used in CCoins to signify they are only in the memory pool (since 0.8) */ static const unsigned int MEMPOOL_HEIGHT = 0x7FFFFFFF; /** Dust Soft Limit, allowed with additional fee per output */ static const int64 DUST_SOFT_LIMIT = 100000; // 0.001 RPC /** Dust Hard Limit, ignored as wallet inputs (mininput default) */ static const int64 DUST_HARD_LIMIT = 1000; // 0.00001 RPC mininput /** No amount larger than this (in satoshi) is valid */ static const int64 MAX_MONEY = 2100000 * COIN; inline bool MoneyRange(int64 nValue) { return (nValue >= 0 && nValue <= MAX_MONEY); } /** Coinbase transaction outputs can only be spent after this number of new blocks (network rule) */ static const int COINBASE_MATURITY = 100; /** Threshold for nLockTime: below this value it is interpreted as block number, otherwise as UNIX timestamp. */ static const unsigned int LOCKTIME_THRESHOLD = 500000000; // Tue Nov 5 00:53:20 1985 UTC /** Maximum number of script-checking threads allowed */ static const int MAX_SCRIPTCHECK_THREADS = 16; #ifdef USE_UPNP static const int fHaveUPnP = true; #else static const int fHaveUPnP = false; #endif extern CScript COINBASE_FLAGS; extern CCriticalSection cs_main; extern std::map<uint256, CBlockIndex*> mapBlockIndex; extern std::set<CBlockIndex*, CBlockIndexWorkComparator> setBlockIndexValid; extern uint256 hashGenesisBlock; extern CBlockIndex* pindexGenesisBlock; extern int nBestHeight; extern uint256 nBestChainWork; extern uint256 nBestInvalidWork; extern uint256 hashBestChain; extern CBlockIndex* pindexBest; extern unsigned int nTransactionsUpdated; extern uint64 nLastBlockTx; extern uint64 nLastBlockSize; extern const std::string strMessageMagic; extern double dHashesPerSec; extern int64 nHPSTimerStart; extern int64 nTimeBestReceived; extern CCriticalSection cs_setpwalletRegistered; extern std::set<CWallet*> setpwalletRegistered; extern std::map<uint256, CBlock*> 
mapOrphanBlocks; extern unsigned char pchMessageStart[4]; extern bool fImporting; extern bool fReindex; extern bool fBenchmark; extern int nScriptCheckThreads; extern bool fTxIndex; extern unsigned int nCoinCacheSize; // Settings extern int64 nTransactionFee; extern int64 nMinimumInputValue; // Minimum disk space required - used in CheckDiskSpace() static const uint64 nMinDiskSpace = 52428800; class CReserveKey; class CCoinsDB; class CBlockTreeDB; struct CDiskBlockPos; class CCoins; class CTxUndo; class CCoinsView; class CCoinsViewCache; class CScriptCheck; class CValidationState; struct CBlockTemplate; /** Register a wallet to receive updates from core */ void RegisterWallet(CWallet* pwalletIn); /** Unregister a wallet from core */ void UnregisterWallet(CWallet* pwalletIn); /** Push an updated transaction to all registered wallets */ void SyncWithWallets(const uint256 &hash, const CTransaction& tx, const CBlock* pblock = NULL, bool fUpdate = false); /** Process an incoming block */ bool ProcessBlock(CValidationState &state, CNode* pfrom, CBlock* pblock, CDiskBlockPos *dbp = NULL); /** Check whether enough disk space is available for an incoming block */ bool CheckDiskSpace(uint64 nAdditionalBytes = 0); /** Open a block file (blk?????.dat) */ FILE* OpenBlockFile(const CDiskBlockPos &pos, bool fReadOnly = false); /** Open an undo file (rev?????.dat) */ FILE* OpenUndoFile(const CDiskBlockPos &pos, bool fReadOnly = false); /** Import blocks from an external file */ bool LoadExternalBlockFile(FILE* fileIn, CDiskBlockPos *dbp = NULL); /** Initialize a new block tree database + block data on disk */ bool InitBlockIndex(); /** Load the block tree and coins database from disk */ bool LoadBlockIndex(); /** Unload database information */ void UnloadBlockIndex(); /** Verify consistency of the block and coin databases */ bool VerifyDB(int nCheckLevel, int nCheckDepth); /** Print the loaded block tree */ void PrintBlockTree(); /** Find a block by height in the 
currently-connected chain */ CBlockIndex* FindBlockByHeight(int nHeight); /** Process protocol messages received from a given node */ bool ProcessMessages(CNode* pfrom); /** Send queued protocol messages to be sent to a give node */ bool SendMessages(CNode* pto, bool fSendTrickle); /** Run an instance of the script checking thread */ void ThreadScriptCheck(); /** Run the miner threads */ void GenerateBitcoins(bool fGenerate, CWallet* pwallet); /** Generate a new block, without valid proof-of-work */ CBlockTemplate* CreateNewBlock(const CScript& scriptPubKeyIn); CBlockTemplate* CreateNewBlockWithKey(CReserveKey& reservekey); /** Modify the extranonce in a block */ void IncrementExtraNonce(CBlock* pblock, CBlockIndex* pindexPrev, unsigned int& nExtraNonce); /** Do mining precalculation */ void FormatHashBuffers(CBlock* pblock, char* pmidstate, char* pdata, char* phash1); /** Check mined block */ bool CheckWork(CBlock* pblock, CWallet& wallet, CReserveKey& reservekey); /** Check whether a block hash satisfies the proof-of-work requirement specified by nBits */ bool CheckProofOfWork(uint256 hash, unsigned int nBits); /** Calculate the minimum amount of work a received block needs, without knowing its direct parent */ unsigned int ComputeMinWork(unsigned int nBase, int64 nTime); /** Get the number of active peers */ int GetNumBlocksOfPeers(); /** Check whether we are doing an initial block download (synchronizing from disk or network) */ bool IsInitialBlockDownload(); /** Format a string that describes several potential problems detected by the core */ std::string GetWarnings(std::string strFor); /** Retrieve a transaction (from memory pool, or from disk, if possible) */ bool GetTransaction(const uint256 &hash, CTransaction &tx, uint256 &hashBlock, bool fAllowSlow = false); /** Connect/disconnect blocks until pindexNew is the new tip of the active block chain */ bool SetBestChain(CValidationState &state, CBlockIndex* pindexNew); /** Find the best known block, and make 
it the tip of the block chain */ bool ConnectBestBlock(CValidationState &state); /** Create a new block index entry for a given block hash */ CBlockIndex * InsertBlockIndex(uint256 hash); /** Verify a signature */ bool VerifySignature(const CCoins& txFrom, const CTransaction& txTo, unsigned int nIn, unsigned int flags, int nHashType); /** Abort with a message */ bool AbortNode(const std::string &msg); bool GetWalletFile(CWallet* pwallet, std::string &strWalletFileOut); struct CDiskBlockPos { int nFile; unsigned int nPos; IMPLEMENT_SERIALIZE( READWRITE(VARINT(nFile)); READWRITE(VARINT(nPos)); ) CDiskBlockPos() { SetNull(); } CDiskBlockPos(int nFileIn, unsigned int nPosIn) { nFile = nFileIn; nPos = nPosIn; } friend bool operator==(const CDiskBlockPos &a, const CDiskBlockPos &b) { return (a.nFile == b.nFile && a.nPos == b.nPos); } friend bool operator!=(const CDiskBlockPos &a, const CDiskBlockPos &b) { return !(a == b); } void SetNull() { nFile = -1; nPos = 0; } bool IsNull() const { return (nFile == -1); } }; struct CDiskTxPos : public CDiskBlockPos { unsigned int nTxOffset; // after header IMPLEMENT_SERIALIZE( READWRITE(*(CDiskBlockPos*)this); READWRITE(VARINT(nTxOffset)); ) CDiskTxPos(const CDiskBlockPos &blockIn, unsigned int nTxOffsetIn) : CDiskBlockPos(blockIn.nFile, blockIn.nPos), nTxOffset(nTxOffsetIn) { } CDiskTxPos() { SetNull(); } void SetNull() { CDiskBlockPos::SetNull(); nTxOffset = 0; } }; /** An inpoint - a combination of a transaction and an index n into its vin */ class CInPoint { public: CTransaction* ptx; unsigned int n; CInPoint() { SetNull(); } CInPoint(CTransaction* ptxIn, unsigned int nIn) { ptx = ptxIn; n = nIn; } void SetNull() { ptx = NULL; n = (unsigned int) -1; } bool IsNull() const { return (ptx == NULL && n == (unsigned int) -1); } }; /** An outpoint - a combination of a transaction hash and an index n into its vout */ class COutPoint { public: uint256 hash; unsigned int n; COutPoint() { SetNull(); } COutPoint(uint256 hashIn, unsigned int 
nIn) { hash = hashIn; n = nIn; } IMPLEMENT_SERIALIZE( READWRITE(FLATDATA(*this)); ) void SetNull() { hash = 0; n = (unsigned int) -1; } bool IsNull() const { return (hash == 0 && n == (unsigned int) -1); } friend bool operator<(const COutPoint& a, const COutPoint& b) { return (a.hash < b.hash || (a.hash == b.hash && a.n < b.n)); } friend bool operator==(const COutPoint& a, const COutPoint& b) { return (a.hash == b.hash && a.n == b.n); } friend bool operator!=(const COutPoint& a, const COutPoint& b) { return !(a == b); } std::string ToString() const { return strprintf("COutPoint(%s, %u)", hash.ToString().c_str(), n); } void print() const { printf("%s\n", ToString().c_str()); } }; /** An input of a transaction. It contains the location of the previous * transaction's output that it claims and a signature that matches the * output's public key. */ class CTxIn { public: COutPoint prevout; CScript scriptSig; unsigned int nSequence; CTxIn() { nSequence = std::numeric_limits<unsigned int>::max(); } explicit CTxIn(COutPoint prevoutIn, CScript scriptSigIn=CScript(), unsigned int nSequenceIn=std::numeric_limits<unsigned int>::max()) { prevout = prevoutIn; scriptSig = scriptSigIn; nSequence = nSequenceIn; } CTxIn(uint256 hashPrevTx, unsigned int nOut, CScript scriptSigIn=CScript(), unsigned int nSequenceIn=std::numeric_limits<unsigned int>::max()) { prevout = COutPoint(hashPrevTx, nOut); scriptSig = scriptSigIn; nSequence = nSequenceIn; } IMPLEMENT_SERIALIZE ( READWRITE(prevout); READWRITE(scriptSig); READWRITE(nSequence); ) bool IsFinal() const { return (nSequence == std::numeric_limits<unsigned int>::max()); } friend bool operator==(const CTxIn& a, const CTxIn& b) { return (a.prevout == b.prevout && a.scriptSig == b.scriptSig && a.nSequence == b.nSequence); } friend bool operator!=(const CTxIn& a, const CTxIn& b) { return !(a == b); } std::string ToString() const { std::string str; str += "CTxIn("; str += prevout.ToString(); if (prevout.IsNull()) str += strprintf(", 
coinbase %s", HexStr(scriptSig).c_str()); else str += strprintf(", scriptSig=%s", scriptSig.ToString().substr(0,24).c_str()); if (nSequence != std::numeric_limits<unsigned int>::max()) str += strprintf(", nSequence=%u", nSequence); str += ")"; return str; } void print() const { printf("%s\n", ToString().c_str()); } }; /** An output of a transaction. It contains the public key that the next input * must be able to sign with to claim it. */ class CTxOut { public: int64 nValue; CScript scriptPubKey; CTxOut() { SetNull(); } CTxOut(int64 nValueIn, CScript scriptPubKeyIn) { nValue = nValueIn; scriptPubKey = scriptPubKeyIn; } IMPLEMENT_SERIALIZE ( READWRITE(nValue); READWRITE(scriptPubKey); ) void SetNull() { nValue = -1; scriptPubKey.clear(); } bool IsNull() const { return (nValue == -1); } uint256 GetHash() const { return SerializeHash(*this); } friend bool operator==(const CTxOut& a, const CTxOut& b) { return (a.nValue == b.nValue && a.scriptPubKey == b.scriptPubKey); } friend bool operator!=(const CTxOut& a, const CTxOut& b) { return !(a == b); } bool IsDust() const; std::string ToString() const { if (scriptPubKey.size() < 6) return "CTxOut(error)"; return strprintf("CTxOut(nValue=%"PRI64d".%08"PRI64d", scriptPubKey=%s)", nValue / COIN, nValue % COIN, scriptPubKey.ToString().substr(0,30).c_str()); } void print() const { printf("%s\n", ToString().c_str()); } }; enum GetMinFee_mode { GMF_BLOCK, GMF_RELAY, GMF_SEND, }; /** The basic transaction that is broadcasted on the network and contained in * blocks. A transaction can contain multiple inputs and outputs. 
*/ class CTransaction { public: static int64 nMinTxFee; static int64 nMinRelayTxFee; static const int CURRENT_VERSION=1; int nVersion; std::vector<CTxIn> vin; std::vector<CTxOut> vout; unsigned int nLockTime; CTransaction() { SetNull(); } IMPLEMENT_SERIALIZE ( READWRITE(this->nVersion); nVersion = this->nVersion; READWRITE(vin); READWRITE(vout); READWRITE(nLockTime); ) void SetNull() { nVersion = CTransaction::CURRENT_VERSION; vin.clear(); vout.clear(); nLockTime = 0; } bool IsNull() const { return (vin.empty() && vout.empty()); } uint256 GetHash() const { return SerializeHash(*this); } bool IsFinal(int nBlockHeight=0, int64 nBlockTime=0) const { // Time based nLockTime implemented in 0.1.6 if (nLockTime == 0) return true; if (nBlockHeight == 0) nBlockHeight = nBestHeight; if (nBlockTime == 0) nBlockTime = GetAdjustedTime(); if ((int64)nLockTime < ((int64)nLockTime < LOCKTIME_THRESHOLD ? (int64)nBlockHeight : nBlockTime)) return true; BOOST_FOREACH(const CTxIn& txin, vin) if (!txin.IsFinal()) return false; return true; } bool IsNewerThan(const CTransaction& old) const { if (vin.size() != old.vin.size()) return false; for (unsigned int i = 0; i < vin.size(); i++) if (vin[i].prevout != old.vin[i].prevout) return false; bool fNewer = false; unsigned int nLowest = std::numeric_limits<unsigned int>::max(); for (unsigned int i = 0; i < vin.size(); i++) { if (vin[i].nSequence != old.vin[i].nSequence) { if (vin[i].nSequence <= nLowest) { fNewer = false; nLowest = vin[i].nSequence; } if (old.vin[i].nSequence < nLowest) { fNewer = true; nLowest = old.vin[i].nSequence; } } } return fNewer; } bool IsCoinBase() const { return (vin.size() == 1 && vin[0].prevout.IsNull()); } /** Check for standard transaction types @return True if all outputs (scriptPubKeys) use only standard transaction forms */ bool IsStandard(std::string& strReason) const; bool IsStandard() const { std::string strReason; return IsStandard(strReason); } /** Check for standard transaction types @param[in] 
mapInputs Map of previous transactions that have outputs we're spending @return True if all inputs (scriptSigs) use only standard transaction forms */ bool AreInputsStandard(CCoinsViewCache& mapInputs) const; /** Count ECDSA signature operations the old-fashioned (pre-0.6) way @return number of sigops this transaction's outputs will produce when spent */ unsigned int GetLegacySigOpCount() const; /** Count ECDSA signature operations in pay-to-script-hash inputs. @param[in] mapInputs Map of previous transactions that have outputs we're spending @return maximum number of sigops required to validate this transaction's inputs */ unsigned int GetP2SHSigOpCount(CCoinsViewCache& mapInputs) const; /** Amount of bitcoins spent by this transaction. @return sum of all outputs (note: does not include fees) */ int64 GetValueOut() const { int64 nValueOut = 0; BOOST_FOREACH(const CTxOut& txout, vout) { nValueOut += txout.nValue; if (!MoneyRange(txout.nValue) || !MoneyRange(nValueOut)) throw std::runtime_error("CTransaction::GetValueOut() : value out of range"); } return nValueOut; } /** Amount of bitcoins coming in to this transaction Note that lightweight clients may not know anything besides the hash of previous transactions, so may not be able to calculate this. @param[in] mapInputs Map of previous transactions that have outputs we're spending @return Sum of value of all inputs (scriptSigs) */ int64 GetValueIn(CCoinsViewCache& mapInputs) const; static bool AllowFree(double dPriority) { // Large (in bytes) low-priority (new, small-coin) transactions // need a fee. 
return dPriority > COIN * 576 / 250; } // Apply the effects of this transaction on the UTXO set represented by view void UpdateCoins(const CTransaction& tx, CValidationState &state, CCoinsViewCache &inputs, CTxUndo &txundo, int nHeight, const uint256 &txhash); int64 GetMinFee(unsigned int nBlockSize=1, bool fAllowFree=true, enum GetMinFee_mode mode=GMF_BLOCK) const; friend bool operator==(const CTransaction& a, const CTransaction& b) { return (a.nVersion == b.nVersion && a.vin == b.vin && a.vout == b.vout && a.nLockTime == b.nLockTime); } friend bool operator!=(const CTransaction& a, const CTransaction& b) { return !(a == b); } std::string ToString() const { std::string str; str += strprintf("CTransaction(hash=%s, ver=%d, vin.size=%"PRIszu", vout.size=%"PRIszu", nLockTime=%u)\n", GetHash().ToString().c_str(), nVersion, vin.size(), vout.size(), nLockTime); for (unsigned int i = 0; i < vin.size(); i++) str += " " + vin[i].ToString() + "\n"; for (unsigned int i = 0; i < vout.size(); i++) str += " " + vout[i].ToString() + "\n"; return str; } void print() const { printf("%s", ToString().c_str()); } // Check whether all prevouts of this transaction are present in the UTXO set represented by view bool HaveInputs(CCoinsViewCache &view) const; // Check whether all inputs of this transaction are valid (no double spends, scripts & sigs, amounts) // This does not modify the UTXO set. If pvChecks is not NULL, script checks are pushed onto it // instead of being performed inline. 
bool CheckInputs(CValidationState &state, CCoinsViewCache &view, bool fScriptChecks = true, unsigned int flags = SCRIPT_VERIFY_P2SH | SCRIPT_VERIFY_STRICTENC, std::vector<CScriptCheck> *pvChecks = NULL) const; // Apply the effects of this transaction on the UTXO set represented by view void UpdateCoins(CValidationState &state, CCoinsViewCache &view, CTxUndo &txundo, int nHeight, const uint256 &txhash) const; // Context-independent validity checks bool CheckTransaction(CValidationState &state) const; // Try to accept this transaction into the memory pool bool AcceptToMemoryPool(CValidationState &state, bool fCheckInputs=true, bool fLimitFree = true, bool* pfMissingInputs=NULL); protected: static const CTxOut &GetOutputFor(const CTxIn& input, CCoinsViewCache& mapInputs); }; /** wrapper for CTxOut that provides a more compact serialization */ class CTxOutCompressor { private: CTxOut &txout; public: static uint64 CompressAmount(uint64 nAmount); static uint64 DecompressAmount(uint64 nAmount); CTxOutCompressor(CTxOut &txoutIn) : txout(txoutIn) { } IMPLEMENT_SERIALIZE(({ if (!fRead) { uint64 nVal = CompressAmount(txout.nValue); READWRITE(VARINT(nVal)); } else { uint64 nVal = 0; READWRITE(VARINT(nVal)); txout.nValue = DecompressAmount(nVal); } CScriptCompressor cscript(REF(txout.scriptPubKey)); READWRITE(cscript); });) }; /** Undo information for a CTxIn * * Contains the prevout's CTxOut being spent, and if this was the * last output of the affected transaction, its metadata as well * (coinbase or not, height, transaction version) */ class CTxInUndo { public: CTxOut txout; // the txout data before being spent bool fCoinBase; // if the outpoint was the last unspent: whether it belonged to a coinbase unsigned int nHeight; // if the outpoint was the last unspent: its height int nVersion; // if the outpoint was the last unspent: its version CTxInUndo() : txout(), fCoinBase(false), nHeight(0), nVersion(0) {} CTxInUndo(const CTxOut &txoutIn, bool fCoinBaseIn = false, unsigned 
int nHeightIn = 0, int nVersionIn = 0) : txout(txoutIn), fCoinBase(fCoinBaseIn), nHeight(nHeightIn), nVersion(nVersionIn) { }

    // Serialized form: VARINT(nHeight*2 + coinbase-flag), then — only when this
    // undo entry carries the per-transaction metadata (nHeight > 0) — the
    // VARINT tx version, then the compressed txout itself.
    unsigned int GetSerializeSize(int nType, int nVersion) const {
        return ::GetSerializeSize(VARINT(nHeight*2+(fCoinBase ? 1 : 0)), nType, nVersion) +
               (nHeight > 0 ? ::GetSerializeSize(VARINT(this->nVersion), nType, nVersion) : 0) +
               ::GetSerializeSize(CTxOutCompressor(REF(txout)), nType, nVersion);
    }

    template<typename Stream>
    void Serialize(Stream &s, int nType, int nVersion) const {
        // Pack height and coinbase flag into one code word: code = height*2 + fCoinBase.
        ::Serialize(s, VARINT(nHeight*2+(fCoinBase ? 1 : 0)), nType, nVersion);
        if (nHeight > 0)
            // nHeight > 0 means this was the last unspent output, so the
            // transaction's version is stored alongside it.
            ::Serialize(s, VARINT(this->nVersion), nType, nVersion);
        ::Serialize(s, CTxOutCompressor(REF(txout)), nType, nVersion);
    }

    template<typename Stream>
    void Unserialize(Stream &s, int nType, int nVersion) {
        unsigned int nCode = 0;
        ::Unserialize(s, VARINT(nCode), nType, nVersion);
        nHeight = nCode / 2;     // inverse of Serialize: code = height*2 + coinbase flag
        fCoinBase = nCode & 1;
        if (nHeight > 0)
            ::Unserialize(s, VARINT(this->nVersion), nType, nVersion);
        ::Unserialize(s, REF(CTxOutCompressor(REF(txout))), nType, nVersion);
    }
};

/** Undo information for a CTransaction */
class CTxUndo
{
public:
    // undo information for all txins
    std::vector<CTxInUndo> vprevout;

    IMPLEMENT_SERIALIZE(
        READWRITE(vprevout);
    )
};

/** Undo information for a CBlock */
class CBlockUndo
{
public:
    std::vector<CTxUndo> vtxundo; // for all but the coinbase

    // Append this block's undo data at the given undo-file position, framed by
    // the network magic bytes and a size header; pos.nPos is updated to the
    // actual write offset so the caller can record it in the block index.
    bool WriteToDisk(CDiskBlockPos &pos, const uint256 &hashBlock)
    {
        // Open history file to append
        CAutoFile fileout = CAutoFile(OpenUndoFile(pos), SER_DISK, CLIENT_VERSION);
        if (!fileout)
            return error("CBlockUndo::WriteToDisk() : OpenUndoFile failed");

        // Write index header
        unsigned int nSize = fileout.GetSerializeSize(*this);
        fileout << FLATDATA(pchMessageStart) << nSize;

        // Write undo data
        long fileOutPos = ftell(fileout);
        if (fileOutPos < 0)
            return error("CBlockUndo::WriteToDisk() : ftell failed");
        pos.nPos = (unsigned int)fileOutPos;
        fileout << *this;

        // calculate & write
checksum CHashWriter hasher(SER_GETHASH, PROTOCOL_VERSION); hasher << hashBlock; hasher << *this; fileout << hasher.GetHash(); // Flush stdio buffers and commit to disk before returning fflush(fileout); if (!IsInitialBlockDownload()) FileCommit(fileout); return true; } bool ReadFromDisk(const CDiskBlockPos &pos, const uint256 &hashBlock) { // Open history file to read CAutoFile filein = CAutoFile(OpenUndoFile(pos, true), SER_DISK, CLIENT_VERSION); if (!filein) return error("CBlockUndo::ReadFromDisk() : OpenBlockFile failed"); // Read block uint256 hashChecksum; try { filein >> *this; filein >> hashChecksum; } catch (std::exception &e) { return error("%s() : deserialize or I/O error", __PRETTY_FUNCTION__); } // Verify checksum CHashWriter hasher(SER_GETHASH, PROTOCOL_VERSION); hasher << hashBlock; hasher << *this; if (hashChecksum != hasher.GetHash()) return error("CBlockUndo::ReadFromDisk() : checksum mismatch"); return true; } }; /** pruned version of CTransaction: only retains metadata and unspent transaction outputs * * Serialized format: * - VARINT(nVersion) * - VARINT(nCode) * - unspentness bitvector, for vout[2] and further; least significant byte first * - the non-spent CTxOuts (via CTxOutCompressor) * - VARINT(nHeight) * * The nCode value consists of: * - bit 1: IsCoinBase() * - bit 2: vout[0] is not spent * - bit 4: vout[1] is not spent * - The higher bits encode N, the number of non-zero bytes in the following bitvector. * - In case both bit 2 and bit 4 are unset, they encode N-1, as there must be at * least one non-spent output). 
* * Example: 0104835800816115944e077fe7c803cfa57f29b36bf87c1d358bb85e * <><><--------------------------------------------><----> * | \ | / * version code vout[1] height * * - version = 1 * - code = 4 (vout[1] is not spent, and 0 non-zero bytes of bitvector follow) * - unspentness bitvector: as 0 non-zero bytes follow, it has length 0 * - vout[1]: 835800816115944e077fe7c803cfa57f29b36bf87c1d35 * * 8358: compact amount representation for 60000000000 (600 BTC) * * 00: special txout type pay-to-pubkey-hash * * 816115944e077fe7c803cfa57f29b36bf87c1d35: address uint160 * - height = 203998 * * * Example: 0109044086ef97d5790061b01caab50f1b8e9c50a5057eb43c2d9563a4eebbd123008c988f1a4a4de2161e0f50aac7f17e7f9555caa486af3b * <><><--><--------------------------------------------------><----------------------------------------------><----> * / \ \ | | / * version code unspentness vout[4] vout[16] height * * - version = 1 * - code = 9 (coinbase, neither vout[0] or vout[1] are unspent, * 2 (1, +1 because both bit 2 and bit 4 are unset) non-zero bitvector bytes follow) * - unspentness bitvector: bits 2 (0x04) and 14 (0x4000) are set, so vout[2+2] and vout[14+2] are unspent * - vout[4]: 86ef97d5790061b01caab50f1b8e9c50a5057eb43c2d9563a4ee * * 86ef97d579: compact amount representation for 234925952 (2.35 BTC) * * 00: special txout type pay-to-pubkey-hash * * 61b01caab50f1b8e9c50a5057eb43c2d9563a4ee: address uint160 * - vout[16]: bbd123008c988f1a4a4de2161e0f50aac7f17e7f9555caa4 * * bbd123: compact amount representation for 110397 (0.001 BTC) * * 00: special txout type pay-to-pubkey-hash * * 8c988f1a4a4de2161e0f50aac7f17e7f9555caa4: address uint160 * - height = 120891 */ class CCoins { public: // whether transaction is a coinbase bool fCoinBase; // unspent transaction outputs; spent outputs are .IsNull(); spent outputs at the end of the array are dropped std::vector<CTxOut> vout; // at which height this transaction was included in the active block chain int nHeight; // version of the 
CTransaction; accesses to this value should probably check for nHeight as well, // as new tx version will probably only be introduced at certain heights int nVersion; // construct a CCoins from a CTransaction, at a given height CCoins(const CTransaction &tx, int nHeightIn) : fCoinBase(tx.IsCoinBase()), vout(tx.vout), nHeight(nHeightIn), nVersion(tx.nVersion) { } // empty constructor CCoins() : fCoinBase(false), vout(0), nHeight(0), nVersion(0) { } // remove spent outputs at the end of vout void Cleanup() { while (vout.size() > 0 && vout.back().IsNull()) vout.pop_back(); if (vout.empty()) std::vector<CTxOut>().swap(vout); } void swap(CCoins &to) { std::swap(to.fCoinBase, fCoinBase); to.vout.swap(vout); std::swap(to.nHeight, nHeight); std::swap(to.nVersion, nVersion); } // equality test friend bool operator==(const CCoins &a, const CCoins &b) { return a.fCoinBase == b.fCoinBase && a.nHeight == b.nHeight && a.nVersion == b.nVersion && a.vout == b.vout; } friend bool operator!=(const CCoins &a, const CCoins &b) { return !(a == b); } // calculate number of bytes for the bitmask, and its number of non-zero bytes // each bit in the bitmask represents the availability of one output, but the // availabilities of the first two outputs are encoded separately void CalcMaskSize(unsigned int &nBytes, unsigned int &nNonzeroBytes) const { unsigned int nLastUsedByte = 0; for (unsigned int b = 0; 2+b*8 < vout.size(); b++) { bool fZero = true; for (unsigned int i = 0; i < 8 && 2+b*8+i < vout.size(); i++) { if (!vout[2+b*8+i].IsNull()) { fZero = false; continue; } } if (!fZero) { nLastUsedByte = b + 1; nNonzeroBytes++; } } nBytes += nLastUsedByte; } bool IsCoinBase() const { return fCoinBase; } unsigned int GetSerializeSize(int nType, int nVersion) const { unsigned int nSize = 0; unsigned int nMaskSize = 0, nMaskCode = 0; CalcMaskSize(nMaskSize, nMaskCode); bool fFirst = vout.size() > 0 && !vout[0].IsNull(); bool fSecond = vout.size() > 1 && !vout[1].IsNull(); assert(fFirst || fSecond 
|| nMaskCode); unsigned int nCode = 8*(nMaskCode - (fFirst || fSecond ? 0 : 1)) + (fCoinBase ? 1 : 0) + (fFirst ? 2 : 0) + (fSecond ? 4 : 0); // version nSize += ::GetSerializeSize(VARINT(this->nVersion), nType, nVersion); // size of header code nSize += ::GetSerializeSize(VARINT(nCode), nType, nVersion); // spentness bitmask nSize += nMaskSize; // txouts themself for (unsigned int i = 0; i < vout.size(); i++) if (!vout[i].IsNull()) nSize += ::GetSerializeSize(CTxOutCompressor(REF(vout[i])), nType, nVersion); // height nSize += ::GetSerializeSize(VARINT(nHeight), nType, nVersion); return nSize; } template<typename Stream> void Serialize(Stream &s, int nType, int nVersion) const { unsigned int nMaskSize = 0, nMaskCode = 0; CalcMaskSize(nMaskSize, nMaskCode); bool fFirst = vout.size() > 0 && !vout[0].IsNull(); bool fSecond = vout.size() > 1 && !vout[1].IsNull(); assert(fFirst || fSecond || nMaskCode); unsigned int nCode = 8*(nMaskCode - (fFirst || fSecond ? 0 : 1)) + (fCoinBase ? 1 : 0) + (fFirst ? 2 : 0) + (fSecond ? 
4 : 0); // version ::Serialize(s, VARINT(this->nVersion), nType, nVersion); // header code ::Serialize(s, VARINT(nCode), nType, nVersion); // spentness bitmask for (unsigned int b = 0; b<nMaskSize; b++) { unsigned char chAvail = 0; for (unsigned int i = 0; i < 8 && 2+b*8+i < vout.size(); i++) if (!vout[2+b*8+i].IsNull()) chAvail |= (1 << i); ::Serialize(s, chAvail, nType, nVersion); } // txouts themself for (unsigned int i = 0; i < vout.size(); i++) { if (!vout[i].IsNull()) ::Serialize(s, CTxOutCompressor(REF(vout[i])), nType, nVersion); } // coinbase height ::Serialize(s, VARINT(nHeight), nType, nVersion); } template<typename Stream> void Unserialize(Stream &s, int nType, int nVersion) { unsigned int nCode = 0; // version ::Unserialize(s, VARINT(this->nVersion), nType, nVersion); // header code ::Unserialize(s, VARINT(nCode), nType, nVersion); fCoinBase = nCode & 1; std::vector<bool> vAvail(2, false); vAvail[0] = nCode & 2; vAvail[1] = nCode & 4; unsigned int nMaskCode = (nCode / 8) + ((nCode & 6) != 0 ? 
0 : 1); // spentness bitmask while (nMaskCode > 0) { unsigned char chAvail = 0; ::Unserialize(s, chAvail, nType, nVersion); for (unsigned int p = 0; p < 8; p++) { bool f = (chAvail & (1 << p)) != 0; vAvail.push_back(f); } if (chAvail != 0) nMaskCode--; } // txouts themself vout.assign(vAvail.size(), CTxOut()); for (unsigned int i = 0; i < vAvail.size(); i++) { if (vAvail[i]) ::Unserialize(s, REF(CTxOutCompressor(vout[i])), nType, nVersion); } // coinbase height ::Unserialize(s, VARINT(nHeight), nType, nVersion); Cleanup(); } // mark an outpoint spent, and construct undo information bool Spend(const COutPoint &out, CTxInUndo &undo) { if (out.n >= vout.size()) return false; if (vout[out.n].IsNull()) return false; undo = CTxInUndo(vout[out.n]); vout[out.n].SetNull(); Cleanup(); if (vout.size() == 0) { undo.nHeight = nHeight; undo.fCoinBase = fCoinBase; undo.nVersion = this->nVersion; } return true; } // mark a vout spent bool Spend(int nPos) { CTxInUndo undo; COutPoint out(0, nPos); return Spend(out, undo); } // check whether a particular output is still available bool IsAvailable(unsigned int nPos) const { return (nPos < vout.size() && !vout[nPos].IsNull()); } // check whether the entire CCoins is spent // note that only !IsPruned() CCoins can be serialized bool IsPruned() const { BOOST_FOREACH(const CTxOut &out, vout) if (!out.IsNull()) return false; return true; } }; /** Closure representing one script verification * Note that this stores references to the spending transaction */ class CScriptCheck { private: CScript scriptPubKey; const CTransaction *ptxTo; unsigned int nIn; unsigned int nFlags; int nHashType; public: CScriptCheck() {} CScriptCheck(const CCoins& txFromIn, const CTransaction& txToIn, unsigned int nInIn, unsigned int nFlagsIn, int nHashTypeIn) : scriptPubKey(txFromIn.vout[txToIn.vin[nInIn].prevout.n].scriptPubKey), ptxTo(&txToIn), nIn(nInIn), nFlags(nFlagsIn), nHashType(nHashTypeIn) { } bool operator()() const; void swap(CScriptCheck &check) { 
scriptPubKey.swap(check.scriptPubKey);
        std::swap(ptxTo, check.ptxTo);
        std::swap(nIn, check.nIn);
        std::swap(nFlags, check.nFlags);
        std::swap(nHashType, check.nHashType);
    }
};

/** A transaction with a merkle branch linking it to the block chain. */
class CMerkleTx : public CTransaction
{
public:
    uint256 hashBlock;                  // hash of the block this tx claims to be in (0 = none)
    std::vector<uint256> vMerkleBranch; // sibling hashes from the tx up to the merkle root
    int nIndex;                         // index of this tx within that block (-1 = not in a block)

    // memory only
    mutable bool fMerkleVerified;       // cached result: merkle branch already checked

    CMerkleTx()
    {
        Init();
    }

    CMerkleTx(const CTransaction& txIn) : CTransaction(txIn)
    {
        Init();
    }

    void Init()
    {
        hashBlock = 0;
        nIndex = -1;
        fMerkleVerified = false;
    }

    IMPLEMENT_SERIALIZE
    (
        // Serialize the base transaction first, then the merkle linkage.
        nSerSize += SerReadWrite(s, *(CTransaction*)this, nType, nVersion, ser_action);
        nVersion = this->nVersion;
        READWRITE(hashBlock);
        READWRITE(vMerkleBranch);
        READWRITE(nIndex);
    )

    int SetMerkleBranch(const CBlock* pblock=NULL);
    int GetDepthInMainChain(CBlockIndex* &pindexRet) const;
    int GetDepthInMainChain() const { CBlockIndex *pindexRet; return GetDepthInMainChain(pindexRet); }
    bool IsInMainChain() const { return GetDepthInMainChain() > 0; }
    int GetBlocksToMaturity() const;
    bool AcceptToMemoryPool(bool fCheckInputs=true, bool fLimitFree=true);
};

/** Data structure that represents a partial merkle tree.
 *
 * It represents a subset of the txid's of a known block, in a way that
 * allows recovery of the list of txid's and the merkle root, in an
 * authenticated way.
 *
 * The encoding works as follows: we traverse the tree in depth-first order,
 * storing a bit for each traversed node, signifying whether the node is the
 * parent of at least one matched leaf txid (or a matched txid itself). In
 * case we are at the leaf level, or this bit is 0, its merkle node hash is
 * stored, and its children are not explored further. Otherwise, no hash is
 * stored, but we recurse into both (or the only) child branch. During
 * decoding, the same depth-first traversal is performed, consuming bits and
 * hashes as they are written during encoding.
* * The serialization is fixed and provides a hard guarantee about the * encoded size: * * SIZE <= 10 + ceil(32.25*N) * * Where N represents the number of leaf nodes of the partial tree. N itself * is bounded by: * * N <= total_transactions * N <= 1 + matched_transactions*tree_height * * The serialization format: * - uint32 total_transactions (4 bytes) * - varint number of hashes (1-3 bytes) * - uint256[] hashes in depth-first order (<= 32*N bytes) * - varint number of bytes of flag bits (1-3 bytes) * - byte[] flag bits, packed per 8 in a byte, least significant bit first (<= 2*N-1 bits) * The size constraints follow from this. */ class CPartialMerkleTree { protected: // the total number of transactions in the block unsigned int nTransactions; // node-is-parent-of-matched-txid bits std::vector<bool> vBits; // txids and internal hashes std::vector<uint256> vHash; // flag set when encountering invalid data bool fBad; // helper function to efficiently calculate the number of nodes at given height in the merkle tree unsigned int CalcTreeWidth(int height) { return (nTransactions+(1 << height)-1) >> height; } // calculate the hash of a node in the merkle tree (at leaf level: the txid's themself) uint256 CalcHash(int height, unsigned int pos, const std::vector<uint256> &vTxid); // recursive function that traverses tree nodes, storing the data as bits and hashes void TraverseAndBuild(int height, unsigned int pos, const std::vector<uint256> &vTxid, const std::vector<bool> &vMatch); // recursive function that traverses tree nodes, consuming the bits and hashes produced by TraverseAndBuild. // it returns the hash of the respective node. 
uint256 TraverseAndExtract(int height, unsigned int pos, unsigned int &nBitsUsed, unsigned int &nHashUsed, std::vector<uint256> &vMatch); public: // serialization implementation IMPLEMENT_SERIALIZE( READWRITE(nTransactions); READWRITE(vHash); std::vector<unsigned char> vBytes; if (fRead) { READWRITE(vBytes); CPartialMerkleTree &us = *(const_cast<CPartialMerkleTree*>(this)); us.vBits.resize(vBytes.size() * 8); for (unsigned int p = 0; p < us.vBits.size(); p++) us.vBits[p] = (vBytes[p / 8] & (1 << (p % 8))) != 0; us.fBad = false; } else { vBytes.resize((vBits.size()+7)/8); for (unsigned int p = 0; p < vBits.size(); p++) vBytes[p / 8] |= vBits[p] << (p % 8); READWRITE(vBytes); } ) // Construct a partial merkle tree from a list of transaction id's, and a mask that selects a subset of them CPartialMerkleTree(const std::vector<uint256> &vTxid, const std::vector<bool> &vMatch); CPartialMerkleTree(); // extract the matching txid's represented by this partial merkle tree. // returns the merkle root, or 0 in case of failure uint256 ExtractMatches(std::vector<uint256> &vMatch); }; /** Nodes collect new transactions into a block, hash them into a hash tree, * and scan through nonce values to make the block's hash satisfy proof-of-work * requirements. When they solve the proof-of-work, they broadcast the block * to everyone and the block is added to the block chain. The first transaction * in the block is a special one that creates a new coin owned by the creator * of the block. 
*/ class CBlockHeader { public: // header static const int CURRENT_VERSION=2; int nVersion; uint256 hashPrevBlock; uint256 hashMerkleRoot; unsigned int nTime; unsigned int nBits; unsigned int nNonce; CBlockHeader() { SetNull(); } IMPLEMENT_SERIALIZE ( READWRITE(this->nVersion); nVersion = this->nVersion; READWRITE(hashPrevBlock); READWRITE(hashMerkleRoot); READWRITE(nTime); READWRITE(nBits); READWRITE(nNonce); ) void SetNull() { nVersion = CBlockHeader::CURRENT_VERSION; hashPrevBlock = 0; hashMerkleRoot = 0; nTime = 0; nBits = 0; nNonce = 0; } bool IsNull() const { return (nBits == 0); } uint256 GetHash() const { return Hash(BEGIN(nVersion), END(nNonce)); } int64 GetBlockTime() const { return (int64)nTime; } void UpdateTime(const CBlockIndex* pindexPrev); }; class CBlock : public CBlockHeader { public: // network and disk std::vector<CTransaction> vtx; // memory only mutable std::vector<uint256> vMerkleTree; CBlock() { SetNull(); } CBlock(const CBlockHeader &header) { SetNull(); *((CBlockHeader*)this) = header; } IMPLEMENT_SERIALIZE ( READWRITE(*(CBlockHeader*)this); READWRITE(vtx); ) void SetNull() { CBlockHeader::SetNull(); vtx.clear(); vMerkleTree.clear(); } uint256 GetPoWHash() const { uint256 thash; scrypt_1024_1_1_256(BEGIN(nVersion), BEGIN(thash)); return thash; } CBlockHeader GetBlockHeader() const { CBlockHeader block; block.nVersion = nVersion; block.hashPrevBlock = hashPrevBlock; block.hashMerkleRoot = hashMerkleRoot; block.nTime = nTime; block.nBits = nBits; block.nNonce = nNonce; return block; } uint256 BuildMerkleTree() const { vMerkleTree.clear(); BOOST_FOREACH(const CTransaction& tx, vtx) vMerkleTree.push_back(tx.GetHash()); int j = 0; for (int nSize = vtx.size(); nSize > 1; nSize = (nSize + 1) / 2) { for (int i = 0; i < nSize; i += 2) { int i2 = std::min(i+1, nSize-1); vMerkleTree.push_back(Hash(BEGIN(vMerkleTree[j+i]), END(vMerkleTree[j+i]), BEGIN(vMerkleTree[j+i2]), END(vMerkleTree[j+i2]))); } j += nSize; } return (vMerkleTree.empty() ? 
0 : vMerkleTree.back()); } const uint256 &GetTxHash(unsigned int nIndex) const { assert(vMerkleTree.size() > 0); // BuildMerkleTree must have been called first assert(nIndex < vtx.size()); return vMerkleTree[nIndex]; } std::vector<uint256> GetMerkleBranch(int nIndex) const { if (vMerkleTree.empty()) BuildMerkleTree(); std::vector<uint256> vMerkleBranch; int j = 0; for (int nSize = vtx.size(); nSize > 1; nSize = (nSize + 1) / 2) { int i = std::min(nIndex^1, nSize-1); vMerkleBranch.push_back(vMerkleTree[j+i]); nIndex >>= 1; j += nSize; } return vMerkleBranch; } static uint256 CheckMerkleBranch(uint256 hash, const std::vector<uint256>& vMerkleBranch, int nIndex) { if (nIndex == -1) return 0; BOOST_FOREACH(const uint256& otherside, vMerkleBranch) { if (nIndex & 1) hash = Hash(BEGIN(otherside), END(otherside), BEGIN(hash), END(hash)); else hash = Hash(BEGIN(hash), END(hash), BEGIN(otherside), END(otherside)); nIndex >>= 1; } return hash; } bool WriteToDisk(CDiskBlockPos &pos) { // Open history file to append CAutoFile fileout = CAutoFile(OpenBlockFile(pos), SER_DISK, CLIENT_VERSION); if (!fileout) return error("CBlock::WriteToDisk() : OpenBlockFile failed"); // Write index header unsigned int nSize = fileout.GetSerializeSize(*this); fileout << FLATDATA(pchMessageStart) << nSize; // Write block long fileOutPos = ftell(fileout); if (fileOutPos < 0) return error("CBlock::WriteToDisk() : ftell failed"); pos.nPos = (unsigned int)fileOutPos; fileout << *this; // Flush stdio buffers and commit to disk before returning fflush(fileout); if (!IsInitialBlockDownload()) FileCommit(fileout); return true; } bool ReadFromDisk(const CDiskBlockPos &pos) { SetNull(); // Open history file to read CAutoFile filein = CAutoFile(OpenBlockFile(pos, true), SER_DISK, CLIENT_VERSION); if (!filein) return error("CBlock::ReadFromDisk() : OpenBlockFile failed"); // Read block try { filein >> *this; } catch (std::exception &e) { return error("%s() : deserialize or I/O error", __PRETTY_FUNCTION__); } 
// Check the header if (!CheckProofOfWork(GetPoWHash(), nBits)) return error("CBlock::ReadFromDisk() : errors in block header"); return true; } void print() const { printf("CBlock(hash=%s, input=%s, PoW=%s, ver=%d, hashPrevBlock=%s, hashMerkleRoot=%s, nTime=%u, nBits=%08x, nNonce=%u, vtx=%"PRIszu")\n", GetHash().ToString().c_str(), HexStr(BEGIN(nVersion),BEGIN(nVersion)+80,false).c_str(), GetPoWHash().ToString().c_str(), nVersion, hashPrevBlock.ToString().c_str(), hashMerkleRoot.ToString().c_str(), nTime, nBits, nNonce, vtx.size()); for (unsigned int i = 0; i < vtx.size(); i++) { printf(" "); vtx[i].print(); } printf(" vMerkleTree: "); for (unsigned int i = 0; i < vMerkleTree.size(); i++) printf("%s ", vMerkleTree[i].ToString().c_str()); printf("\n"); } /** Undo the effects of this block (with given index) on the UTXO set represented by coins. * In case pfClean is provided, operation will try to be tolerant about errors, and *pfClean * will be true if no problems were found. Otherwise, the return value will be false in case * of problems. Note that in any case, coins may be modified. 
*/ bool DisconnectBlock(CValidationState &state, CBlockIndex *pindex, CCoinsViewCache &coins, bool *pfClean = NULL); // Apply the effects of this block (with given index) on the UTXO set represented by coins bool ConnectBlock(CValidationState &state, CBlockIndex *pindex, CCoinsViewCache &coins, bool fJustCheck=false); // Read a block from disk bool ReadFromDisk(const CBlockIndex* pindex); // Add this block to the block index, and if necessary, switch the active block chain to this bool AddToBlockIndex(CValidationState &state, const CDiskBlockPos &pos); // Context-independent validity checks bool CheckBlock(CValidationState &state, bool fCheckPOW=true, bool fCheckMerkleRoot=true) const; // Store block on disk // if dbp is provided, the file is known to already reside on disk bool AcceptBlock(CValidationState &state, CDiskBlockPos *dbp = NULL); }; class CBlockFileInfo { public: unsigned int nBlocks; // number of blocks stored in file unsigned int nSize; // number of used bytes of block file unsigned int nUndoSize; // number of used bytes in the undo file unsigned int nHeightFirst; // lowest height of block in file unsigned int nHeightLast; // highest height of block in file uint64 nTimeFirst; // earliest time of block in file uint64 nTimeLast; // latest time of block in file IMPLEMENT_SERIALIZE( READWRITE(VARINT(nBlocks)); READWRITE(VARINT(nSize)); READWRITE(VARINT(nUndoSize)); READWRITE(VARINT(nHeightFirst)); READWRITE(VARINT(nHeightLast)); READWRITE(VARINT(nTimeFirst)); READWRITE(VARINT(nTimeLast)); ) void SetNull() { nBlocks = 0; nSize = 0; nUndoSize = 0; nHeightFirst = 0; nHeightLast = 0; nTimeFirst = 0; nTimeLast = 0; } CBlockFileInfo() { SetNull(); } std::string ToString() const { return strprintf("CBlockFileInfo(blocks=%u, size=%u, heights=%u...%u, time=%s...%s)", nBlocks, nSize, nHeightFirst, nHeightLast, DateTimeStrFormat("%Y-%m-%d", nTimeFirst).c_str(), DateTimeStrFormat("%Y-%m-%d", nTimeLast).c_str()); } // update statistics (does not update nSize) void 
AddBlock(unsigned int nHeightIn, uint64 nTimeIn) { if (nBlocks==0 || nHeightFirst > nHeightIn) nHeightFirst = nHeightIn; if (nBlocks==0 || nTimeFirst > nTimeIn) nTimeFirst = nTimeIn; nBlocks++; if (nHeightIn > nHeightFirst) nHeightLast = nHeightIn; if (nTimeIn > nTimeLast) nTimeLast = nTimeIn; } }; extern CCriticalSection cs_LastBlockFile; extern CBlockFileInfo infoLastBlockFile; extern int nLastBlockFile; enum BlockStatus { BLOCK_VALID_UNKNOWN = 0, BLOCK_VALID_HEADER = 1, // parsed, version ok, hash satisfies claimed PoW, 1 <= vtx count <= max, timestamp not in future BLOCK_VALID_TREE = 2, // parent found, difficulty matches, timestamp >= median previous, checkpoint BLOCK_VALID_TRANSACTIONS = 3, // only first tx is coinbase, 2 <= coinbase input script length <= 100, transactions valid, no duplicate txids, sigops, size, merkle root BLOCK_VALID_CHAIN = 4, // outputs do not overspend inputs, no double spends, coinbase output ok, immature coinbase spends, BIP30 BLOCK_VALID_SCRIPTS = 5, // scripts/signatures ok BLOCK_VALID_MASK = 7, BLOCK_HAVE_DATA = 8, // full block available in blk*.dat BLOCK_HAVE_UNDO = 16, // undo data available in rev*.dat BLOCK_HAVE_MASK = 24, BLOCK_FAILED_VALID = 32, // stage after last reached validness failed BLOCK_FAILED_CHILD = 64, // descends from failed block BLOCK_FAILED_MASK = 96 }; /** The block chain is a tree shaped structure starting with the * genesis block at the root, with each block potentially having multiple * candidates to be the next block. pprev and pnext link a path through the * main/longest chain. A blockindex may have multiple pprev pointing back * to it, but pnext will only point forward to the longest branch, or will * be null if the block is not part of the longest chain. */ class CBlockIndex { public: // pointer to the hash of the block, if any. 
memory is owned by this CBlockIndex const uint256* phashBlock; // pointer to the index of the predecessor of this block CBlockIndex* pprev; // (memory only) pointer to the index of the *active* successor of this block CBlockIndex* pnext; // height of the entry in the chain. The genesis block has height 0 int nHeight; // Which # file this block is stored in (blk?????.dat) int nFile; // Byte offset within blk?????.dat where this block's data is stored unsigned int nDataPos; // Byte offset within rev?????.dat where this block's undo data is stored unsigned int nUndoPos; // (memory only) Total amount of work (expected number of hashes) in the chain up to and including this block uint256 nChainWork; // Number of transactions in this block. // Note: in a potential headers-first mode, this number cannot be relied upon unsigned int nTx; // (memory only) Number of transactions in the chain up to and including this block unsigned int nChainTx; // change to 64-bit type when necessary; won't happen before 2030 // Verification status of this block. 
See enum BlockStatus unsigned int nStatus; // block header int nVersion; uint256 hashMerkleRoot; unsigned int nTime; unsigned int nBits; unsigned int nNonce; CBlockIndex() { phashBlock = NULL; pprev = NULL; pnext = NULL; nHeight = 0; nFile = 0; nDataPos = 0; nUndoPos = 0; nChainWork = 0; nTx = 0; nChainTx = 0; nStatus = 0; nVersion = 0; hashMerkleRoot = 0; nTime = 0; nBits = 0; nNonce = 0; } CBlockIndex(CBlockHeader& block) { phashBlock = NULL; pprev = NULL; pnext = NULL; nHeight = 0; nFile = 0; nDataPos = 0; nUndoPos = 0; nChainWork = 0; nTx = 0; nChainTx = 0; nStatus = 0; nVersion = block.nVersion; hashMerkleRoot = block.hashMerkleRoot; nTime = block.nTime; nBits = block.nBits; nNonce = block.nNonce; } CDiskBlockPos GetBlockPos() const { CDiskBlockPos ret; if (nStatus & BLOCK_HAVE_DATA) { ret.nFile = nFile; ret.nPos = nDataPos; } return ret; } CDiskBlockPos GetUndoPos() const { CDiskBlockPos ret; if (nStatus & BLOCK_HAVE_UNDO) { ret.nFile = nFile; ret.nPos = nUndoPos; } return ret; } CBlockHeader GetBlockHeader() const { CBlockHeader block; block.nVersion = nVersion; if (pprev) block.hashPrevBlock = pprev->GetBlockHash(); block.hashMerkleRoot = hashMerkleRoot; block.nTime = nTime; block.nBits = nBits; block.nNonce = nNonce; return block; } uint256 GetBlockHash() const { return *phashBlock; } int64 GetBlockTime() const { return (int64)nTime; } CBigNum GetBlockWork() const { CBigNum bnTarget; bnTarget.SetCompact(nBits); if (bnTarget <= 0) return 0; return (CBigNum(1)<<256) / (bnTarget+1); } bool IsInMainChain() const { return (pnext || this == pindexBest); } bool CheckIndex() const { /** Scrypt is used for block proof-of-work, but for purposes of performance the index internally uses sha256. * This check was considered unneccessary given the other safeguards like the genesis and checkpoints. 
*/
        return true;
        // return CheckProofOfWork(GetBlockHash(), nBits);
    }

    // Number of recent blocks the timestamp median is taken over.
    enum { nMedianTimeSpan=11 };

    // Median timestamp of the last (up to) nMedianTimeSpan blocks ending at
    // this one; walks back along pprev, so a short chain yields fewer samples.
    int64 GetMedianTimePast() const
    {
        int64 pmedian[nMedianTimeSpan];
        int64* pbegin = &pmedian[nMedianTimeSpan];
        int64* pend = &pmedian[nMedianTimeSpan];

        // Fill the array back-to-front so [pbegin, pend) holds exactly the
        // collected block times, newest last.
        const CBlockIndex* pindex = this;
        for (int i = 0; i < nMedianTimeSpan && pindex; i++, pindex = pindex->pprev)
            *(--pbegin) = pindex->GetBlockTime();

        std::sort(pbegin, pend);
        return pbegin[(pend - pbegin)/2];
    }

    // Median time as seen from nMedianTimeSpan/2 blocks further along pnext;
    // falls back to this block's own time when that close to the tip.
    int64 GetMedianTime() const
    {
        const CBlockIndex* pindex = this;
        for (int i = 0; i < nMedianTimeSpan/2; i++)
        {
            if (!pindex->pnext)
                return GetBlockTime();
            pindex = pindex->pnext;
        }
        return pindex->GetMedianTimePast();
    }

    /**
     * Returns true if there are nRequired or more blocks of minVersion or above
     * in the last nToCheck blocks, starting at pstart and going backwards.
     */
    static bool IsSuperMajority(int minVersion, const CBlockIndex* pstart,
                                unsigned int nRequired, unsigned int nToCheck);

    std::string ToString() const
    {
        return strprintf("CBlockIndex(pprev=%p, pnext=%p, nHeight=%d, merkle=%s, hashBlock=%s)",
            pprev, pnext, nHeight,
            hashMerkleRoot.ToString().c_str(),
            GetBlockHash().ToString().c_str());
    }

    void print() const
    {
        printf("%s\n", ToString().c_str());
    }
};

// Orders CBlockIndex pointers by increasing nChainWork, with the block hash as
// a deterministic tie-breaker, so the most-work candidate sorts last in an
// ordered container. Equal pointers/identical blocks compare as equivalent.
struct CBlockIndexWorkComparator
{
    bool operator()(CBlockIndex *pa, CBlockIndex *pb) {
        if (pa->nChainWork > pb->nChainWork) return false;
        if (pa->nChainWork < pb->nChainWork) return true;

        if (pa->GetBlockHash() < pb->GetBlockHash()) return false;
        if (pa->GetBlockHash() > pb->GetBlockHash()) return true;

        return false; // identical blocks
    }
};

/** Used to marshal pointers into hashes for db storage. */
class CDiskBlockIndex : public CBlockIndex
{
public:
    uint256 hashPrev;   // hash of the previous block, stored in place of the pprev pointer

    CDiskBlockIndex() {
        hashPrev = 0;
    }

    explicit CDiskBlockIndex(CBlockIndex* pindex) : CBlockIndex(*pindex) {
        hashPrev = (pprev ?
pprev->GetBlockHash() : 0); } IMPLEMENT_SERIALIZE ( if (!(nType & SER_GETHASH)) READWRITE(VARINT(nVersion)); READWRITE(VARINT(nHeight)); READWRITE(VARINT(nStatus)); READWRITE(VARINT(nTx)); if (nStatus & (BLOCK_HAVE_DATA | BLOCK_HAVE_UNDO)) READWRITE(VARINT(nFile)); if (nStatus & BLOCK_HAVE_DATA) READWRITE(VARINT(nDataPos)); if (nStatus & BLOCK_HAVE_UNDO) READWRITE(VARINT(nUndoPos)); // block header READWRITE(this->nVersion); READWRITE(hashPrev); READWRITE(hashMerkleRoot); READWRITE(nTime); READWRITE(nBits); READWRITE(nNonce); ) uint256 GetBlockHash() const { CBlockHeader block; block.nVersion = nVersion; block.hashPrevBlock = hashPrev; block.hashMerkleRoot = hashMerkleRoot; block.nTime = nTime; block.nBits = nBits; block.nNonce = nNonce; return block.GetHash(); } std::string ToString() const { std::string str = "CDiskBlockIndex("; str += CBlockIndex::ToString(); str += strprintf("\n hashBlock=%s, hashPrev=%s)", GetBlockHash().ToString().c_str(), hashPrev.ToString().c_str()); return str; } void print() const { printf("%s\n", ToString().c_str()); } }; /** Capture information about block/transaction validation */ class CValidationState { private: enum mode_state { MODE_VALID, // everything ok MODE_INVALID, // network rule violation (DoS value may be set) MODE_ERROR, // run-time error } mode; int nDoS; bool corruptionPossible; public: CValidationState() : mode(MODE_VALID), nDoS(0), corruptionPossible(false) {} bool DoS(int level, bool ret = false, bool corruptionIn = false) { if (mode == MODE_ERROR) return ret; nDoS += level; mode = MODE_INVALID; corruptionPossible = corruptionIn; return ret; } bool Invalid(bool ret = false) { return DoS(0, ret); } bool Error() { mode = MODE_ERROR; return false; } bool Abort(const std::string &msg) { AbortNode(msg); return Error(); } bool IsValid() { return mode == MODE_VALID; } bool IsInvalid() { return mode == MODE_INVALID; } bool IsError() { return mode == MODE_ERROR; } bool IsInvalid(int &nDoSOut) { if (IsInvalid()) { nDoSOut = 
nDoS; return true; } return false; } bool CorruptionPossible() { return corruptionPossible; } }; /** Describes a place in the block chain to another node such that if the * other node doesn't have the same branch, it can find a recent common trunk. * The further back it is, the further before the fork it may be. */ class CBlockLocator { protected: std::vector<uint256> vHave; public: CBlockLocator() { } explicit CBlockLocator(const CBlockIndex* pindex) { Set(pindex); } explicit CBlockLocator(uint256 hashBlock) { std::map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hashBlock); if (mi != mapBlockIndex.end()) Set((*mi).second); } CBlockLocator(const std::vector<uint256>& vHaveIn) { vHave = vHaveIn; } IMPLEMENT_SERIALIZE ( if (!(nType & SER_GETHASH)) READWRITE(nVersion); READWRITE(vHave); ) void SetNull() { vHave.clear(); } bool IsNull() { return vHave.empty(); } void Set(const CBlockIndex* pindex) { vHave.clear(); int nStep = 1; while (pindex) { vHave.push_back(pindex->GetBlockHash()); // Exponentially larger steps back for (int i = 0; pindex && i < nStep; i++) pindex = pindex->pprev; if (vHave.size() > 10) nStep *= 2; } vHave.push_back(hashGenesisBlock); } int GetDistanceBack() { // Retrace how far back it was in the sender's branch int nDistance = 0; int nStep = 1; BOOST_FOREACH(const uint256& hash, vHave) { std::map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hash); if (mi != mapBlockIndex.end()) { CBlockIndex* pindex = (*mi).second; if (pindex->IsInMainChain()) return nDistance; } nDistance += nStep; if (nDistance > 10) nStep *= 2; } return nDistance; } CBlockIndex* GetBlockIndex() { // Find the first block the caller has in the main chain BOOST_FOREACH(const uint256& hash, vHave) { std::map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hash); if (mi != mapBlockIndex.end()) { CBlockIndex* pindex = (*mi).second; if (pindex->IsInMainChain()) return pindex; } } return pindexGenesisBlock; } uint256 GetBlockHash() { // Find the first 
block the caller has in the main chain BOOST_FOREACH(const uint256& hash, vHave) { std::map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hash); if (mi != mapBlockIndex.end()) { CBlockIndex* pindex = (*mi).second; if (pindex->IsInMainChain()) return hash; } } return hashGenesisBlock; } int GetHeight() { CBlockIndex* pindex = GetBlockIndex(); if (!pindex) return 0; return pindex->nHeight; } }; class CTxMemPool { public: mutable CCriticalSection cs; std::map<uint256, CTransaction> mapTx; std::map<COutPoint, CInPoint> mapNextTx; bool accept(CValidationState &state, CTransaction &tx, bool fCheckInputs, bool fLimitFree, bool* pfMissingInputs); bool addUnchecked(const uint256& hash, const CTransaction &tx); bool remove(const CTransaction &tx, bool fRecursive = false); bool removeConflicts(const CTransaction &tx); void clear(); void queryHashes(std::vector<uint256>& vtxid); void pruneSpent(const uint256& hash, CCoins &coins); unsigned long size() { LOCK(cs); return mapTx.size(); } bool exists(uint256 hash) { return (mapTx.count(hash) != 0); } CTransaction& lookup(uint256 hash) { return mapTx[hash]; } }; extern CTxMemPool mempool; struct CCoinsStats { int nHeight; uint256 hashBlock; uint64 nTransactions; uint64 nTransactionOutputs; uint64 nSerializedSize; uint256 hashSerialized; int64 nTotalAmount; CCoinsStats() : nHeight(0), hashBlock(0), nTransactions(0), nTransactionOutputs(0), nSerializedSize(0), hashSerialized(0), nTotalAmount(0) {} }; /** Abstract view on the open txout dataset. */ class CCoinsView { public: // Retrieve the CCoins (unspent transaction outputs) for a given txid virtual bool GetCoins(const uint256 &txid, CCoins &coins); // Modify the CCoins for a given txid virtual bool SetCoins(const uint256 &txid, const CCoins &coins); // Just check whether we have data for a given txid. 
// This may (but cannot always) return true for fully spent transactions virtual bool HaveCoins(const uint256 &txid); // Retrieve the block index whose state this CCoinsView currently represents virtual CBlockIndex *GetBestBlock(); // Modify the currently active block index virtual bool SetBestBlock(CBlockIndex *pindex); // Do a bulk modification (multiple SetCoins + one SetBestBlock) virtual bool BatchWrite(const std::map<uint256, CCoins> &mapCoins, CBlockIndex *pindex); // Calculate statistics about the unspent transaction output set virtual bool GetStats(CCoinsStats &stats); // As we use CCoinsViews polymorphically, have a virtual destructor virtual ~CCoinsView() {} }; /** CCoinsView backed by another CCoinsView */ class CCoinsViewBacked : public CCoinsView { protected: CCoinsView *base; public: CCoinsViewBacked(CCoinsView &viewIn); bool GetCoins(const uint256 &txid, CCoins &coins); bool SetCoins(const uint256 &txid, const CCoins &coins); bool HaveCoins(const uint256 &txid); CBlockIndex *GetBestBlock(); bool SetBestBlock(CBlockIndex *pindex); void SetBackend(CCoinsView &viewIn); bool BatchWrite(const std::map<uint256, CCoins> &mapCoins, CBlockIndex *pindex); bool GetStats(CCoinsStats &stats); }; /** CCoinsView that adds a memory cache for transactions to another CCoinsView */ class CCoinsViewCache : public CCoinsViewBacked { protected: CBlockIndex *pindexTip; std::map<uint256,CCoins> cacheCoins; public: CCoinsViewCache(CCoinsView &baseIn, bool fDummy = false); // Standard CCoinsView methods bool GetCoins(const uint256 &txid, CCoins &coins); bool SetCoins(const uint256 &txid, const CCoins &coins); bool HaveCoins(const uint256 &txid); CBlockIndex *GetBestBlock(); bool SetBestBlock(CBlockIndex *pindex); bool BatchWrite(const std::map<uint256, CCoins> &mapCoins, CBlockIndex *pindex); // Return a modifiable reference to a CCoins. Check HaveCoins first. // Many methods explicitly require a CCoinsViewCache because of this method, to reduce // copying. 
CCoins &GetCoins(const uint256 &txid); // Push the modifications applied to this cache to its base. // Failure to call this method before destruction will cause the changes to be forgotten. bool Flush(); // Calculate the size of the cache (in number of transactions) unsigned int GetCacheSize(); private: std::map<uint256,CCoins>::iterator FetchCoins(const uint256 &txid); }; /** CCoinsView that brings transactions from a memorypool into view. It does not check for spendings by memory pool transactions. */ class CCoinsViewMemPool : public CCoinsViewBacked { protected: CTxMemPool &mempool; public: CCoinsViewMemPool(CCoinsView &baseIn, CTxMemPool &mempoolIn); bool GetCoins(const uint256 &txid, CCoins &coins); bool HaveCoins(const uint256 &txid); }; /** Global variable that points to the active CCoinsView (protected by cs_main) */ extern CCoinsViewCache *pcoinsTip; /** Global variable that points to the active block tree (protected by cs_main) */ extern CBlockTreeDB *pblocktree; struct CBlockTemplate { CBlock block; std::vector<int64_t> vTxFees; std::vector<int64_t> vTxSigOps; }; #if defined(_M_IX86) || defined(__i386__) || defined(__i386) || defined(_M_X64) || defined(__x86_64__) || defined(_M_AMD64) extern unsigned int cpuid_edx; #endif /** Used to relay blocks as header + vector<merkle branch> * to filtered nodes. */ class CMerkleBlock { public: // Public only for unit testing CBlockHeader header; CPartialMerkleTree txn; public: // Public only for unit testing and relay testing // (not relayed) std::vector<std::pair<unsigned int, uint256> > vMatchedTxn; // Create from a CBlock, filtering transactions according to filter // Note that this will call IsRelevantAndUpdate on the filter for each transaction, // thus the filter will likely be modified. CMerkleBlock(const CBlock& block, CBloomFilter& filter); IMPLEMENT_SERIALIZE ( READWRITE(header); READWRITE(txn); ) }; #endif
{ "content_hash": "9edf9c2774956565464b486d19456fbe", "timestamp": "", "source": "github", "line_count": 2288, "max_line_length": 239, "avg_line_length": 31.512237762237763, "alnum_prop": 0.6182108183079057, "repo_name": "ronpaulcoin/ronpaulcoin", "id": "67e03a994bad5b41b2288a860f9198e56e157d2f", "size": "72100", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main.h", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "102799" }, { "name": "C++", "bytes": "2557941" }, { "name": "CSS", "bytes": "1127" }, { "name": "IDL", "bytes": "14764" }, { "name": "Objective-C", "bytes": "5864" }, { "name": "Python", "bytes": "69712" }, { "name": "Shell", "bytes": "9702" }, { "name": "TypeScript", "bytes": "5248977" } ], "symlink_target": "" }
<?php /** * PlugintdGuestbook form. * * @package tdGuestbookPlugin * @subpackage form * @author Tomasz Ducin <tomasz.ducin@gmail.com> * @version SVN: $Id: sfDoctrineFormPluginTemplate.php 23810 2009-11-12 11:07:44Z Kris.Wallsmith $ */ abstract class PlugintdGuestbookForm extends BasetdGuestbookForm { public function setup() { parent::setup(); $this->removeFields(); $this->manageWidgets(); $this->manageValidators(); // $this->manageCaptcha(); } protected function removeFields() { unset($this['created_at'], $this['updated_at']); } protected function manageWidgets() { $this->setWidget('author', new sfWidgetFormInputText(array(), array('size' => '30'))); $this->setWidget('email', new sfWidgetFormInputText(array(), array('size' => '30'))); $this->setWidget('http', new sfWidgetFormInputText(array(), array('size' => '30'))); $this->setWidget('text', new sfWidgetFormTextarea(array(), array('cols' => '80', 'rows' => '8'))); } protected function manageValidators() { $this->setValidator('author', new sfValidatorString(array(), array('required' => 'Musisz podać autora.'))); $this->setValidator('text', new sfValidatorString(array(), array('required' => 'Musisz podać treść.'))); $this->setValidator('email', new sfValidatorEmail(array('required' => false), array('invalid' => 'Wpisz poprawny adres E-mail.'))); } protected function manageCaptcha() { $this->setWidget('captcha', new sfWidgetFormInput(array(), array('size' => '30'))); $this->widgetSchema->setLabel('captcha', 'Wpisz kod z obrazka'); $this->setValidator('captcha', new sfValidatorSfCryptoCaptcha( array('required' => true, 'trim' => true), array('wrong_captcha' => 'Kod który wpisałeś jest niepoprawny.', 'required' => 'Musisz wpisać kod z obrazka poniżej.'))); $this->errorSchema = new sfValidatorErrorSchema($this->validatorSchema); } }
{ "content_hash": "c245179b9de5d083120869564a51cd20", "timestamp": "", "source": "github", "line_count": 64, "max_line_length": 108, "avg_line_length": 30.921875, "alnum_prop": 0.6523496715512885, "repo_name": "Symfony-Plugins/tdGuestbookPlugin", "id": "02601fc66e42abd4008231d16229927385582eeb", "size": "1988", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/form/doctrine/PlugintdGuestbookForm.class.php", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "19819" } ], "symlink_target": "" }
""" Created on Tue May 23 12:08:24 2017 @author: ibackus """ import os _directory = os.path.dirname(os.path.realpath(__file__)) execfile(os.path.join(_directory, '_default_config.py')) execfile(os.path.join(_directory, '_user_config.py')) # ------------------------------------------- # Setup # ------------------------------------------- # Set-up the icdirs ICBaseDir = os.path.join(_directory, ICBaseDir) for k, v in icdirs.iteritems(): icdirs[k] = os.path.join(ICBaseDir, v) # Keep a list of test names tests = icdirs.keys()
{ "content_hash": "475c701b0f56b86cc4bcd135dfaa408c", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 56, "avg_line_length": 28.105263157894736, "alnum_prop": 0.5786516853932584, "repo_name": "ibackus/compare-changa-builds", "id": "64c4fccaafa3e1f191a6bb5fa3b2b0d6b8e4cbf3", "size": "581", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "compchanga/config.py", "mode": "33188", "license": "mit", "language": [ { "name": "M4", "bytes": "17526" }, { "name": "Makefile", "bytes": "11679" }, { "name": "Python", "bytes": "71735" }, { "name": "Shell", "bytes": "29" } ], "symlink_target": "" }
package org.spanna.event.block; import org.spanna.Location; import org.spanna.block.Block; import org.v.block.BlockFace; import org.spanna.event.HandlerList; /** * Called when a piston retracts */ public class BlockPistonRetractEvent extends BlockPistonEvent { private static final HandlerList handlers = new HandlerList(); public BlockPistonRetractEvent(final Block block, final BlockFace direction) { super(block, direction); } /** * Gets the location where the possible moving block might be if the * retracting piston is sticky. * * @return The possible location of the possibly moving block. */ public Location getRetractLocation() { return getBlock().getRelative(getDirection(), 2).getLocation(); } @Override public HandlerList getHandlers() { return handlers; } public static HandlerList getHandlerList() { return handlers; } }
{ "content_hash": "96e5ea1046438d4d879f905e7e6cfdf4", "timestamp": "", "source": "github", "line_count": 35, "max_line_length": 82, "avg_line_length": 26.942857142857143, "alnum_prop": 0.6967126193001061, "repo_name": "SpannaProject/SpannaAPI", "id": "224d5be999ef5ebb702496c3858a51598d714ac6", "size": "943", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/org/spanna/event/block/BlockPistonRetractEvent.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "1831512" } ], "symlink_target": "" }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta name="generator" content="rustdoc"> <meta name="description" content="API documentation for the Rust `wopen` fn in crate `libc`."> <meta name="keywords" content="rust, rustlang, rust-lang, wopen"> <title>libc::funcs::posix88::fcntl::wopen - Rust</title> <link rel="stylesheet" type="text/css" href="../../../../main.css"> <link rel="shortcut icon" href="http://www.rust-lang.org/favicon.ico"> </head> <body class="rustdoc"> <!--[if lte IE 8]> <div class="warning"> This old browser is unsupported and will most likely display funky things. </div> <![endif]--> <section class="sidebar"> <a href='../../../../libc/index.html'><img src='http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png' alt='' width='100'></a> <p class='location'><a href='../../../index.html'>libc</a>::<wbr><a href='../../index.html'>funcs</a>::<wbr><a href='../index.html'>posix88</a>::<wbr><a href='index.html'>fcntl</a></p><script>window.sidebarCurrent = {name: 'wopen', ty: 'fn', relpath: ''};</script><script defer src="sidebar-items.js"></script> </section> <nav class="sub"> <form class="search-form js-only"> <div class="search-container"> <input class="search-input" name="search" autocomplete="off" placeholder="Click or press 'S' to search, '?' for more options..." 
type="search"> </div> </form> </nav> <section id='main' class="content fn"> <h1 class='fqn'><span class='in-band'><a href='../../../index.html'>libc</a>::<wbr><a href='../../index.html'>funcs</a>::<wbr><a href='../index.html'>posix88</a>::<wbr><a href='index.html'>fcntl</a>::<wbr><a class='fn' href=''>wopen</a><wbr><a class='stability Unstable' title=''>Unstable</a></span><span class='out-of-band'><span id='render-detail'> <a id="collapse-all" href="#">[-]</a>&nbsp;<a id="expand-all" href="#">[+]</a> </span><a id='src-2719' href='../../../../src/libc/lib.rs.html#4866-4867'>[src]</a></span></h1> <pre class='rust fn'>pub unsafe fn wopen(path: *const <a class='type' href='../../../../libc/types/os/arch/c95/type.wchar_t.html' title='libc::types::os::arch::c95::wchar_t'>wchar_t</a>, oflag: <a class='type' href='../../../../libc/types/os/arch/c95/type.c_int.html' title='libc::types::os::arch::c95::c_int'>c_int</a>, mode: <a class='type' href='../../../../libc/types/os/arch/c95/type.c_int.html' title='libc::types::os::arch::c95::c_int'>c_int</a>) -&gt; <a class='type' href='../../../../libc/types/os/arch/c95/type.c_int.html' title='libc::types::os::arch::c95::c_int'>c_int</a></pre></section> <section id='search' class="content hidden"></section> <section class="footer"></section> <div id="help" class="hidden"> <div class="shortcuts"> <h1>Keyboard shortcuts</h1> <dl> <dt>?</dt> <dd>Show this help dialog</dd> <dt>S</dt> <dd>Focus the search field</dd> <dt>&larrb;</dt> <dd>Move up in search results</dd> <dt>&rarrb;</dt> <dd>Move down in search results</dd> <dt>&#9166;</dt> <dd>Go to active search result</dd> </dl> </div> <div class="infos"> <h1>Search tricks</h1> <p> Prefix searches with a type followed by a colon (e.g. <code>fn:</code>) to restrict the search to a given type. </p> <p> Accepted types are: <code>fn</code>, <code>mod</code>, <code>struct</code>, <code>enum</code>, <code>trait</code>, <code>typedef</code> (or <code>tdef</code>). 
</p> </div> </div> <script> window.rootPath = "../../../../"; window.currentCrate = "libc"; window.playgroundUrl = "http://play.rust-lang.org/"; </script> <script src="../../../../jquery.js"></script> <script src="../../../../main.js"></script> <script src="../../../../playpen.js"></script> <script async src="../../../../search-index.js"></script> </body> </html>
{ "content_hash": "a50e9f44ea5c3ccba35106b980cfd5c9", "timestamp": "", "source": "github", "line_count": 95, "max_line_length": 601, "avg_line_length": 46.78947368421053, "alnum_prop": 0.5329583802024747, "repo_name": "ArcherSys/ArcherSys", "id": "c549e3f9f8f50be1305156e64dc80dbd4a4ad77a", "size": "4445", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Rust/share/doc/rust/html/libc/funcs/posix88/fcntl/fn.wopen.html", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
"""Parsing of gitignore files. For details for the matching rules, see https://git-scm.com/docs/gitignore """ import os.path import re from typing import ( BinaryIO, Iterable, List, Optional, TYPE_CHECKING, Dict, Union, ) if TYPE_CHECKING: from dulwich.repo import Repo from dulwich.config import get_xdg_config_home_path, Config def _translate_segment(segment: bytes) -> bytes: if segment == b"*": return b"[^/]+" res = b"" i, n = 0, len(segment) while i < n: c = segment[i : i + 1] i = i + 1 if c == b"*": res += b"[^/]*" elif c == b"?": res += b"[^/]" elif c == b"[": j = i if j < n and segment[j : j + 1] == b"!": j = j + 1 if j < n and segment[j : j + 1] == b"]": j = j + 1 while j < n and segment[j : j + 1] != b"]": j = j + 1 if j >= n: res += b"\\[" else: stuff = segment[i:j].replace(b"\\", b"\\\\") i = j + 1 if stuff.startswith(b"!"): stuff = b"^" + stuff[1:] elif stuff.startswith(b"^"): stuff = b"\\" + stuff res += b"[" + stuff + b"]" else: res += re.escape(c) return res def translate(pat: bytes) -> bytes: """Translate a shell PATTERN to a regular expression. There is no way to quote meta-characters. Originally copied from fnmatch in Python 2.7, but modified for Dulwich to cope with features in Git ignore patterns. """ res = b"(?ms)" if b"/" not in pat[:-1]: # If there's no slash, this is a filename-based match res += b"(.*/)?" if pat.startswith(b"**/"): # Leading **/ pat = pat[2:] res += b"(.*/)?" if pat.startswith(b"/"): pat = pat[1:] for i, segment in enumerate(pat.split(b"/")): if segment == b"**": res += b"(/.*)?" continue else: res += (re.escape(b"/") if i > 0 else b"") + _translate_segment(segment) if not pat.endswith(b"/"): res += b"/?" return res + b"\\Z" def read_ignore_patterns(f: BinaryIO) -> Iterable[bytes]: """Read a git ignore file. Args: f: File-like object to read from Returns: List of patterns """ for line in f: line = line.rstrip(b"\r\n") # Ignore blank lines, they're used for readability. 
if not line.strip(): continue if line.startswith(b"#"): # Comment continue # Trailing spaces are ignored unless they are quoted with a backslash. while line.endswith(b" ") and not line.endswith(b"\\ "): line = line[:-1] line = line.replace(b"\\ ", b" ") yield line def match_pattern(path: bytes, pattern: bytes, ignorecase: bool = False) -> bool: """Match a gitignore-style pattern against a path. Args: path: Path to match pattern: Pattern to match ignorecase: Whether to do case-sensitive matching Returns: bool indicating whether the pattern matched """ return Pattern(pattern, ignorecase).match(path) class Pattern(object): """A single ignore pattern.""" def __init__(self, pattern: bytes, ignorecase: bool = False): self.pattern = pattern self.ignorecase = ignorecase if pattern[0:1] == b"!": self.is_exclude = False pattern = pattern[1:] else: if pattern[0:1] == b"\\": pattern = pattern[1:] self.is_exclude = True flags = 0 if self.ignorecase: flags = re.IGNORECASE self._re = re.compile(translate(pattern), flags) def __bytes__(self) -> bytes: return self.pattern def __str__(self) -> str: return os.fsdecode(self.pattern) def __eq__(self, other: object) -> bool: return ( isinstance(other, type(self)) and self.pattern == other.pattern and self.ignorecase == other.ignorecase ) def __repr__(self) -> str: return "%s(%r, %r)" % ( type(self).__name__, self.pattern, self.ignorecase, ) def match(self, path: bytes) -> bool: """Try to match a path against this ignore pattern. 
Args: path: Path to match (relative to ignore location) Returns: boolean """ return bool(self._re.match(path)) class IgnoreFilter(object): def __init__(self, patterns: Iterable[bytes], ignorecase: bool = False, path=None): self._patterns = [] # type: List[Pattern] self._ignorecase = ignorecase self._path = path for pattern in patterns: self.append_pattern(pattern) def append_pattern(self, pattern: bytes) -> None: """Add a pattern to the set.""" self._patterns.append(Pattern(pattern, self._ignorecase)) def find_matching(self, path: Union[bytes, str]) -> Iterable[Pattern]: """Yield all matching patterns for path. Args: path: Path to match Returns: Iterator over iterators """ if not isinstance(path, bytes): path = os.fsencode(path) for pattern in self._patterns: if pattern.match(path): yield pattern def is_ignored(self, path: bytes) -> Optional[bool]: """Check whether a path is ignored. For directories, include a trailing slash. Returns: status is None if file is not mentioned, True if it is included, False if it is explicitly excluded. """ status = None for pattern in self.find_matching(path): status = pattern.is_exclude return status @classmethod def from_path(cls, path, ignorecase: bool = False) -> "IgnoreFilter": with open(path, "rb") as f: return cls(read_ignore_patterns(f), ignorecase, path=path) def __repr__(self) -> str: path = getattr(self, "_path", None) if path is not None: return "%s.from_path(%r)" % (type(self).__name__, path) else: return "<%s>" % (type(self).__name__) class IgnoreFilterStack(object): """Check for ignore status in multiple filters.""" def __init__(self, filters): self._filters = filters def is_ignored(self, path: str) -> Optional[bool]: """Check whether a path is explicitly included or excluded in ignores. Args: path: Path to check Returns: None if the file is not mentioned, True if it is included, False if it is explicitly excluded. 
""" status = None for filter in self._filters: status = filter.is_ignored(path) if status is not None: return status return status def default_user_ignore_filter_path(config: Config) -> str: """Return default user ignore filter path. Args: config: A Config object Returns: Path to a global ignore file """ try: return config.get((b"core",), b"excludesFile") except KeyError: pass return get_xdg_config_home_path("git", "ignore") class IgnoreFilterManager(object): """Ignore file manager.""" def __init__( self, top_path: str, global_filters: List[IgnoreFilter], ignorecase: bool, ): self._path_filters = {} # type: Dict[str, Optional[IgnoreFilter]] self._top_path = top_path self._global_filters = global_filters self._ignorecase = ignorecase def __repr__(self) -> str: return "%s(%s, %r, %r)" % ( type(self).__name__, self._top_path, self._global_filters, self._ignorecase, ) def _load_path(self, path: str) -> Optional[IgnoreFilter]: try: return self._path_filters[path] except KeyError: pass p = os.path.join(self._top_path, path, ".gitignore") try: self._path_filters[path] = IgnoreFilter.from_path(p, self._ignorecase) except IOError: self._path_filters[path] = None return self._path_filters[path] def find_matching(self, path: str) -> Iterable[Pattern]: """Find matching patterns for path. Args: path: Path to check Returns: Iterator over Pattern instances """ if os.path.isabs(path): raise ValueError("%s is an absolute path" % path) filters = [(0, f) for f in self._global_filters] if os.path.sep != "/": path = path.replace(os.path.sep, "/") parts = path.split("/") matches = [] for i in range(len(parts) + 1): dirname = "/".join(parts[:i]) for s, f in filters: relpath = "/".join(parts[s:i]) if i < len(parts): # Paths leading up to the final part are all directories, # so need a trailing slash. 
relpath += "/" matches += list(f.find_matching(relpath)) ignore_filter = self._load_path(dirname) if ignore_filter is not None: filters.insert(0, (i, ignore_filter)) return iter(matches) def is_ignored(self, path: str) -> Optional[bool]: """Check whether a path is explicitly included or excluded in ignores. Args: path: Path to check Returns: None if the file is not mentioned, True if it is included, False if it is explicitly excluded. """ matches = list(self.find_matching(path)) if matches: return matches[-1].is_exclude return None @classmethod def from_repo(cls, repo: "Repo") -> "IgnoreFilterManager": """Create a IgnoreFilterManager from a repository. Args: repo: Repository object Returns: A `IgnoreFilterManager` object """ global_filters = [] for p in [ os.path.join(repo.controldir(), "info", "exclude"), default_user_ignore_filter_path(repo.get_config_stack()), ]: try: global_filters.append(IgnoreFilter.from_path(os.path.expanduser(p))) except IOError: pass config = repo.get_config_stack() ignorecase = config.get_boolean((b"core"), (b"ignorecase"), False) return cls(repo.path, global_filters, ignorecase)
{ "content_hash": "e2cdf3d09bcda397210d61e25fb4f0f5", "timestamp": "", "source": "github", "line_count": 372, "max_line_length": 87, "avg_line_length": 28.967741935483872, "alnum_prop": 0.5319227913882703, "repo_name": "sonntagsgesicht/regtest", "id": "b75560f35c84987f5fe9a4ab835faf773f428756", "size": "11688", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": ".aux/venv/lib/python3.9/site-packages/dulwich/ignore.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "13888" } ], "symlink_target": "" }
package com.cache2.manager; import java.util.Map; public interface CacheManager<C extends Map<?, ?>> { /** * Get a cache from the map. If the cache is not found, it will create a new * one and put it in the map. * * @param name * @return cache */ C getCache(String name); /** * Put a cache into the map. * * @param cache */ C putCache(String name, C cache); /** * Remove a cache from the map. * * @param name */ void removeCache(String name); }
{ "content_hash": "a055ae523418440804d60c75dbd6b845", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 77, "avg_line_length": 16.266666666666666, "alnum_prop": 0.6168032786885246, "repo_name": "matthewlsawyer/cache2", "id": "4375aa1eb1bbc322c61e5070352a62cfd6127548", "size": "488", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/com/cache2/manager/CacheManager.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "30006" } ], "symlink_target": "" }
<?php class SiteHelper { public static function selectSites ( $extra = "", $extraTables = "" ) { $connection = Connection::getInstance(); $retrieveSitesSql = "SELECT site_id FROM core_sites" . $extraTables . " WHERE site_state <> 'D' " . $extra; return $connection->query($retrieveSitesSql); } public static function retrieveSites ( $extra = "", $extraTables = "" ) { $sites = array(); $retrieveSitesResult = self::selectSites ( $extra, $extraTables ); while($siteRow = mysql_fetch_assoc($retrieveSitesResult["query"])) $sites[] = new Site($siteRow["site_id"]); return $sites; } public static function dumpAllSites(&$sites, $module) { if (count($sites) > 0) { echo '<ul>'; foreach ($sites as $site) { $url = 'index.php?site_expand.control/'.$module.'/'.$site->__get('site_id'); $filter = " AND site_parent = ".$site->__get('site_id'); $next_sites = SiteHelper::retrieveSites($filter); $image = (count($next_sites) > 0) ? "imgcontrol/ico_tree.gif" : "imgcontrol/ico_page.gif"; echo " <li><a href=\"".$url."\" class=\"sidebar_02\"> <img src=\"".$image."\" /> <span>".$site->__get('site_name')."</span> </a>"; if (count($next_sites) > 0) self::dumpAllSites($next_sites, $module); echo "</li>"; } echo '</ul>'; } } } ?>
{ "content_hash": "fa83f751c68aa015429b7ac18a9470a2", "timestamp": "", "source": "github", "line_count": 91, "max_line_length": 95, "avg_line_length": 15.747252747252746, "alnum_prop": 0.5450104675505931, "repo_name": "hugomarin/artBO", "id": "4117e30c39928cda62018e96837c40d74140584f", "size": "1433", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "en/model/helpers/sitehelper.helper.php", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "124248" }, { "name": "ColdFusion", "bytes": "333388" }, { "name": "JavaScript", "bytes": "4044652" }, { "name": "Lasso", "bytes": "66388" }, { "name": "PHP", "bytes": "1512402" }, { "name": "Perl", "bytes": "74380" }, { "name": "Python", "bytes": "83098" }, { "name": "Ruby", "bytes": "887" }, { "name": "Shell", "bytes": "3304" } ], "symlink_target": "" }
package com.bitdubai.fermat_cbp_api.layer.cbp_sub_app_module.crypto_customer.exceptions;

import com.bitdubai.fermat_api.FermatException;

/**
 * Checked exception in the crypto-customer sub-app module.
 * Presumably signals that a request cannot be resolved at a later time —
 * name-derived; confirm against the throwing call sites.
 *
 * Created by eze on 2015.07.30..
 */
public class CantSolveRequestLaterException extends FermatException {

    /**
     * Builds the exception by delegating to {@link FermatException}.
     *
     * @param message        short description of why this exception happened; a public static
     *                       constant called DEFAULT_MESSAGE can be used here
     * @param cause          the exception that triggered the throwing of the current exception,
     *                       or {@code null} when there is no underlying exception to declare
     * @param context        a String that provides the values of the variables that could have
     *                       affected the exception
     * @param possibleReason an explicative reason of why we believe this exception was most
     *                       likely thrown
     */
    public CantSolveRequestLaterException(String message, Exception cause, String context, String possibleReason) {
        super(message, cause, context, possibleReason);
    }
}
{ "content_hash": "cad525594416a423938db8ebf752c558", "timestamp": "", "source": "github", "line_count": 20, "max_line_length": 175, "avg_line_length": 55.25, "alnum_prop": 0.7475113122171946, "repo_name": "fvasquezjatar/fermat-unused", "id": "e63df4a958889910f2f81b5690357de9b403ad6d", "size": "1105", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "fermat-cbp-api/src/main/java/com/bitdubai/fermat_cbp_api/layer/cbp_sub_app_module/crypto_customer/exceptions/CantSolveRequestLaterException.java", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "1204" }, { "name": "Groovy", "bytes": "76309" }, { "name": "HTML", "bytes": "322840" }, { "name": "Java", "bytes": "14027288" }, { "name": "Scala", "bytes": "1353" } ], "symlink_target": "" }
<html lang="en"> <head> <title>Function-like Macros - The C Preprocessor</title> <meta http-equiv="Content-Type" content="text/html"> <meta name="description" content="The C Preprocessor"> <meta name="generator" content="makeinfo 4.11"> <link title="Top" rel="start" href="index.html#Top"> <link rel="up" href="Macros.html#Macros" title="Macros"> <link rel="prev" href="Object_002dlike-Macros.html#Object_002dlike-Macros" title="Object-like Macros"> <link rel="next" href="Macro-Arguments.html#Macro-Arguments" title="Macro Arguments"> <link href="http://www.gnu.org/software/texinfo/" rel="generator-home" title="Texinfo Homepage"> <!-- Copyright (C) 1987-2014 Free Software Foundation, Inc. Permission is granted to copy, distribute and/or modify this document under the terms of the GNU Free Documentation License, Version 1.3 or any later version published by the Free Software Foundation. A copy of the license is included in the section entitled ``GNU Free Documentation License''. This manual contains no Invariant Sections. The Front-Cover Texts are (a) (see below), and the Back-Cover Texts are (b) (see below). (a) The FSF's Front-Cover Text is: A GNU Manual (b) The FSF's Back-Cover Text is: You have freedom to copy and modify this GNU Manual, like GNU software. Copies published by the Free Software Foundation raise funds for GNU development. 
--> <meta http-equiv="Content-Style-Type" content="text/css"> <style type="text/css"><!-- pre.display { font-family:inherit } pre.format { font-family:inherit } pre.smalldisplay { font-family:inherit; font-size:smaller } pre.smallformat { font-family:inherit; font-size:smaller } pre.smallexample { font-size:smaller } pre.smalllisp { font-size:smaller } span.sc { font-variant:small-caps } span.roman { font-family:serif; font-weight:normal; } span.sansserif { font-family:sans-serif; font-weight:normal; } --></style> </head> <body> <div class="node"> <p> <a name="Function-like-Macros"></a> <a name="Function_002dlike-Macros"></a> Next:&nbsp;<a rel="next" accesskey="n" href="Macro-Arguments.html#Macro-Arguments">Macro Arguments</a>, Previous:&nbsp;<a rel="previous" accesskey="p" href="Object_002dlike-Macros.html#Object_002dlike-Macros">Object-like Macros</a>, Up:&nbsp;<a rel="up" accesskey="u" href="Macros.html#Macros">Macros</a> <hr> </div> <h3 class="section">3.2 Function-like Macros</h3> <p><a name="index-function_002dlike-macros-45"></a> You can also define macros whose use looks like a function call. These are called <dfn>function-like macros</dfn>. To define a function-like macro, you use the same &lsquo;<samp><span class="samp">#define</span></samp>&rsquo; directive, but you put a pair of parentheses immediately after the macro name. For example, <pre class="smallexample"> #define lang_init() c_init() lang_init() ==&gt; c_init() </pre> <p>A function-like macro is only expanded if its name appears with a pair of parentheses after it. If you write just the name, it is left alone. This can be useful when you have a function and a macro of the same name, and you wish to use the function sometimes. <pre class="smallexample"> extern void foo(void); #define foo() /* <span class="roman">optimized inline version</span> */ ... 
foo(); funcptr = foo; </pre> <p>Here the call to <code>foo()</code> will use the macro, but the function pointer will get the address of the real function. If the macro were to be expanded, it would cause a syntax error. <p>If you put spaces between the macro name and the parentheses in the macro definition, that does not define a function-like macro, it defines an object-like macro whose expansion happens to begin with a pair of parentheses. <pre class="smallexample"> #define lang_init () c_init() lang_init() ==&gt; () c_init()() </pre> <p>The first two pairs of parentheses in this expansion come from the macro. The third is the pair that was originally after the macro invocation. Since <code>lang_init</code> is an object-like macro, it does not consume those parentheses. </body></html>
{ "content_hash": "db8e715fccfb037a9684742b65b74504", "timestamp": "", "source": "github", "line_count": 100, "max_line_length": 128, "avg_line_length": 41.65, "alnum_prop": 0.714765906362545, "repo_name": "trfiladelfo/tdk", "id": "ba36c1f615f01821206bd68f3f8d51780b96cb99", "size": "4165", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "gcc-arm-none-eabi/share/doc/gcc-arm-none-eabi/html/cpp/Function_002dlike-Macros.html", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "614531" }, { "name": "Batchfile", "bytes": "101839" }, { "name": "C", "bytes": "12540389" }, { "name": "C++", "bytes": "13332391" }, { "name": "CSS", "bytes": "140569" }, { "name": "HTML", "bytes": "23954553" }, { "name": "Logos", "bytes": "8877" }, { "name": "Makefile", "bytes": "129672" }, { "name": "Perl", "bytes": "9844" }, { "name": "Python", "bytes": "180880" }, { "name": "Scheme", "bytes": "3970" }, { "name": "Shell", "bytes": "10777" }, { "name": "Tcl", "bytes": "128365" }, { "name": "XC", "bytes": "8384" }, { "name": "XS", "bytes": "8334" }, { "name": "XSLT", "bytes": "221100" } ], "symlink_target": "" }
# Unit tests for the deproxy HTTP test-harness module: endpoint lifecycle,
# default/custom handlers, header handling, and request/response bodies.
# NOTE(review): this file is Python 2 (xrange, generator .next) — TODO confirm
# before running under Python 3.
import sys
import deproxy
import unittest
import threading
import logging
import socket
import argparse
import time

# Ports are handed out downward from this base; each test case takes the
# next lower port. run() lets the command line override the base.
deproxy_port_base = 9999
deproxy_port_iter = None


def get_next_deproxy_port():
    """Return the next unused port number, counting down from the base."""
    global deproxy_port_iter
    if deproxy_port_iter is None:
        # Lazily create the generator so it picks up any override of
        # deproxy_port_base made before the first call.
        def deproxy_port_iter_func():
            for i in xrange(deproxy_port_base):
                yield deproxy_port_base - i
        deproxy_port_iter = deproxy_port_iter_func().next
    return deproxy_port_iter()


class TestDefaultHandler(unittest.TestCase):
    """A request with no handler specified gets the built-in 200 response."""

    def setUp(self):
        self.deproxy_port = get_next_deproxy_port()
        self.deproxy = deproxy.Deproxy()
        self.end_point = self.deproxy.add_endpoint(self.deproxy_port)

    def tearDown(self):
        self.deproxy.shutdown_all_endpoints()

    def test_default_handler(self):
        mc = self.deproxy.make_request('http://localhost:%i/' %
                                       self.deproxy_port)
        self.assertEquals(int(mc.received_response.code), 200)


class TestEchoHandler(unittest.TestCase):
    """deproxy.echo_handler mirrors request headers and body back."""

    def setUp(self):
        self.deproxy_port = get_next_deproxy_port()
        self.deproxy = deproxy.Deproxy()
        self.end_point = self.deproxy.add_endpoint(self.deproxy_port)

    def tearDown(self):
        self.deproxy.shutdown_all_endpoints()

    def test_echo_handler(self):
        headers = {'x-header': '12345'}
        mc = self.deproxy.make_request('http://localhost:%i/' %
                                       self.deproxy_port,
                                       headers=headers,
                                       request_body='this is the body',
                                       default_handler=deproxy.echo_handler)
        self.assertEquals(int(mc.received_response.code), 200)
        self.assertIn('x-header', mc.received_response.headers)
        self.assertEquals(mc.received_response.headers['x-header'], '12345')
        self.assertEquals(mc.received_response.body, 'this is the body')


class TestDelayHandler(unittest.TestCase):
    """deproxy.delay wraps a handler and postpones its response."""

    def setUp(self):
        self.deproxy_port = get_next_deproxy_port()
        self.deproxy = deproxy.Deproxy()
        self.end_point = self.deproxy.add_endpoint(self.deproxy_port)

    def tearDown(self):
        self.deproxy.shutdown_all_endpoints()

    def test_delay_handler(self):
        handler = deproxy.delay(3, deproxy.simple_handler)
        t1 = time.time()
        mc = self.deproxy.make_request('http://localhost:%i/' %
                                       self.deproxy_port,
                                       default_handler=handler)
        t2 = time.time()
        self.assertEquals(int(mc.received_response.code), 200)
        # The round trip must take at least the 3-second delay, with a small
        # allowance for overhead (timing-sensitive; may flake on slow hosts).
        self.assertGreaterEqual(t2 - t1, 3)
        self.assertLessEqual(t2 - t1, 3.5)


class TestRoute(unittest.TestCase):
    """deproxy.route forwards requests to an external host.

    NOTE(review): requires network access to httpbin.org.
    """

    def setUp(self):
        self.deproxy_port = get_next_deproxy_port()
        self.deproxy = deproxy.Deproxy()
        self.end_point = self.deproxy.add_endpoint(self.deproxy_port)

    def tearDown(self):
        self.deproxy.shutdown_all_endpoints()

    def test_route(self):
        handler = deproxy.route('http', 'httpbin.org', self.deproxy)
        mc = self.deproxy.make_request('http://localhost:%i/' %
                                       self.deproxy_port,
                                       default_handler=handler)
        self.assertEquals(int(mc.received_response.code), 200)


class TestCustomHandlers(unittest.TestCase):
    """default_handler may be a plain function or a bound method."""

    def setUp(self):
        self.deproxy_port = get_next_deproxy_port()
        self.deproxy = deproxy.Deproxy()
        self.end_point = self.deproxy.add_endpoint(self.deproxy_port)

    def tearDown(self):
        self.deproxy.shutdown_all_endpoints()

    def test_custom_handler_function(self):
        def custom_handler(request):
            return deproxy.Response(code=606, message="Spoiler",
                                    headers={"Header-Name": "Header-Value"},
                                    body='Snape Kills Dumbledore')
        mc = self.deproxy.make_request('http://localhost:%i/' %
                                       self.deproxy_port,
                                       default_handler=custom_handler)
        self.assertEquals(int(mc.received_response.code), 606)

    def handler_method(self, request):
        # Not a test itself; used as a bound-method handler below.
        return deproxy.Response(code=606, message="Spoiler",
                                headers={"Header-Name": "Header-Value"},
                                body='Snape Kills Dumbledore')

    def test_custom_handler_method(self):
        mc = self.deproxy.make_request('http://localhost:%i/' %
                                       self.deproxy_port,
                                       default_handler=self.handler_method)
        self.assertEquals(int(mc.received_response.code), 606)


class TestEndpointDefaultHandler(unittest.TestCase):
    """A handler attached to an endpoint answers requests to that endpoint."""

    def setUp(self):
        self.port = get_next_deproxy_port()
        self.deproxy = deproxy.Deproxy()

    def test_endpoint_default_handler_function(self):
        def custom_handler(request):
            return deproxy.Response(code='601', message='Custom', headers={},
                                    body=None)
        self.deproxy.add_endpoint(port=self.port,
                                  default_handler=custom_handler)
        url = 'http://localhost:{0}/'.format(self.port)
        mc = self.deproxy.make_request(url=url)
        self.assertEqual(len(mc.handlings), 1)
        self.assertEqual(mc.handlings[0].response.code, '601')
        self.assertEqual(mc.received_response.code, '601')

    def custom_handler_method(self, request):
        # Bound-method variant of the endpoint default handler.
        return deproxy.Response(code='602', message='Custom', headers={},
                                body=None)

    def test_endpoint_default_handler_method(self):
        self.deproxy.add_endpoint(port=self.port,
                                  default_handler=self.custom_handler_method)
        url = 'http://localhost:{0}/'.format(self.port)
        mc = self.deproxy.make_request(url=url)
        self.assertEqual(len(mc.handlings), 1)
        self.assertEqual(mc.handlings[0].response.code, '602')
        self.assertEqual(mc.received_response.code, '602')

    def tearDown(self):
        self.deproxy.shutdown_all_endpoints()


class TestDeproxyDefaultHandler(unittest.TestCase):
    """A handler given to the Deproxy constructor applies to all endpoints."""

    def setUp(self):
        self.port = get_next_deproxy_port()

    def test_deproxy_default_handler_function(self):
        def custom_handler(request):
            return deproxy.Response(code='603', message='Custom', headers={},
                                    body=None)
        self.deproxy = deproxy.Deproxy(default_handler=custom_handler)
        self.deproxy.add_endpoint(port=self.port)
        url = 'http://localhost:{0}/'.format(self.port)
        mc = self.deproxy.make_request(url=url)
        self.assertEqual(len(mc.handlings), 1)
        self.assertEqual(mc.handlings[0].response.code, '603')
        self.assertEqual(mc.received_response.code, '603')

    def custom_handler_method(self, request):
        # Bound-method variant of the Deproxy-level default handler.
        return deproxy.Response(code='604', message='Custom', headers={},
                                body=None)

    def test_deproxy_default_handler_method(self):
        self.deproxy = deproxy.Deproxy(
            default_handler=self.custom_handler_method)
        self.deproxy.add_endpoint(port=self.port)
        url = 'http://localhost:{0}/'.format(self.port)
        mc = self.deproxy.make_request(url=url)
        self.assertEqual(len(mc.handlings), 1)
        self.assertEqual(mc.handlings[0].response.code, '604')
        self.assertEqual(mc.received_response.code, '604')

    def tearDown(self):
        # self.deproxy is only created inside the tests, so guard against a
        # test that failed before constructing it.
        if hasattr(self, 'deproxy') and self.deproxy is not None:
            self.deproxy.shutdown_all_endpoints()


class TestOrphanedHandlings(unittest.TestCase):
    """A handling served while another client's request is in flight shows up
    in that other request's orphaned_handlings."""

    def setUp(self):
        self.deproxy_port = get_next_deproxy_port()
        self.deproxy = deproxy.Deproxy()
        self.end_point = self.deproxy.add_endpoint(self.deproxy_port)
        self.other_client = deproxy.Deproxy()

    def tearDown(self):
        self.deproxy.shutdown_all_endpoints()

    def test_orphaned_handling(self):
        delayed_handler = deproxy.delay(2, deproxy.simple_handler)
        self.long_running_mc = None

        # Mutable holder so the background thread can hand its message chain
        # back to the main thread.
        class Helper:
            mc = None
        helper = Helper()

        def other_thread():
            mc = self.deproxy.make_request('http://localhost:%i/' %
                                           self.deproxy_port,
                                           default_handler=delayed_handler)
            helper.mc = mc
        t = threading.Thread(target=other_thread)
        # Daemon so a hung request can't keep the test process alive.
        t.daemon = True
        t.start()
        # While the delayed request is still pending, hit the endpoint from a
        # second client; that handling is "orphaned" onto the slow request.
        self.other_client.make_request('http://localhost:%i/' %
                                       self.deproxy_port)
        t.join()
        self.assertEqual(len(helper.mc.orphaned_handlings), 1)


class TestEndpointShutdown(unittest.TestCase):
    """Shutting down one endpoint frees its port for re-use."""

    def setUp(self):
        self.deproxy_port1 = get_next_deproxy_port()
        self.deproxy_port2 = get_next_deproxy_port()
        self.deproxy = deproxy.Deproxy()

    def test_shutdown(self):
        e1 = self.deproxy.add_endpoint(self.deproxy_port1)
        e2 = self.deproxy.add_endpoint(self.deproxy_port2)
        e1.shutdown()
        # Re-binding the freed port must not raise "address already in use".
        try:
            e3 = self.deproxy.add_endpoint(self.deproxy_port1)
        except socket.error as e:
            self.fail('Address already in use: %s' % e)


class TestShutdownAllEndpoints(unittest.TestCase):
    """shutdown_all_endpoints frees every bound port."""

    def setUp(self):
        self.deproxy_port1 = get_next_deproxy_port()
        self.deproxy_port2 = get_next_deproxy_port()
        self.deproxy = deproxy.Deproxy()

    def test_shutdown(self):
        e1 = self.deproxy.add_endpoint(self.deproxy_port1)
        e2 = self.deproxy.add_endpoint(self.deproxy_port2)
        self.deproxy.shutdown_all_endpoints()
        # Both ports must be bindable again after the bulk shutdown.
        try:
            e3 = self.deproxy.add_endpoint(self.deproxy_port1)
        except socket.error as e:
            self.fail('add_endpoint through an exception: %s' % e)
        try:
            e4 = self.deproxy.add_endpoint(self.deproxy_port2)
        except socket.error as e:
            self.fail('add_endpoint through an exception: %s' % e)


class TestAutomaticRequestHeaders(unittest.TestCase):
    """make_request adds standard request headers unless told not to."""

    def setUp(self):
        self.port = get_next_deproxy_port()
        self.deproxy = deproxy.Deproxy()
        self.endpoint = self.deproxy.add_endpoint(self.port)
        self.url = 'http://localhost:{}/'.format(self.port)

    def tearDown(self):
        if self.deproxy is not None:
            self.deproxy.shutdown_all_endpoints()

    def test_not_specified(self):
        # Default behavior: the standard headers are added.
        mc = self.deproxy.make_request(url=self.url)
        self.assertIn('Host', mc.sent_request.headers)
        #self.assertIn('host', mc.sent_request.headers)
        self.assertIn('Accept', mc.sent_request.headers)
        self.assertIn('Accept-Encoding', mc.sent_request.headers)
        self.assertIn('User-Agent', mc.sent_request.headers)

    def test_explicit_on(self):
        mc = self.deproxy.make_request(url=self.url, add_default_headers=True)
        self.assertIn('Host', mc.sent_request.headers)
        #self.assertIn('host', mc.sent_request.headers)
        self.assertIn('Accept', mc.sent_request.headers)
        self.assertIn('Accept-Encoding', mc.sent_request.headers)
        self.assertIn('User-Agent', mc.sent_request.headers)

    def test_explicit_off(self):
        mc = self.deproxy.make_request(url=self.url, add_default_headers=False)
        self.assertNotIn('Host', mc.sent_request.headers)
        #self.assertNotIn('host', mc.sent_request.headers)
        self.assertNotIn('Accept', mc.sent_request.headers)
        self.assertNotIn('Accept-Encoding', mc.sent_request.headers)
        self.assertNotIn('User-Agent', mc.sent_request.headers)


class TestDefaultResponseHeaders(unittest.TestCase):
    """A handler may return (response, flag) to control whether the standard
    response headers (Server, Date) are added."""

    @classmethod
    def setUpClass(self):
        # One shared endpoint for the whole class; torn down in tearDownClass.
        self.port = get_next_deproxy_port()
        self.deproxy = deproxy.Deproxy()
        self.endpoint = self.deproxy.add_endpoint(self.port)
        self.url = 'http://localhost:{}/'.format(self.port)

    @classmethod
    def tearDownClass(self):
        if self.deproxy is not None:
            self.deproxy.shutdown_all_endpoints()

    def handler1(self, request):
        # Bare Response: default header behavior.
        return deproxy.Response(code=606, message="Spoiler",
                                headers={"Header-Name": "Header-Value"},
                                body='Snape Kills Dumbledore')

    def handler2(self, request):
        # (Response, True): explicitly request the default headers.
        return (deproxy.Response(code=606, message="Spoiler",
                                 headers={"Header-Name": "Header-Value"},
                                 body='Snape Kills Dumbledore'), True)

    def handler3(self, request):
        # (Response, False): explicitly suppress the default headers.
        return (deproxy.Response(code=606, message="Spoiler",
                                 headers={"Header-Name": "Header-Value"},
                                 body='Snape Kills Dumbledore'), False)

    def test_not_specified(self):
        mc = self.deproxy.make_request(url=self.url,
                                       default_handler=self.handler1)
        self.assertEqual(len(mc.handlings), 1)
        self.assertIn('server', mc.received_response.headers)
        self.assertIn('date', mc.received_response.headers)
        self.assertIn('Server', mc.handlings[0].response.headers)
        self.assertIn('Date', mc.handlings[0].response.headers)

    def test_explicit_on(self):
        mc = self.deproxy.make_request(url=self.url,
                                       default_handler=self.handler2)
        self.assertEqual(len(mc.handlings), 1)
        self.assertIn('server', mc.received_response.headers)
        self.assertIn('date', mc.received_response.headers)
        self.assertIn('Server', mc.handlings[0].response.headers)
        self.assertIn('Date', mc.handlings[0].response.headers)

    def test_explicit_off(self):
        mc = self.deproxy.make_request(url=self.url,
                                       default_handler=self.handler3)
        self.assertEqual(len(mc.handlings), 1)
        # Check both capitalizations on both the wire response and the
        # recorded handling.
        self.assertNotIn('server', mc.received_response.headers)
        self.assertNotIn('date', mc.received_response.headers)
        self.assertNotIn('server', mc.handlings[0].response.headers)
        self.assertNotIn('date', mc.handlings[0].response.headers)
        self.assertNotIn('Server', mc.received_response.headers)
        self.assertNotIn('Date', mc.received_response.headers)
        self.assertNotIn('Server', mc.handlings[0].response.headers)
        self.assertNotIn('Date', mc.handlings[0].response.headers)


class TestHeaderCollection(unittest.TestCase):
    """HeaderCollection behaves like a case-insensitive multi-map."""

    def setUp(self):
        self.headers = deproxy.HeaderCollection()

    def test_length(self):
        self.assertEqual(len(self.headers), 0)
        self.headers.add('Name', 'Value')
        self.assertEqual(len(self.headers), 1)

    def test_contains(self):
        self.headers.add('Name', 'Value')
        self.assertTrue('Name' in self.headers)

    def test_contains_case(self):
        # Lookup is case-insensitive.
        self.headers.add('Name', 'Value')
        self.assertTrue('name' in self.headers)

    def test_assertIn_case(self):
        self.headers.add('Name', 'Value')
        self.assertIn('name', self.headers)

    def test_find_all(self):
        # find_all returns every value for the name, in insertion order.
        self.headers.add('A', 'qwerty')
        self.headers.add('B', 'asdf')
        self.headers.add('C', 'zxcv')
        self.headers.add('A', 'uiop')
        self.headers.add('A', 'jkl;')
        result = [value for value in self.headers.find_all('A')]
        self.assertEqual(result, ['qwerty', 'uiop', 'jkl;'])

    def test_bracket_case(self):
        self.headers.add('Name', 'Value')
        try:
            self.assertEqual(self.headers['name'], 'Value')
        except:
            self.fail()

    def test_get(self):
        self.headers.add('Name', 'Value')
        self.assertIn('name', self.headers)
        self.assertEqual(self.headers.get('Name'), 'Value')
        self.assertEqual(self.headers.get('name'), 'Value')
        self.assertIsNone(self.headers.get('asdf'))
        self.assertEqual(self.headers.get('name', default='zxcv'), 'Value')
        self.assertEqual(self.headers.get('asdf', default='zxcv'), 'zxcv')


class TestBodies(unittest.TestCase):
    """Request and response bodies travel intact; chunked responses are
    de-chunked on receipt."""

    def setUp(self):
        self.deproxy = deproxy.Deproxy()
        self.port = get_next_deproxy_port()
        self.deproxy.add_endpoint(self.port)
        self.url = 'http://localhost:{0}/'.format(self.port)

    def test_request_body(self):
        body = """ This is the body

This is the next paragraph.
"""
        mc = self.deproxy.make_request(url=self.url, method='POST',
                                       request_body=body)
        self.assertEqual(mc.sent_request.body, body)
        self.assertEqual(len(mc.handlings), 1)
        self.assertEqual(mc.handlings[0].request.body, body)

    def test_response_body(self):
        body = """ This is another body

This is the next paragraph.
"""
        def custom_handler(request):
            return deproxy.Response(code=200, message='OK', headers=None,
                                    body=body)
        mc = self.deproxy.make_request(url=self.url,
                                       default_handler=custom_handler)
        self.assertEqual(mc.received_response.body, body)
        self.assertEqual(len(mc.handlings), 1)
        self.assertEqual(mc.handlings[0].response.body, body)

    @unittest.expectedFailure
    def test_request_body_chunked(self):
        # Placeholder: chunked request bodies are not implemented yet.
        self.fail()

    def test_response_body_chunked(self):
        chunked_body = "4\r\nWiki\r\n5\r\npedia\r\n0\r\n\r\n"
        body = "Wikipedia"
        def custom_handler(request):
            return deproxy.Response(code=200, message='OK',
                                    headers={'transfer-encoding': 'chunked'},
                                    body=chunked_body)
        mc = self.deproxy.make_request(url=self.url,
                                       default_handler=custom_handler)
        # The client sees the decoded body; the handling keeps the raw chunks.
        self.assertEqual(mc.received_response.body, body)
        self.assertEqual(len(mc.handlings), 1)
        self.assertEqual(mc.handlings[0].response.body, chunked_body)

    def tearDown(self):
        self.deproxy.shutdown_all_endpoints()


class TestSendingHeaders(unittest.TestCase):
    """Duplicate header names survive in order in both directions."""

    def setUp(self):
        self.deproxy = deproxy.Deproxy()
        self.port = get_next_deproxy_port()
        self.deproxy.add_endpoint(self.port)
        self.url = 'http://localhost:{0}/'.format(self.port)

    def test_send_duplicate_request_headers(self):
        headers = deproxy.HeaderCollection()
        headers.add('Name', 'Value1')
        headers.add('Name', 'Value2')
        mc = self.deproxy.make_request(url=self.url, headers=headers)
        self.assertEqual(len(mc.handlings), 1)
        values = [value for value in
                  mc.handlings[0].request.headers.find_all('Name')]
        self.assertEqual(values, ['Value1', 'Value2'])

    def test_send_duplicate_response_headers(self):
        def custom_handler(request):
            headers = deproxy.HeaderCollection()
            headers.add('Name', 'Value1')
            headers.add('Name', 'Value2')
            return deproxy.Response(code=200, message='OK', headers=headers,
                                    body=None)
        mc = self.deproxy.make_request(url=self.url,
                                       default_handler=custom_handler)
        self.assertEqual(len(mc.handlings), 1)
        values = [value for value in
                  mc.received_response.headers.find_all('Name')]
        self.assertEqual(values, ['Value1', 'Value2'])

    def tearDown(self):
        self.deproxy.shutdown_all_endpoints()


class TestPerEndpointHandlers(unittest.TestCase):
    """The handlers= argument maps endpoints (by object or by name) to
    handlers; unmapped endpoints fall back to the built-in 200."""

    def setUp(self):
        self.deproxy = deproxy.Deproxy()
        self.endpoint1 = self.deproxy.add_endpoint(
            name='test-endpoint-1', port=get_next_deproxy_port())
        self.endpoint2 = self.deproxy.add_endpoint(
            name='test-endpoint-2', port=get_next_deproxy_port())

        def custom_handler1(request):
            return deproxy.Response(code='605', message='Custom', headers={},
                                    body=None)

        def custom_handler2(request):
            return deproxy.Response(code='606', message='Spoiler', headers={},
                                    body=None)
        self.custom_handler1 = custom_handler1
        self.custom_handler2 = custom_handler2
        self.url1 = 'http://localhost:{0}/'.format(self.endpoint1.port)
        self.url2 = 'http://localhost:{0}/'.format(self.endpoint2.port)

    def test_no_handlers(self):
        mc = self.deproxy.make_request(url=self.url1)
        self.assertEqual(len(mc.handlings), 1)
        self.assertEqual(mc.handlings[0].response.code, '200')
        self.assertEqual(mc.received_response.code, '200')

        mc = self.deproxy.make_request(url=self.url2)
        self.assertEqual(len(mc.handlings), 1)
        self.assertEqual(mc.handlings[0].response.code, '200')
        self.assertEqual(mc.received_response.code, '200')

    def test_empty_handlers(self):
        mc = self.deproxy.make_request(url=self.url1, handlers={})
        self.assertEqual(len(mc.handlings), 1)
        self.assertEqual(mc.handlings[0].response.code, '200')
        self.assertEqual(mc.received_response.code, '200')

        mc = self.deproxy.make_request(url=self.url2, handlers={})
        self.assertEqual(len(mc.handlings), 1)
        self.assertEqual(mc.handlings[0].response.code, '200')
        self.assertEqual(mc.received_response.code, '200')

    def test_both_handlers(self):
        handlers = {self.endpoint1: self.custom_handler1,
                    self.endpoint2: self.custom_handler2}
        mc = self.deproxy.make_request(url=self.url1, handlers=handlers)
        self.assertEqual(len(mc.handlings), 1)
        self.assertEqual(mc.handlings[0].response.code, '605')
        self.assertEqual(mc.received_response.code, '605')

        mc = self.deproxy.make_request(url=self.url2, handlers=handlers)
        self.assertEqual(len(mc.handlings), 1)
        self.assertEqual(mc.handlings[0].response.code, '606')
        self.assertEqual(mc.received_response.code, '606')

    def test_one_handler(self):
        handlers = {self.endpoint1: self.custom_handler1}
        mc = self.deproxy.make_request(url=self.url1, handlers=handlers)
        self.assertEqual(len(mc.handlings), 1)
        self.assertEqual(mc.handlings[0].response.code, '605')
        self.assertEqual(mc.received_response.code, '605')

        # The unmapped endpoint keeps the built-in default response.
        mc = self.deproxy.make_request(url=self.url2, handlers=handlers)
        self.assertEqual(len(mc.handlings), 1)
        self.assertEqual(mc.handlings[0].response.code, '200')
        self.assertEqual(mc.received_response.code, '200')

    def test_handlers_by_name(self):
        handlers = {'test-endpoint-1': self.custom_handler1,
                    'test-endpoint-2': self.custom_handler2}
        mc = self.deproxy.make_request(url=self.url1, handlers=handlers)
        self.assertEqual(len(mc.handlings), 1)
        self.assertEqual(mc.handlings[0].response.code, '605')
        self.assertEqual(mc.received_response.code, '605')

        mc = self.deproxy.make_request(url=self.url2, handlers=handlers)
        self.assertEqual(len(mc.handlings), 1)
        self.assertEqual(mc.handlings[0].response.code, '606')
        self.assertEqual(mc.received_response.code, '606')

    def tearDown(self):
        self.deproxy.shutdown_all_endpoints()


def run():
    """Command-line entry point: parse options, then hand off to unittest."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--port-base', help='The base port number to use when '
                        'assigning ports to tests. Each test case uses the '
                        'next lower port number than the test case before. '
                        'The default is 9999.', default=9999, type=int)
    parser.add_argument('--print-log', action='store_true',
                        help='Print the log.')
    args = parser.parse_args()

    if args.print_log:
        logging.basicConfig(level=logging.DEBUG,
                            format=('%(asctime)s %(levelname)s:%(name)s:'
                                    '%(funcName)s:'
                                    '%(filename)s(%(lineno)d):'
                                    '%(threadName)s(%(thread)d):%(message)s'))

    global deproxy_port_base
    deproxy_port_base = args.port_base

    # Empty argv so unittest doesn't try to parse our custom options.
    unittest.main(argv=[''])


if __name__ == '__main__':
    run()
{ "content_hash": "a191b8bccb75a2b5cab2ae6aa336f576", "timestamp": "", "source": "github", "line_count": 627, "max_line_length": 79, "avg_line_length": 39.14194577352472, "alnum_prop": 0.6028441039850053, "repo_name": "izrik/pydeproxy", "id": "4f9b4bdd9b5a7d32fa14282472378ea4ccb2937d", "size": "24561", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test_deproxy.py", "mode": "33261", "license": "mit", "language": [ { "name": "Python", "bytes": "70136" } ], "symlink_target": "" }
<div class="sidebar-background"></div> <div class="sidebar"> <div class="container sidebar-sticky"> <div class="category-list"> <ul class="categories"> {% assign categories = (site.categories | sort) %} {% for category in categories %} {% assign cat_name = category[0] %} <li><a href="/#{{ cat_name }}">{{ cat_name | capitalize }}</a></li> {% endfor %} </ul> </div> <ul class="sidebar-nav"> <li class="sidebar-nav-item{% if page.title == "Home" %} active{% endif %}"> <a href="/">Home</a> </li> <!--li class="sidebar-nav-item"><a href="{{ site.github.repo }}/archive/v{{ site.version }}.zip">Download</a></li> <li class="sidebar-nav-item"><a href="{{ site.github.repo }}">GitHub project</a></li> <li class="sidebar-nav-item">Currently v{{ site.version }}</li--> </ul> <div class="sidebar-about"> <h1>{{ site.title }}</h1> </div> <p>&copy; {{ site.time | date: '%Y' }}. All rights reserved.</p> </div> </div>
{ "content_hash": "637f8968acdc3d9d45b57c4f2c70919b", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 126, "avg_line_length": 39.666666666666664, "alnum_prop": 0.473109243697479, "repo_name": "mrf00/mrf00.github.io", "id": "0c264b5dbea82a3628d38849fad65370e363f262", "size": "1190", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "_includes/sidebar.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "14820" }, { "name": "HTML", "bytes": "4208" } ], "symlink_target": "" }
# Renders user-marked vegetation polygons for one Zooniverse subject as a
# translucent overlay ("heatmap") on top of the subject image.
# NOTE(review): written for Python 2 (legacy mysql-era codebase); the print
# call below uses the form that is valid under both Python 2 and 3.
__author__ = 'greg'
import matplotlib.pyplot as plt
from matplotlib.collections import PolyCollection
import numpy as np
import csv
import json
from aggregation_api import AggregationAPI
import matplotlib.cbook as cbook
import sys

# Command-line overrides, currently disabled in favor of a fixed subject:
# subject_id = int(sys.argv[1])
# minimum_users = int(sys.argv[2])
subject_id = 511723

# Project 348 with a public (read-only) Panoptes connection; fetch the
# subject's image to a local file path.
project = AggregationAPI(348, public_panoptes_connection=True)
subject_image = project.__image_setup__(subject_id)

for minimum_users in [8]:
    print(minimum_users)
    fig, ax = plt.subplots()
    image_file = cbook.get_sample_data(subject_image)
    image = plt.imread(image_file)
    im = ax.imshow(image)

    # Collect the polygon vertex lists whose user-count column matches the
    # current threshold. CSV columns (per the header row that is skipped):
    # presumably [?, num_users, vertices_json] -- TODO confirm against the
    # CSV writer in the aggregation pipeline.
    all_vertices = []
    # "rb" is required by the Python 2 csv module; change to "r" (with
    # newline='') if this script is ever ported to Python 3.
    with open("/tmp/348/4_ComplexAMOS/vegetation_polygons_heatmap.csv", "rb") as f:
        polygon_reader = csv.reader(f)
        next(polygon_reader, None)  # skip the header row
        for row in polygon_reader:
            if int(row[1]) == minimum_users:
                all_vertices.append(json.loads(row[2]))

    # Draw every polygon in one collection so they share the alpha blend.
    all_vertices = np.asarray(all_vertices)
    coll = PolyCollection(all_vertices, alpha=0.3)
    ax.add_collection(coll)
    ax.autoscale_view()
    plt.show()
{ "content_hash": "844fad0fc3611f54d6d965bb3630a4bc", "timestamp": "", "source": "github", "line_count": 46, "max_line_length": 82, "avg_line_length": 25.67391304347826, "alnum_prop": 0.6714648602878917, "repo_name": "zooniverse/aggregation", "id": "142caccb946f05f475663589d138ebeccc8947b4", "size": "1203", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "analysis/heatmap.py", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "723" }, { "name": "Python", "bytes": "2184451" }, { "name": "Scala", "bytes": "629" }, { "name": "Shell", "bytes": "190" } ], "symlink_target": "" }
// Package fixtures provides interface definitions used as inputs for
// counterfeiter's fake-generation tests.
package fixtures

import (
	"net/http"

	"github.com/maxbrunsfeld/counterfeiter/v6/fixtures/another_package"
)

//counterfeiter:generate . EmbedsInterfaces
// EmbedsInterfaces exercises fake generation for an interface that embeds
// interfaces from the standard library, from another package, and from the
// same package, alongside its own method.
type EmbedsInterfaces interface {
	http.Handler
	another_package.AnotherInterface
	InterfaceToEmbed

	DoThings()
}

// InterfaceToEmbed is a same-package interface embedded by EmbedsInterfaces.
type InterfaceToEmbed interface {
	EmbeddedMethod() string
}
{ "content_hash": "302f1d1e822a476f766e6d2a15ae1f39", "timestamp": "", "source": "github", "line_count": 20, "max_line_length": 68, "avg_line_length": 16.65, "alnum_prop": 0.7987987987987988, "repo_name": "maxbrunsfeld/counterfeiter", "id": "2b66deec2994edcd23e584eb8e5c6d9a3527fc56", "size": "333", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "fixtures/embeds_interfaces.go", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "18" }, { "name": "Dockerfile", "bytes": "972" }, { "name": "Go", "bytes": "118767" }, { "name": "PowerShell", "bytes": "1084" }, { "name": "Shell", "bytes": "2611" } ], "symlink_target": "" }
// Renamed include guard: the previous macro __ACTION__ used a leading
// double underscore, which is reserved for the implementation in C++.
#ifndef CPPROBOTS_ACTION_HPP
#define CPPROBOTS_ACTION_HPP

/**
\brief Action to be performed by a Robot during a timeStep.
*/
struct Action {
  double v;           //!< The desired speed of the Robot.
  double w;           //!< The desired turning rate of the Robot.
  double turretAngle; //!< The desired angle of the turret.
  bool shooting;      //!< Whether to shoot or not.
};

#endif /* end of include guard: CPPROBOTS_ACTION_HPP */
{ "content_hash": "893e6acab1b5e171ab99da8090e06e00", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 65, "avg_line_length": 25.6875, "alnum_prop": 0.6180048661800487, "repo_name": "braak/CppRobots", "id": "3a561c34b0c7bc391ac44998e36042dd53beaaba", "size": "626", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "include/Action.hpp", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "329548" }, { "name": "CMake", "bytes": "22119" } ], "symlink_target": "" }
#ifndef SKY_ENGINE_CORE_EDITING_INSERTTEXTCOMMAND_H_
#define SKY_ENGINE_CORE_EDITING_INSERTTEXTCOMMAND_H_

#include "sky/engine/core/editing/CompositeEditCommand.h"

namespace blink {

// Composite edit command that inserts a plain-text string at the current
// selection, optionally selecting the inserted text afterwards.
class InsertTextCommand final : public CompositeEditCommand {
public:
    // Controls how whitespace around the insertion point is rebalanced
    // after the text is inserted.
    enum RebalanceType {
        RebalanceLeadingAndTrailingWhitespaces,
        RebalanceAllWhitespaces
    };

    // Factory method; the command is applied later by the editing machinery.
    static PassRefPtr<InsertTextCommand> create(Document& document, const String& text, bool selectInsertedText = false,
        RebalanceType rebalanceType = RebalanceLeadingAndTrailingWhitespaces)
    {
        return adoptRef(new InsertTextCommand(document, text, selectInsertedText, rebalanceType));
    }

private:
    InsertTextCommand(Document&, const String& text, bool selectInsertedText, RebalanceType);

    virtual void doApply() override;

    // Helpers used by doApply(); implemented in the corresponding .cpp.
    Position positionInsideTextNode(const Position&);
    Position insertTab(const Position&);

    // Fast paths: presumably attempt a cheap replace/overwrite before the
    // general insertion path — confirm against the implementation file.
    bool performTrivialReplace(const String&, bool selectInsertedText);
    bool performOverwrite(const String&, bool selectInsertedText);
    void setEndingSelectionWithoutValidation(const Position& startPosition, const Position& endPosition);

    friend class TypingCommand;

    String m_text;
    bool m_selectInsertedText;
    RebalanceType m_rebalanceType;
};

} // namespace blink

#endif // SKY_ENGINE_CORE_EDITING_INSERTTEXTCOMMAND_H_
{ "content_hash": "17d129a4a566c4d04b84ff426f6b05ee", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 120, "avg_line_length": 31.022727272727273, "alnum_prop": 0.7677655677655678, "repo_name": "collinjackson/mojo", "id": "7aafe8f9970a51cea38f33ef3ea7063ec456202d", "size": "2727", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "sky/engine/core/editing/InsertTextCommand.h", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Bison", "bytes": "31162" }, { "name": "C", "bytes": "1870198" }, { "name": "C++", "bytes": "36473977" }, { "name": "CSS", "bytes": "1897" }, { "name": "Dart", "bytes": "508640" }, { "name": "Go", "bytes": "181090" }, { "name": "Groff", "bytes": "29030" }, { "name": "HTML", "bytes": "6258864" }, { "name": "Java", "bytes": "1187123" }, { "name": "JavaScript", "bytes": "204155" }, { "name": "Makefile", "bytes": "402" }, { "name": "Objective-C", "bytes": "74603" }, { "name": "Objective-C++", "bytes": "370763" }, { "name": "Protocol Buffer", "bytes": "1048" }, { "name": "Python", "bytes": "5515876" }, { "name": "Shell", "bytes": "143302" }, { "name": "nesC", "bytes": "18347" } ], "symlink_target": "" }
<?php
/**
 * ResourceParamsTest
 *
 * PHP version 5
 *
 * @category Class
 * @package Voximplant
 * @author http://github.com/swagger-api/swagger-codegen
 * @license http://www.apache.org/licenses/LICENSE-2.0 Apache License v2
 * @link https://github.com/swagger-api/swagger-codegen
 */

/**
 * Voximplant HTTP API
 *
 * Voximplant HTTP API description
 *
 * OpenAPI spec version: 1.0.0
 * Contact: info@voximplant.com
 * Generated by: https://github.com/swagger-api/swagger-codegen.git
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * NOTE: This class is auto generated by the swagger code generator program.
 * https://github.com/swagger-api/swagger-codegen
 * Please update the test case below to test the model.
 */

namespace Voximplant;

/**
 * ResourceParamsTest Class Doc Comment
 *
 * @category Class
 */
// * @description The available resource parameters. 
/**
 * @package Voximplant
 * @author http://github.com/swagger-api/swagger-codegen
 * @license http://www.apache.org/licenses/LICENSE-2.0 Apache License v2
 * @link https://github.com/swagger-api/swagger-codegen
 */
class ResourceParamsTest extends \PHPUnit_Framework_TestCase
{

    /**
     * Setup before running any test case
     */
    public static function setUpBeforeClass()
    {
    }

    /**
     * Setup before running each test case
     */
    public function setUp()
    {
    }

    /**
     * Clean up after running each test case
     */
    public function tearDown()
    {
    }

    /**
     * Clean up after running all test cases
     */
    public static function tearDownAfterClass()
    {
    }

    /**
     * Test "ResourceParams"
     */
    public function testResourceParams()
    {
        // TODO: instantiate the generated ResourceParams model and assert
        // its default state once fixtures are available.
    }

    /**
     * Test attribute "allowed"
     */
    public function testPropertyAllowed()
    {
        // TODO: assert getter/setter behavior of the "allowed" attribute.
    }

    /**
     * Test attribute "forbidden"
     */
    public function testPropertyForbidden()
    {
        // TODO: assert getter/setter behavior of the "forbidden" attribute.
    }

    /**
     * Test attribute "requested"
     */
    public function testPropertyRequested()
    {
        // TODO: assert getter/setter behavior of the "requested" attribute.
    }
}
{ "content_hash": "7b2909326c735cff54322c010be41bcd", "timestamp": "", "source": "github", "line_count": 122, "max_line_length": 76, "avg_line_length": 21.049180327868854, "alnum_prop": 0.6495327102803738, "repo_name": "DmitryIvaneychik/phpsdk", "id": "2110faa3213ad0b955aebd7a81e9e4940b7918cb", "size": "2568", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/Model/ResourceParamsTest.php", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "PHP", "bytes": "2710984" }, { "name": "Shell", "bytes": "1629" } ], "symlink_target": "" }
<!DOCTYPE html> <html> <head> <meta charset="utf-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <title>Watchr | Dashboard</title> <!-- Tell the browser to be responsive to screen width --> <meta content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no" name="viewport"> <!-- Bootstrap 3.3.5 --> <link rel="stylesheet" href="bootstrap/css/bootstrap.min.css"> <!-- Font Awesome --> <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/font-awesome/4.4.0/css/font-awesome.min.css"> <!-- Ionicons --> <link rel="stylesheet" href="https://code.ionicframework.com/ionicons/2.0.1/css/ionicons.min.css"> <!-- Theme style --> <link rel="stylesheet" href="dist/css/AdminLTE.min.css"> <!-- AdminLTE Skins. Choose a skin from the css/skins folder instead of downloading all of them to reduce the load. --> <link rel="stylesheet" href="dist/css/skins/skin-blue.min.css"> <!-- iCheck --> <link rel="stylesheet" href="plugins/iCheck/flat/blue.css"> <!-- Morris chart --> <link rel="stylesheet" href="plugins/morris/morris.css"> <!-- jvectormap --> <link rel="stylesheet" href="plugins/jvectormap/jquery-jvectormap-1.2.2.css"> <!-- Date Picker --> <link rel="stylesheet" href="plugins/datepicker/datepicker3.css"> <!-- Daterange picker --> <link rel="stylesheet" href="plugins/daterangepicker/daterangepicker-bs3.css"> <!-- bootstrap wysihtml5 - text editor --> <link rel="stylesheet" href="plugins/bootstrap-wysihtml5/bootstrap3-wysihtml5.min.css"> <link rel="stylesheet" href="leaflet.css" /> <style type="text/css"> .leaflet-container { background-color: white; } </style> <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries --> <!-- WARNING: Respond.js doesn't work if you view the page via file:// --> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/html5shiv/3.7.3/html5shiv.min.js"></script> <script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script> <![endif]--> </head> <body class="hold-transition skin-blue 
sidebar-mini"> <div class="wrapper"> <header class="main-header"> <!-- Logo --> <a href="index2.html" class="logo"> <!-- mini logo for sidebar mini 50x50 pixels --> <span class="logo-mini"><b>W</b>hr</span> <!-- logo for regular state and mobile devices --> <span class="logo-lg"><b>Watch</b>r</span> </a> <!-- Header Navbar: style can be found in header.less --> <nav class="navbar navbar-static-top" role="navigation"> <!-- Sidebar toggle button--> <a href="#" class="sidebar-toggle" data-toggle="offcanvas" role="button"> <span class="sr-only">Toggle navigation</span> </a> <div class="navbar-custom-menu"> <ul class="nav navbar-nav"> <!-- Messages: style can be found in dropdown.less--> <li class="dropdown messages-menu"> <a href="#" class="dropdown-toggle" data-toggle="dropdown"> <i class="fa fa-envelope-o"></i> <span class="label label-success">4</span> </a> <ul class="dropdown-menu"> <li class="header">You have 4 messages</li> <li> <!-- inner menu: contains the actual data --> <ul class="menu"> <li> <!-- start message --> <a href="#"> <div class="pull-left"> <img src="dist/img/user2-160x160.jpg" class="img-circle" alt="User Image"> </div> <h4> Support Team <small><i class="fa fa-clock-o"></i> 5 mins</small> </h4> <p>Why not buy a new awesome theme?</p> </a> </li> <!-- end message --> <li> <a href="#"> <div class="pull-left"> <img src="dist/img/user3-128x128.jpg" class="img-circle" alt="User Image"> </div> <h4> AdminLTE Design Team <small><i class="fa fa-clock-o"></i> 2 hours</small> </h4> <p>Why not buy a new awesome theme?</p> </a> </li> <li> <a href="#"> <div class="pull-left"> <img src="dist/img/user4-128x128.jpg" class="img-circle" alt="User Image"> </div> <h4> Developers <small><i class="fa fa-clock-o"></i> Today</small> </h4> <p>Why not buy a new awesome theme?</p> </a> </li> <li> <a href="#"> <div class="pull-left"> <img src="dist/img/user3-128x128.jpg" class="img-circle" alt="User Image"> </div> <h4> Sales Department <small><i class="fa fa-clock-o"></i> 
Yesterday</small> </h4> <p>Why not buy a new awesome theme?</p> </a> </li> <li> <a href="#"> <div class="pull-left"> <img src="dist/img/user4-128x128.jpg" class="img-circle" alt="User Image"> </div> <h4> Reviewers <small><i class="fa fa-clock-o"></i> 2 days</small> </h4> <p>Why not buy a new awesome theme?</p> </a> </li> </ul> </li> <li class="footer"><a href="#">See All Messages</a></li> </ul> </li> <!-- Tasks: style can be found in dropdown.less --> <li class="dropdown tasks-menu"> <a href="#" class="dropdown-toggle" data-toggle="dropdown"> <i class="fa fa-flag-o"></i> <span class="label label-danger">9</span> </a> <ul class="dropdown-menu"> <li class="header">You have 9 tasks</li> <li> <!-- inner menu: contains the actual data --> <ul class="menu"> <li> <!-- Task item --> <a href="#"> <h3> Design some buttons <small class="pull-right">20%</small> </h3> <div class="progress xs"> <div class="progress-bar progress-bar-aqua" style="width: 20%" role="progressbar" aria-valuenow="20" aria-valuemin="0" aria-valuemax="100"> <span class="sr-only">20% Complete</span> </div> </div> </a> </li> <!-- end task item --> <li> <!-- Task item --> <a href="#"> <h3> Create a nice theme <small class="pull-right">40%</small> </h3> <div class="progress xs"> <div class="progress-bar progress-bar-green" style="width: 40%" role="progressbar" aria-valuenow="20" aria-valuemin="0" aria-valuemax="100"> <span class="sr-only">40% Complete</span> </div> </div> </a> </li> <!-- end task item --> <li> <!-- Task item --> <a href="#"> <h3> Some task I need to do <small class="pull-right">60%</small> </h3> <div class="progress xs"> <div class="progress-bar progress-bar-red" style="width: 60%" role="progressbar" aria-valuenow="20" aria-valuemin="0" aria-valuemax="100"> <span class="sr-only">60% Complete</span> </div> </div> </a> </li> <!-- end task item --> <li> <!-- Task item --> <a href="#"> <h3> Make beautiful transitions <small class="pull-right">80%</small> </h3> <div class="progress xs"> <div 
class="progress-bar progress-bar-yellow" style="width: 80%" role="progressbar" aria-valuenow="20" aria-valuemin="0" aria-valuemax="100"> <span class="sr-only">80% Complete</span> </div> </div> </a> </li> <!-- end task item --> </ul> </li> <li class="footer"> <a href="#">View all tasks</a> </li> </ul> </li> <!-- User Account: style can be found in dropdown.less --> <li class="dropdown user user-menu"> <a href="#" class="dropdown-toggle" data-toggle="dropdown"> <img src="dist/img/user2-160x160.jpg" class="user-image" alt="User Image"> <span class="hidden-xs">Rewe Prenzlauer Berg</span> </a> </li> <!-- Control Sidebar Toggle Button --> <li> <a href="#" data-toggle="acontrol-sidebar"><i class="fa fa-gears"></i></a> </li> </ul> </div> </nav> </header> <!-- Left side column. contains the logo and sidebar --> <aside class="main-sidebar"> <!-- sidebar: style can be found in sidebar.less --> <section class="sidebar"> <!-- Sidebar user panel --> <div class="user-panel"> <div class="pull-left image"> <img src="dist/img/user2-160x160.jpg" class="img-circle" alt="User Image"> </div> <div class="pull-left info"> <p>REWE Prenzlauer Berg</p> <a href="#"><i class="fa fa-circle text-success"></i> Schounauser Allee</a> </div> </div> <!-- search form --> <form action="#" method="get" class="sidebar-form"> <div class="input-group"> <input type="text" name="q" class="form-control" placeholder="Search..."> <span class="input-group-btn"> <button type="submit" name="search" id="search-btn" class="btn btn-flat"><i class="fa fa-search"></i></button> </span> </div> </form> <!-- /.search form --> <!-- sidebar menu: : style can be found in sidebar.less --> <ul class="sidebar-menu"> <li class="header">MAIN NAVIGATION</li> <li class="active treeview"> <a href="#"> <i class="fa fa-dashboard"></i> <span>Dashboard</span> </a> </li> <li class="treeview"> <a href="#"> <i class="fa fa-files-o"></i> <span>Returning customers</span> </a> </li> <li> <a href="#"> <i class="fa fa-th"></i> <span>Heating 
Maps</span> <small class="label pull-right bg-green">new</small> </a> </li> <li class="treeview"> <a href="#"> <i class="fa fa-pie-chart"></i> <span>Store areas</span> </a> <li><a href="#"><i class="fa fa-book"></i> <span>Documentation</span></a></li> </ul> </section> <!-- /.sidebar --> </aside> <!-- Content Wrapper. Contains page content --> <div class="content-wrapper" style="min-height: 501px;"> <!-- Content Header (Page header) --> <section class="content-header"> <h1> Dashboard <small>Status panel</small> </h1> <ol class="breadcrumb"> <li><a href="#"><i class="fa fa-dashboard"></i> Home</a></li> <li class="active">Dashboard</li> </ol> </section> <!-- Main content --> <section class="content"> <!-- Small boxes (Stat box) --> <div class="row"> <div class="col-lg-3 col-xs-6"> <!-- small box --> <div class="small-box bg-aqua"> <div class="inner"> <h3>150</h3> <p>Returning Customers</p> </div> <div class="icon"> <i class="ion ion-bag"></i> </div> <a href="#" class="small-box-footer">More info <i class="fa fa-arrow-circle-right"></i></a> </div> </div> <!-- ./col --> <div class="col-lg-3 col-xs-6"> <!-- small box --> <div class="small-box bg-green"> <div class="inner"> <h3>53<sup style="font-size: 20px">%</sup></h3> <p>Bounce Rate</p> </div> <div class="icon"> <i class="ion ion-stats-bars"></i> </div> <a href="#" class="small-box-footer">More info <i class="fa fa-arrow-circle-right"></i></a> </div> </div> <!-- ./col --> <div class="col-lg-3 col-xs-6"> <!-- small box --> <div class="small-box bg-yellow"> <div class="inner"> <h3>44</h3> <p>User Registrations</p> </div> <div class="icon"> <i class="ion ion-person-add"></i> </div> <a href="#" class="small-box-footer">More info <i class="fa fa-arrow-circle-right"></i></a> </div> </div> <!-- ./col --> <div class="col-lg-3 col-xs-6"> <!-- small box --> <div class="small-box bg-red"> <div class="inner"> <h3>65</h3> <p>Unique Visitors</p> </div> <div class="icon"> <i class="ion ion-pie-graph"></i> </div> <a href="#" 
class="small-box-footer">More info <i class="fa fa-arrow-circle-right"></i></a> </div> </div> <!-- ./col --> </div> <!-- /.row --> <!-- Main row --> <div class="row"> <!-- Left col --> <section class="col-lg-9"> <!-- Custom tabs (Charts with tabs)--> <div class="nav-tabs-custom"> <!-- Tabs within a box --> <ul class="nav nav-tabs pull-right"> <li class="pull-left header"><i class="fa fa-inbox"></i> Real time</li> </ul> <div class="tab-content no-padding"> <!-- Morris chart - Sales --> <div class="chart tab-pane active" id="map" style="position: relative; width: 800px; height: 600px"></div> <div class="chart tab-pane" id="sales-chart" style="position: relative; height: 300px;"></div> <div class="chart tab-pane" id="revenue-chart" style="position: relative; height: 300px; display:none;"></div> </div> </div> <!-- /.nav-tabs-custom --> </section> <!-- /.Left col --> <!-- right col (We are only adding the ID to make the widgets sortable)--> <section class="col-lg-3 connectedSortable"> <!-- solid sales graph --> <div class="box box-solid bg-teal-gradient"> <div class="box-header"> <i class="fa fa-th"></i> <h3 class="box-title">Sales Graph</h3> <div class="box-tools pull-right"> <button class="btn bg-teal btn-sm" data-widget="collapse"><i class="fa fa-minus"></i></button> <button class="btn bg-teal btn-sm" data-widget="remove"><i class="fa fa-times"></i></button> </div> </div> <div class="box-body border-radius-none"> <div class="chart" id="line-chart" style="height: 250px;"></div> </div> <!-- /.box-body --> </div> <!-- /.box --> </section> <!-- right col --> </div> <!-- /.row (main row) --> </section> <!-- /.content --> </div> <!-- /.content-wrapper --> <footer class="main-footer"> <div class="pull-right hidden-xs"> <b>Version</b> 2.3.0 </div> <strong>Copyright &copy; 2014-2015 <a href="http://almsaeedstudio.com">Almsaeed Studio</a>.</strong> All rights reserved. 
</footer> <!-- Control Sidebar --> <aside class="control-sidebar control-sidebar-dark"> <!-- Create the tabs --> <ul class="nav nav-tabs nav-justified control-sidebar-tabs"> <li><a href="#control-sidebar-home-tab" data-toggle="tab"><i class="fa fa-home"></i></a></li> <li><a href="#control-sidebar-settings-tab"><i class="fa fa-gears"></i></a></li> </ul> <!-- Tab panes --> <div class="tab-content"> <!-- Home tab content --> <div class="tab-pane" id="control-sidebar-home-tab"> <h3 class="control-sidebar-heading">Recent Activity</h3> <ul class="control-sidebar-menu"> <li> <a href="javascript::;"> <i class="menu-icon fa fa-birthday-cake bg-red"></i> <div class="menu-info"> <h4 class="control-sidebar-subheading">Langdon's Birthday</h4> <p>Will be 23 on April 24th</p> </div> </a> </li> <li> <a href="javascript::;"> <i class="menu-icon fa fa-user bg-yellow"></i> <div class="menu-info"> <h4 class="control-sidebar-subheading">Frodo Updated His Profile</h4> <p>New phone +1(800)555-1234</p> </div> </a> </li> <li> <a href="javascript::;"> <i class="menu-icon fa fa-envelope-o bg-light-blue"></i> <div class="menu-info"> <h4 class="control-sidebar-subheading">Nora Joined Mailing List</h4> <p>nora@example.com</p> </div> </a> </li> <li> <a href="javascript::;"> <i class="menu-icon fa fa-file-code-o bg-green"></i> <div class="menu-info"> <h4 class="control-sidebar-subheading">Cron Job 254 Executed</h4> <p>Execution time 5 seconds</p> </div> </a> </li> </ul> <!-- /.control-sidebar-menu --> <h3 class="control-sidebar-heading">Tasks Progress</h3> <ul class="control-sidebar-menu"> <li> <a href="javascript::;"> <h4 class="control-sidebar-subheading"> Custom Template Design <span class="label label-danger pull-right">70%</span> </h4> <div class="progress progress-xxs"> <div class="progress-bar progress-bar-danger" style="width: 70%"></div> </div> </a> </li> <li> <a href="javascript::;"> <h4 class="control-sidebar-subheading"> Update Resume <span class="label label-success 
pull-right">95%</span> </h4> <div class="progress progress-xxs"> <div class="progress-bar progress-bar-success" style="width: 95%"></div> </div> </a> </li> <li> <a href="javascript::;"> <h4 class="control-sidebar-subheading"> Laravel Integration <span class="label label-warning pull-right">50%</span> </h4> <div class="progress progress-xxs"> <div class="progress-bar progress-bar-warning" style="width: 50%"></div> </div> </a> </li> <li> <a href="javascript::;"> <h4 class="control-sidebar-subheading"> Back End Framework <span class="label label-primary pull-right">68%</span> </h4> <div class="progress progress-xxs"> <div class="progress-bar progress-bar-primary" style="width: 68%"></div> </div> </a> </li> </ul> <!-- /.control-sidebar-menu --> </div> <!-- /.tab-pane --> <!-- Stats tab content --> <div class="tab-pane" id="control-sidebar-stats-tab">Stats Tab Content</div> <!-- /.tab-pane --> <!-- Settings tab content --> <div class="tab-pane" id="control-sidebar-settings-tab"> <form method="post"> <h3 class="control-sidebar-heading">General Settings</h3> <div class="form-group"> <label class="control-sidebar-subheading"> Report panel usage <input type="checkbox" class="pull-right" checked> </label> <p> Some information about this general settings option </p> </div> <!-- /.form-group --> <div class="form-group"> <label class="control-sidebar-subheading"> Allow mail redirect <input type="checkbox" class="pull-right" checked> </label> <p> Other sets of options are available </p> </div> <!-- /.form-group --> <div class="form-group"> <label class="control-sidebar-subheading"> Expose author name in posts <input type="checkbox" class="pull-right" checked> </label> <p> Allow the user to show his name in blog posts </p> </div> <!-- /.form-group --> <h3 class="control-sidebar-heading">Chat Settings</h3> <div class="form-group"> <label class="control-sidebar-subheading"> Show me as online <input type="checkbox" class="pull-right" checked> </label> </div> <!-- /.form-group --> 
<div class="form-group"> <label class="control-sidebar-subheading"> Turn off notifications <input type="checkbox" class="pull-right"> </label> </div> <!-- /.form-group --> <div class="form-group"> <label class="control-sidebar-subheading"> Delete chat history <a href="javascript::;" class="text-red pull-right"><i class="fa fa-trash-o"></i></a> </label> </div> <!-- /.form-group --> </form> </div> <!-- /.tab-pane --> </div> </aside> <!-- /.control-sidebar --> <!-- Add the sidebar's background. This div must be placed immediately after the control sidebar --> <div class="control-sidebar-bg"></div> </div> <!-- ./wrapper --> <!-- jQuery 2.1.4 --> <script src="plugins/jQuery/jQuery-2.1.4.min.js"></script> <!-- jQuery UI 1.11.4 --> <script src="https://code.jquery.com/ui/1.11.4/jquery-ui.min.js"></script> <!-- Resolve conflict in jQuery UI tooltip with Bootstrap tooltip --> <script> $.widget.bridge('uibutton', $.ui.button); </script> <!-- Bootstrap 3.3.5 --> <script src="bootstrap/js/bootstrap.min.js"></script> <!-- Morris.js charts --> <script src="dist/js/raphael-min.js"></script> <script src="plugins/morris/morris.min.js"></script> <!-- Sparkline --> <script src="plugins/sparkline/jquery.sparkline.min.js"></script> <!-- jvectormap --> <script src="plugins/jvectormap/jquery-jvectormap-1.2.2.min.js"></script> <script src="plugins/jvectormap/jquery-jvectormap-world-mill-en.js"></script> <!-- jQuery Knob Chart --> <script src="plugins/knob/jquery.knob.js"></script> <!-- daterangepicker --> <script src="https://cdnjs.cloudflare.com/ajax/libs/moment.js/2.10.2/moment.min.js"></script> <script src="plugins/daterangepicker/daterangepicker.js"></script> <!-- datepicker --> <script src="plugins/datepicker/bootstrap-datepicker.js"></script> <!-- Bootstrap WYSIHTML5 --> <script src="plugins/bootstrap-wysihtml5/bootstrap3-wysihtml5.all.min.js"></script> <!-- Slimscroll --> <script src="plugins/slimScroll/jquery.slimscroll.min.js"></script> <!-- FastClick --> <script 
src="plugins/fastclick/fastclick.min.js"></script> <!-- Watchr App --> <script src="dist/js/app.js"></script> <!-- Watchr dashboard demo (This is only for demo purposes) --> <script src="dist/js/pages/dashboard.js"></script> <!-- Watchr for demo purposes --> <script src="dist/js/demo.js"></script> <script src="leaflet.js"></script> <script src="js/jquery-1.11.3.min.js"></script> <script> var map = L.map('map').setView([52.50246, 13.41179], 21); L.tileLayer('https://api.tiles.mapbox.com/v4/{id}/{z}/{x}/{y}.png?access_token=pk.eyJ1IjoibWFwYm94IiwiYSI6IjZjNmRjNzk3ZmE2MTcwOTEwMGY0MzU3YjUzOWFmNWZhIn0.Y8bhBaUMqFiPrDRW9hieoQ', { maxZoom: 22, attribution: 'Map data &copy; <a href="http://openstreetmap.org">OpenStreetMap</a> contributors, ' + '<a href="http://creativecommons.org/licenses/by-sa/2.0/">CC-BY-SA</a>, ' + 'Imagery © <a href="http://mapbox.com">Mapbox</a>', id: 'mapbox.streets' }).addTo(map); var imageUrl = 'img/rewe.jpg', imageBounds = [[52.50262, 13.41141], [52.502252, 13.412125]]; L.imageOverlay(imageUrl, imageBounds).addTo(map); var firstpolyline; var pointList = []; var drawline = function() { console.log(pointList); firstpolyline = new L.Polyline(pointList, { color: 'red', weight: 4, opacity: 0.3, smoothFactor: 2, lineJoin: 'round' }); firstpolyline.addTo(map); map.fitBounds(firstpolyline.getBounds()); } // ajax call var lastpos = [0, 0]; var CallMeBaby = function() { $.ajax({ url: '../getLastPosition', dataType: 'json', timeout: 5000, success: function(data) { if (lastpos != data) { console.log(data); marker.setLatLng(data); lastpos = data; point = new L.LatLng(data[0], data[1]); map.panTo(point); drawline(); } }, error: function() { console.log("Ohi ohi! 
:( ajax call failled "); } }); }; setInterval(CallMeBaby, 1000); var CallMeTwice = function() { $.ajax({ url: '../getLast10Positions', dataType: 'json', timeout: 5000, success: function(data) { if (firstpolyline !== undefined) { map.removeLayer(firstpolyline); } for(i = 0; i < data.length; i++) { pos = data[i]; console.log(pos); point = new L.LatLng(pos[0], pos[1]); pointList.push(point); } drawline(); }, error: function() { console.log("Ohi ohi! :( ajax call failled "); } }); } setInterval(CallMeTwice(), 2500); var marker = L.marker([52.50246, 13.41179]).addTo(map) .bindPopup("<b>John Snow<b>").openPopup(); var popup = L.popup(); function onMapClick(e) { popup .setLatLng(e.latlng) .setContent("You clicked the map at " + e.latlng.toString()) .openOn(map); } map.on('click', onMapClick); </script> </body> </html>
{ "content_hash": "10e1f64e75b7f141824e4a6609947216", "timestamp": "", "source": "github", "line_count": 802, "max_line_length": 184, "avg_line_length": 36.584788029925186, "alnum_prop": 0.45857332742578644, "repo_name": "andreyors/ecomhack-oct15-watchr", "id": "34db0fd574a2798b174365be3933bd6fc26a32f7", "size": "29342", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "dashboard/index.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "379692" }, { "name": "HTML", "bytes": "1744605" }, { "name": "JavaScript", "bytes": "2094019" }, { "name": "Less", "bytes": "157976" }, { "name": "PHP", "bytes": "58020" }, { "name": "SCSS", "bytes": "2187" } ], "symlink_target": "" }
namespace reporting { ReportQueueManualTestContext::ReportQueueManualTestContext( base::TimeDelta period, uint64_t number_of_messages_to_enqueue, Destination destination, Priority priority, CompletionCallback completion_cb, scoped_refptr<base::SequencedTaskRunner> sequenced_task_runner, BuildReportQueueCallback queue_builder) : TaskRunnerContext<Status>(std::move(completion_cb), sequenced_task_runner), period_(period), number_of_messages_to_enqueue_(number_of_messages_to_enqueue), destination_(destination), priority_(priority), queue_builder_(std::move(queue_builder)), report_queue_(std::unique_ptr<ReportQueue, base::OnTaskRunnerDeleter>( nullptr, base::OnTaskRunnerDeleter(sequenced_task_runner))) { DETACH_FROM_SEQUENCE(sequence_checker_); } ReportQueueManualTestContext::~ReportQueueManualTestContext() { DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_); } void ReportQueueManualTestContext::OnStart() { DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_); if (destination_ == UNDEFINED_DESTINATION) { Status invalid_destination = Status( error::INVALID_ARGUMENT, "Destination was UNDEFINED_DESTINATION"); LOG(ERROR) << invalid_destination; Complete(invalid_destination); return; } if (priority_ == UNDEFINED_PRIORITY) { Status invalid_priority = Status(error::INVALID_ARGUMENT, "Destination was UNDEFINED_DESTINATION"); LOG(ERROR) << invalid_priority; Complete(invalid_priority); return; } ReportQueueConfiguration::PolicyCheckCallback policy_check_cb = base::BindRepeating([]() -> Status { return Status::StatusOK(); }); auto config_result = ReportQueueConfiguration::Create( EventType::kDevice, destination_, std::move(policy_check_cb)); if (!config_result.ok()) { Complete(config_result.status()); return; } // Build queue by configuration. 
DCHECK(queue_builder_) << "Can be only called once"; auto report_queue_result = std::move(queue_builder_).Run(std::move(config_result.ValueOrDie())); if (!report_queue_result.ok()) { Complete(report_queue_result.status()); return; } report_queue_ = std::move(report_queue_result.ValueOrDie()); NextEnqueue(); } void ReportQueueManualTestContext::NextEnqueue() { DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_); if (number_of_enqueued_messages_ >= number_of_messages_to_enqueue_) { Complete(Status::StatusOK()); return; } ScheduleAfter(period_, &ReportQueueManualTestContext::Enqueue, base::Unretained(this)); } void ReportQueueManualTestContext::Enqueue() { DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_); report_queue_->Enqueue( base::NumberToString(value_++), priority_, base::BindOnce(&ReportQueueManualTestContext::OnEnqueued, base::Unretained(this))); number_of_enqueued_messages_++; } void ReportQueueManualTestContext::OnEnqueued(Status status) { if (!status.ok()) { Complete(status); return; } Schedule(&ReportQueueManualTestContext::NextEnqueue, base::Unretained(this)); } void ReportQueueManualTestContext::Complete(Status status) { Schedule(&ReportQueueManualTestContext::Response, base::Unretained(this), status); } } // namespace reporting
{ "content_hash": "5d4db5dea38dab7c0286449dc9c5b4d3", "timestamp": "", "source": "github", "line_count": 101, "max_line_length": 79, "avg_line_length": 33.62376237623762, "alnum_prop": 0.6952296819787986, "repo_name": "chromium/chromium", "id": "441953d7a00e93d13476ba35233e9ce281a61c36", "size": "4241", "binary": false, "copies": "5", "ref": "refs/heads/main", "path": "chrome/browser/policy/messaging_layer/util/report_queue_manual_test_context.cc", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
<?php
/**
 * User: matteo
 * Date: 14/12/12
 * Time: 23.48
 *
 * Just for fun...
 */

namespace Cypress\GitElephantHostBundle\Git;

use JMS\DiExtraBundle\Annotation\Service;
use GitElephant\Repository;

/**
 * split the ref from the path
 *
 * @Service("ref_path.splitter")
 */
class RefPathSplitter
{
    /**
     * Split a combined "<ref>/<path>" string into its reference and path
     * parts, trying the full string first and then progressively longer
     * prefixes until one matches a branch or tag of the repository.
     *
     * @param \GitElephant\Repository $repository repo
     * @param string                  $ref        reference
     * @param string                  $path       path
     *
     * @throws \RuntimeException when no prefix of $ref is a branch or tag
     * @return array
     */
    public function split(Repository $repository, $ref, $path)
    {
        // The whole string may already name a branch or tag.
        if (null !== $repository->getBranchOrTag($ref)) {
            return array($ref, $path == '' ? null : $path);
        }
        $segments = explode('/', $ref);
        $segmentCount = count($segments);
        // Try every proper prefix of the segments, shortest first.
        for ($length = 1; $length < $segmentCount; $length++) {
            $candidate = implode('/', array_slice($segments, 0, $length));
            if (null !== $repository->getBranchOrTag($candidate)) {
                $remainder = '' === $path
                    ? ltrim(str_replace($candidate, '', $ref), '/')
                    : $path;

                return array($candidate, $remainder);
            }
        }

        throw new \RuntimeException(sprintf('ref %s and path %s are not splittable', $ref, $path));
    }
}
{ "content_hash": "a7ea6af514f0679b088da4d90e36df1f", "timestamp": "", "source": "github", "line_count": 49, "max_line_length": 99, "avg_line_length": 26.081632653061224, "alnum_prop": 0.5140845070422535, "repo_name": "matteosister/gitelephant.cypresslab.net", "id": "ebb92fca4b45ce43a1c103e12895fada86644fdf", "size": "1278", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Cypress/GitElephantHostBundle/Git/RefPathSplitter.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "8958" }, { "name": "CoffeeScript", "bytes": "3968" }, { "name": "JavaScript", "bytes": "26558" }, { "name": "PHP", "bytes": "135476" }, { "name": "Perl", "bytes": "41847" }, { "name": "Puppet", "bytes": "380413" }, { "name": "Ruby", "bytes": "363602" }, { "name": "Shell", "bytes": "50262" } ], "symlink_target": "" }
echo "Starting NodeAgent..." # Run node-agent /usr/local/bin/node_agent \ --env onprem \ --cert-chain /usr/local/bin/node_agent.crt \ --key /usr/local/bin/node_agent.key \ --workload-cert-ttl 90s \ --root-cert /usr/local/bin/istio_ca.crt >/var/log/node-agent.log 2>&1 & echo "Starting Application..." # Start app apt-get update && apt-get -y install curl curl -sL https://deb.nodesource.com/setup_8.x | bash - apt-get install -y nodejs npm install express node /usr/local/bin/app.js
{ "content_hash": "4fa0218b8fe3b3e741e7d5031f2a826f", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 73, "avg_line_length": 30.875, "alnum_prop": 0.694331983805668, "repo_name": "geeknoid/istio", "id": "061d721ee007be52d96fde8ff64d26ddb06f6f42", "size": "1079", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "security/docker/start_app.sh", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "3464" }, { "name": "Go", "bytes": "11069243" }, { "name": "HTML", "bytes": "36270" }, { "name": "JavaScript", "bytes": "1491" }, { "name": "Makefile", "bytes": "81541" }, { "name": "Python", "bytes": "12859" }, { "name": "Ruby", "bytes": "4321" }, { "name": "Shell", "bytes": "298984" }, { "name": "Smarty", "bytes": "23998" } ], "symlink_target": "" }
#pragma once #include "paddle/framework/eigen.h" #include "paddle/framework/op_registry.h" namespace paddle { namespace operators { template <typename Place, typename T, int Rank> void EigenTranspose(const framework::ExecutionContext& context, const framework::Tensor& in, framework::Tensor& out, std::vector<int> axis) { Eigen::array<int, Rank> permute; for (int i = 0; i < Rank; i++) { permute[i] = axis[i]; } auto in_dim = in.dims(); auto out_dim = out.dims(); auto eigen_in = framework::EigenTensor<T, Rank>::From(in); auto eigen_out = framework::EigenTensor<T, Rank>::From(out); auto& dev = context.GetEigenDevice<Place>(); eigen_out.device(dev) = eigen_in.shuffle(permute); } template <typename Place, typename T> class TransposeKernel : public framework::OpKernel<T> { public: void Compute(const framework::ExecutionContext& context) const override { auto* x = context.Input<framework::Tensor>("X"); auto* out = context.Output<framework::Tensor>("Out"); out->mutable_data<T>(context.GetPlace()); std::vector<int> axis = context.Attr<std::vector<int>>("axis"); int ndims = axis.size(); switch (ndims) { case 1: EigenTranspose<Place, T, 1>(context, *x, *out, axis); break; case 2: EigenTranspose<Place, T, 2>(context, *x, *out, axis); break; case 3: EigenTranspose<Place, T, 3>(context, *x, *out, axis); break; case 4: EigenTranspose<Place, T, 4>(context, *x, *out, axis); break; case 5: EigenTranspose<Place, T, 5>(context, *x, *out, axis); break; case 6: EigenTranspose<Place, T, 6>(context, *x, *out, axis); break; default: PADDLE_THROW("Tensors with rank at most 6 are supported"); } } }; template <typename Place, typename T> class TransposeGradKernel : public framework::OpKernel<T> { public: void Compute(const framework::ExecutionContext& context) const override { auto* out_grad = context.Input<framework::Tensor>(framework::GradVarName("Out")); auto* x_grad = context.Output<framework::Tensor>(framework::GradVarName("X")); if (x_grad) { x_grad->mutable_data<T>(context.GetPlace()); 
std::vector<int> axis = context.Attr<std::vector<int>>("axis"); std::vector<int> reversed_axis(axis); for (size_t i = 0; i < axis.size(); i++) { reversed_axis[axis[i]] = i; } int ndims = axis.size(); switch (ndims) { case 1: EigenTranspose<Place, T, 1>(context, *out_grad, *x_grad, reversed_axis); break; case 2: EigenTranspose<Place, T, 2>(context, *out_grad, *x_grad, reversed_axis); break; case 3: EigenTranspose<Place, T, 3>(context, *out_grad, *x_grad, reversed_axis); break; case 4: EigenTranspose<Place, T, 4>(context, *out_grad, *x_grad, reversed_axis); break; case 5: EigenTranspose<Place, T, 5>(context, *out_grad, *x_grad, reversed_axis); break; case 6: EigenTranspose<Place, T, 6>(context, *out_grad, *x_grad, reversed_axis); break; default: PADDLE_THROW("Tensors with rank at most 6 are supported"); } } } }; } // namespace operators } // namespace paddle
{ "content_hash": "126e7cd38bc6e0f2cccedb0cc9688700", "timestamp": "", "source": "github", "line_count": 116, "max_line_length": 75, "avg_line_length": 31.301724137931036, "alnum_prop": 0.5585238226383916, "repo_name": "pengli09/Paddle", "id": "aaa3f47ab5545accd4d1108e0ad6f5a3062186d0", "size": "4245", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "paddle/operators/transpose_op.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "261148" }, { "name": "C++", "bytes": "5387850" }, { "name": "CMake", "bytes": "215783" }, { "name": "CSS", "bytes": "21730" }, { "name": "Cuda", "bytes": "884320" }, { "name": "Go", "bytes": "109479" }, { "name": "HTML", "bytes": "8941" }, { "name": "JavaScript", "bytes": "1025" }, { "name": "Perl", "bytes": "11452" }, { "name": "Python", "bytes": "1796075" }, { "name": "Shell", "bytes": "137943" } ], "symlink_target": "" }
<?xml version="1.0" encoding="utf-8"?> <LinearLayout xmlns:android="http://schemas.android.com/apk/res/android" android:id="@+id/linear_layout" android:layout_width="match_parent" android:layout_height="@dimen/vertical_view_group_double_height" android:background="@color/no_color" android:orientation="vertical"> <LinearLayout android:layout_width="@dimen/vertical_view_group_width" android:layout_height="wrap_content" android:background="@color/no_color" android:orientation="horizontal"> <TextView android:layout_width="wrap_content" android:layout_height="match_parent" android:background="@drawable/stoke_round_corner_base_normal" android:gravity="center" android:paddingLeft="5dp" android:paddingRight="5dp" android:text="活动" android:textColor="@color/base_normal" android:textSize="24sp" /> <TextView android:id="@+id/first_content_TV" android:layout_width="wrap_content" android:layout_height="match_parent" android:layout_marginLeft="3dp" android:gravity="center" android:textColor="@color/base_normal" android:textSize="25sp" /> </LinearLayout> <LinearLayout android:layout_width="@dimen/vertical_view_group_width" android:layout_height="wrap_content" android:background="@color/no_color" android:orientation="horizontal"> <TextView android:layout_width="wrap_content" android:layout_height="match_parent" android:background="@drawable/stoke_round_corner_base_normal" android:gravity="center" android:paddingLeft="5dp" android:paddingRight="5dp" android:text="活动" android:textColor="@color/base_normal" android:textSize="24sp" /> <TextView android:id="@+id/second_content_TV" android:layout_width="wrap_content" android:layout_height="match_parent" android:layout_marginLeft="3dp" android:gravity="center" android:textColor="@color/base_normal" android:textSize="25sp" /> </LinearLayout> </LinearLayout>
{ "content_hash": "5f379004d0c3e5c19f3ec39c5f6a2630", "timestamp": "", "source": "github", "line_count": 64, "max_line_length": 73, "avg_line_length": 36.53125, "alnum_prop": 0.6120615911035072, "repo_name": "xuyunqiang/SwitchViewGroup", "id": "820a91aa254731090021c082922ef5a5a2f84a56", "size": "2346", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/res/layout/vertical_view_group_view.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "7422" } ], "symlink_target": "" }
// Copyright (c) 2009-2010 Satoshi Nakamoto // Copyright (c) 2009-2012 The Bitcoin Developers // Distributed under the MIT/X11 software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. // // Why base-58 instead of standard base-64 encoding? // - Don't want 0OIl characters that look the same in some fonts and // could be used to create visually identical looking account numbers. // - A string with non-alphanumeric characters is not as easily accepted as an account number. // - E-mail usually won't line-break if there's no punctuation to break at. // - Double-clicking selects the whole number as one word if it's all alphanumeric. // #ifndef BITCOIN_BASE58_H #define BITCOIN_BASE58_H #include <string> #include <vector> #include "bignum.h" #include "key.h" #include "script.h" #include "coin.h" static const char* pszBase58 = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"; // Encode a byte sequence as a base58-encoded string inline std::string EncodeBase58(const unsigned char* pbegin, const unsigned char* pend) { CAutoBN_CTX pctx; CBigNum bn58 = 58; CBigNum bn0 = 0; // Convert big endian data to little endian // Extra zero at the end make sure bignum will interpret as a positive number std::vector<unsigned char> vchTmp(pend-pbegin+1, 0); reverse_copy(pbegin, pend, vchTmp.begin()); // Convert little endian data to bignum CBigNum bn; bn.setvch(vchTmp); // Convert bignum to std::string std::string str; // Expected size increase from base58 conversion is approximately 137% // use 138% to be safe str.reserve((pend - pbegin) * 138 / 100 + 1); CBigNum dv; CBigNum rem; while (bn > bn0) { if (!BN_div(&dv, &rem, &bn, &bn58, pctx)) throw bignum_error("EncodeBase58 : BN_div failed"); bn = dv; unsigned int c = rem.getulong(); str += pszBase58[c]; } // Leading zeroes encoded as base58 zeros for (const unsigned char* p = pbegin; p < pend && *p == 0; p++) str += pszBase58[0]; // Convert little endian std::string to big 
endian reverse(str.begin(), str.end()); return str; } // Encode a byte vector as a base58-encoded string inline std::string EncodeBase58(const std::vector<unsigned char>& vch) { return EncodeBase58(&vch[0], &vch[0] + vch.size()); } // Decode a base58-encoded string psz into byte vector vchRet // returns true if decoding is successful inline bool DecodeBase58(const char* psz, std::vector<unsigned char>& vchRet) { CAutoBN_CTX pctx; vchRet.clear(); CBigNum bn58 = 58; CBigNum bn = 0; CBigNum bnChar; while (isspace(*psz)) psz++; // Convert big endian string to bignum for (const char* p = psz; *p; p++) { const char* p1 = strchr(pszBase58, *p); if (p1 == NULL) { while (isspace(*p)) p++; if (*p != '\0') return false; break; } bnChar.setulong(p1 - pszBase58); if (!BN_mul(&bn, &bn, &bn58, pctx)) throw bignum_error("DecodeBase58 : BN_mul failed"); bn += bnChar; } // Get bignum as little endian data std::vector<unsigned char> vchTmp = bn.getvch(); // Trim off sign byte if present if (vchTmp.size() >= 2 && vchTmp.end()[-1] == 0 && vchTmp.end()[-2] >= 0x80) vchTmp.erase(vchTmp.end()-1); // Restore leading zeros int nLeadingZeros = 0; for (const char* p = psz; *p == pszBase58[0]; p++) nLeadingZeros++; vchRet.assign(nLeadingZeros + vchTmp.size(), 0); // Convert little endian data to big endian reverse_copy(vchTmp.begin(), vchTmp.end(), vchRet.end() - vchTmp.size()); return true; } // Decode a base58-encoded string str into byte vector vchRet // returns true if decoding is successful inline bool DecodeBase58(const std::string& str, std::vector<unsigned char>& vchRet) { return DecodeBase58(str.c_str(), vchRet); } // Encode a byte vector to a base58-encoded string, including checksum inline std::string EncodeBase58Check(const std::vector<unsigned char>& vchIn) { // add 4-byte hash check to the end std::vector<unsigned char> vch(vchIn); uint256 hash = Hash(vch.begin(), vch.end()); vch.insert(vch.end(), (unsigned char*)&hash, (unsigned char*)&hash + 4); return EncodeBase58(vch); } // 
Decode a base58-encoded string psz that includes a checksum, into byte vector vchRet // returns true if decoding is successful inline bool DecodeBase58Check(const char* psz, std::vector<unsigned char>& vchRet) { if (!DecodeBase58(psz, vchRet)) return false; if (vchRet.size() < 4) { vchRet.clear(); return false; } uint256 hash = Hash(vchRet.begin(), vchRet.end()-4); if (memcmp(&hash, &vchRet.end()[-4], 4) != 0) { vchRet.clear(); return false; } vchRet.resize(vchRet.size()-4); return true; } // Decode a base58-encoded string str that includes a checksum, into byte vector vchRet // returns true if decoding is successful inline bool DecodeBase58Check(const std::string& str, std::vector<unsigned char>& vchRet) { return DecodeBase58Check(str.c_str(), vchRet); } /** Base class for all base58-encoded data */ class CBase58Data { protected: // the version byte unsigned char nVersion; // the actually encoded data std::vector<unsigned char> vchData; CBase58Data() { nVersion = 0; vchData.clear(); } ~CBase58Data() { // zero the memory, as it may contain sensitive data if (!vchData.empty()) memset(&vchData[0], 0, vchData.size()); } void SetData(int nVersionIn, const void* pdata, size_t nSize) { nVersion = nVersionIn; vchData.resize(nSize); if (!vchData.empty()) memcpy(&vchData[0], pdata, nSize); } void SetData(int nVersionIn, const unsigned char *pbegin, const unsigned char *pend) { SetData(nVersionIn, (void*)pbegin, pend - pbegin); } public: bool SetString(const char* psz) { std::vector<unsigned char> vchTemp; DecodeBase58Check(psz, vchTemp); if (vchTemp.empty()) { vchData.clear(); nVersion = 0; return false; } nVersion = vchTemp[0]; vchData.resize(vchTemp.size() - 1); if (!vchData.empty()) memcpy(&vchData[0], &vchTemp[1], vchData.size()); memset(&vchTemp[0], 0, vchTemp.size()); return true; } bool SetString(const std::string& str) { return SetString(str.c_str()); } std::string ToString() const { std::vector<unsigned char> vch(1, nVersion); vch.insert(vch.end(), vchData.begin(), 
vchData.end()); return EncodeBase58Check(vch); } int CompareTo(const CBase58Data& b58) const { if (nVersion < b58.nVersion) return -1; if (nVersion > b58.nVersion) return 1; if (vchData < b58.vchData) return -1; if (vchData > b58.vchData) return 1; return 0; } bool operator==(const CBase58Data& b58) const { return CompareTo(b58) == 0; } bool operator<=(const CBase58Data& b58) const { return CompareTo(b58) <= 0; } bool operator>=(const CBase58Data& b58) const { return CompareTo(b58) >= 0; } bool operator< (const CBase58Data& b58) const { return CompareTo(b58) < 0; } bool operator> (const CBase58Data& b58) const { return CompareTo(b58) > 0; } }; /** base58-encoded Bitcoin addresses. * Public-key-hash-addresses have version 0 (or 111 testnet). * The data vector contains RIPEMD160(SHA256(pubkey)), where pubkey is the serialized public key. * Script-hash-addresses have version 5 (or 196 testnet). * The data vector contains RIPEMD160(SHA256(cscript)), where cscript is the serialized redemption script. */ class CBitcoinAddress; class CBitcoinAddressVisitor : public boost::static_visitor<bool> { private: CBitcoinAddress *addr; public: CBitcoinAddressVisitor(CBitcoinAddress *addrIn) : addr(addrIn) { } bool operator()(const CKeyID &id) const; bool operator()(const CScriptID &id) const; bool operator()(const CNoDestination &no) const; }; class CBitcoinAddress : public CBase58Data { public: enum { PUBKEY_ADDRESS = COIN_ADDRESS_START, SCRIPT_ADDRESS = 5, PUBKEY_ADDRESS_TEST = 111, SCRIPT_ADDRESS_TEST = 196, }; bool Set(const CKeyID &id) { SetData(fTestNet ? PUBKEY_ADDRESS_TEST : PUBKEY_ADDRESS, &id, 20); return true; } bool Set(const CScriptID &id) { SetData(fTestNet ? 
SCRIPT_ADDRESS_TEST : SCRIPT_ADDRESS, &id, 20); return true; } bool Set(const CTxDestination &dest) { return boost::apply_visitor(CBitcoinAddressVisitor(this), dest); } bool IsValid() const { unsigned int nExpectedSize = 20; bool fExpectTestNet = false; switch(nVersion) { case PUBKEY_ADDRESS: nExpectedSize = 20; // Hash of public key fExpectTestNet = false; break; case SCRIPT_ADDRESS: nExpectedSize = 20; // Hash of CScript fExpectTestNet = false; break; case PUBKEY_ADDRESS_TEST: nExpectedSize = 20; fExpectTestNet = true; break; case SCRIPT_ADDRESS_TEST: nExpectedSize = 20; fExpectTestNet = true; break; default: return false; } return fExpectTestNet == fTestNet && vchData.size() == nExpectedSize; } CBitcoinAddress() { } CBitcoinAddress(const CTxDestination &dest) { Set(dest); } CBitcoinAddress(const std::string& strAddress) { SetString(strAddress); } CBitcoinAddress(const char* pszAddress) { SetString(pszAddress); } CTxDestination Get() const { if (!IsValid()) return CNoDestination(); switch (nVersion) { case PUBKEY_ADDRESS: case PUBKEY_ADDRESS_TEST: { uint160 id; memcpy(&id, &vchData[0], 20); return CKeyID(id); } case SCRIPT_ADDRESS: case SCRIPT_ADDRESS_TEST: { uint160 id; memcpy(&id, &vchData[0], 20); return CScriptID(id); } } return CNoDestination(); } bool GetKeyID(CKeyID &keyID) const { if (!IsValid()) return false; switch (nVersion) { case PUBKEY_ADDRESS: case PUBKEY_ADDRESS_TEST: { uint160 id; memcpy(&id, &vchData[0], 20); keyID = CKeyID(id); return true; } default: return false; } } bool IsScript() const { if (!IsValid()) return false; switch (nVersion) { case SCRIPT_ADDRESS: case SCRIPT_ADDRESS_TEST: { return true; } default: return false; } } void ToggleTestnet() { switch (nVersion) { case PUBKEY_ADDRESS: nVersion = PUBKEY_ADDRESS_TEST; break; case SCRIPT_ADDRESS: nVersion = SCRIPT_ADDRESS_TEST; break; case PUBKEY_ADDRESS_TEST: nVersion = PUBKEY_ADDRESS; break; case SCRIPT_ADDRESS_TEST: nVersion = SCRIPT_ADDRESS; break; } } }; bool inline 
CBitcoinAddressVisitor::operator()(const CKeyID &id) const { return addr->Set(id); } bool inline CBitcoinAddressVisitor::operator()(const CScriptID &id) const { return addr->Set(id); } bool inline CBitcoinAddressVisitor::operator()(const CNoDestination &id) const { return false; } /** A base58-encoded secret key */ class CBitcoinSecret : public CBase58Data { public: enum { PRIVKEY_ADDRESS = CBitcoinAddress::PUBKEY_ADDRESS + 128, PRIVKEY_ADDRESS_TEST = CBitcoinAddress::PUBKEY_ADDRESS_TEST + 128, }; void SetSecret(const CSecret& vchSecret, bool fCompressed) { assert(vchSecret.size() == 32); SetData(fTestNet ? PRIVKEY_ADDRESS_TEST : PRIVKEY_ADDRESS, &vchSecret[0], vchSecret.size()); if (fCompressed) vchData.push_back(1); } CSecret GetSecret(bool &fCompressedOut) { CSecret vchSecret; vchSecret.resize(32); memcpy(&vchSecret[0], &vchData[0], 32); fCompressedOut = vchData.size() == 33; return vchSecret; } bool IsValid() const { bool fExpectTestNet = false; switch(nVersion) { case PRIVKEY_ADDRESS: break; case PRIVKEY_ADDRESS_TEST: fExpectTestNet = true; break; default: return false; } return fExpectTestNet == fTestNet && (vchData.size() == 32 || (vchData.size() == 33 && vchData[32] == 1)); } bool SetString(const char* pszSecret) { return CBase58Data::SetString(pszSecret) && IsValid(); } bool SetString(const std::string& strSecret) { return SetString(strSecret.c_str()); } CBitcoinSecret(const CSecret& vchSecret, bool fCompressed) { SetSecret(vchSecret, fCompressed); } CBitcoinSecret() { } }; #endif
{ "content_hash": "2ff1e386ac3ec9115f5ef92019251c59", "timestamp": "", "source": "github", "line_count": 485, "max_line_length": 114, "avg_line_length": 28.18144329896907, "alnum_prop": 0.5965027802165642, "repo_name": "eagleeyee/ZenithCoin", "id": "9d3bb2a01f5ab6c418c230e27e54e521a9467fa6", "size": "13668", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/base58.h", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "135770" }, { "name": "C++", "bytes": "1537984" }, { "name": "Objective-C", "bytes": "22040" }, { "name": "Prolog", "bytes": "11363" }, { "name": "Python", "bytes": "63076" }, { "name": "Ruby", "bytes": "5315" }, { "name": "Shell", "bytes": "2361" }, { "name": "TypeScript", "bytes": "3759873" } ], "symlink_target": "" }
package com.amazonaws.services.route53.model; import java.io.Serializable; import com.amazonaws.AmazonWebServiceRequest; /** * <p> * The input for a ListResourceRecordSets request. * </p> */ public class ListResourceRecordSetsRequest extends AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * The ID of the hosted zone that contains the resource record sets that you * want to get. * </p> */ private String hostedZoneId; /** * <p> * The first name in the lexicographic ordering of domain names that you * want the <code>ListResourceRecordSets</code> request to list. * </p> */ private String startRecordName; /** * <p> * The DNS type at which to begin the listing of resource record sets. * </p> * <p> * Valid values: <code>A</code> | <code>AAAA</code> | <code>CNAME</code> | * <code>MX</code> | <code>NS</code> | <code>PTR</code> | <code>SOA</code> | * <code>SPF</code> | <code>SRV</code> | <code>TXT</code> * </p> * <p> * Values for Weighted Resource Record Sets: <code>A</code> | * <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code> * </p> * <p> * Values for Regional Resource Record Sets: <code>A</code> | * <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code> * </p> * <p> * Values for Alias Resource Record Sets: <code>A</code> | <code>AAAA</code> * </p> * <p> * Constraint: Specifying <code>type</code> without specifying * <code>name</code> returns an <a>InvalidInput</a> error. * </p> */ private String startRecordType; /** * <p> * <i>Weighted resource record sets only:</i> If results were truncated for * a given DNS name and type, specify the value of * <code>ListResourceRecordSetsResponse$NextRecordIdentifier</code> from the * previous response to get the next resource record set that has the * current DNS name and type. * </p> */ private String startRecordIdentifier; /** * <p> * The maximum number of records you want in the response body. * </p> */ private String maxItems; /** * Default constructor for ListResourceRecordSetsRequest object. 
Callers * should use the setter or fluent setter (with...) methods to initialize * the object after creating it. */ public ListResourceRecordSetsRequest() { } /** * Constructs a new ListResourceRecordSetsRequest object. Callers should use * the setter or fluent setter (with...) methods to initialize any * additional object members. * * @param hostedZoneId * The ID of the hosted zone that contains the resource record sets * that you want to get. */ public ListResourceRecordSetsRequest(String hostedZoneId) { setHostedZoneId(hostedZoneId); } /** * <p> * The ID of the hosted zone that contains the resource record sets that you * want to get. * </p> * * @param hostedZoneId * The ID of the hosted zone that contains the resource record sets * that you want to get. */ public void setHostedZoneId(String hostedZoneId) { this.hostedZoneId = hostedZoneId; } /** * <p> * The ID of the hosted zone that contains the resource record sets that you * want to get. * </p> * * @return The ID of the hosted zone that contains the resource record sets * that you want to get. */ public String getHostedZoneId() { return this.hostedZoneId; } /** * <p> * The ID of the hosted zone that contains the resource record sets that you * want to get. * </p> * * @param hostedZoneId * The ID of the hosted zone that contains the resource record sets * that you want to get. * @return Returns a reference to this object so that method calls can be * chained together. */ public ListResourceRecordSetsRequest withHostedZoneId(String hostedZoneId) { setHostedZoneId(hostedZoneId); return this; } /** * <p> * The first name in the lexicographic ordering of domain names that you * want the <code>ListResourceRecordSets</code> request to list. * </p> * * @param startRecordName * The first name in the lexicographic ordering of domain names that * you want the <code>ListResourceRecordSets</code> request to list. 
*/ public void setStartRecordName(String startRecordName) { this.startRecordName = startRecordName; } /** * <p> * The first name in the lexicographic ordering of domain names that you * want the <code>ListResourceRecordSets</code> request to list. * </p> * * @return The first name in the lexicographic ordering of domain names that * you want the <code>ListResourceRecordSets</code> request to list. */ public String getStartRecordName() { return this.startRecordName; } /** * <p> * The first name in the lexicographic ordering of domain names that you * want the <code>ListResourceRecordSets</code> request to list. * </p> * * @param startRecordName * The first name in the lexicographic ordering of domain names that * you want the <code>ListResourceRecordSets</code> request to list. * @return Returns a reference to this object so that method calls can be * chained together. */ public ListResourceRecordSetsRequest withStartRecordName( String startRecordName) { setStartRecordName(startRecordName); return this; } /** * <p> * The DNS type at which to begin the listing of resource record sets. * </p> * <p> * Valid values: <code>A</code> | <code>AAAA</code> | <code>CNAME</code> | * <code>MX</code> | <code>NS</code> | <code>PTR</code> | <code>SOA</code> | * <code>SPF</code> | <code>SRV</code> | <code>TXT</code> * </p> * <p> * Values for Weighted Resource Record Sets: <code>A</code> | * <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code> * </p> * <p> * Values for Regional Resource Record Sets: <code>A</code> | * <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code> * </p> * <p> * Values for Alias Resource Record Sets: <code>A</code> | <code>AAAA</code> * </p> * <p> * Constraint: Specifying <code>type</code> without specifying * <code>name</code> returns an <a>InvalidInput</a> error. * </p> * * @param startRecordType * The DNS type at which to begin the listing of resource record * sets. 
</p> * <p> * Valid values: <code>A</code> | <code>AAAA</code> | * <code>CNAME</code> | <code>MX</code> | <code>NS</code> | * <code>PTR</code> | <code>SOA</code> | <code>SPF</code> | * <code>SRV</code> | <code>TXT</code> * </p> * <p> * Values for Weighted Resource Record Sets: <code>A</code> | * <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code> * </p> * <p> * Values for Regional Resource Record Sets: <code>A</code> | * <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code> * </p> * <p> * Values for Alias Resource Record Sets: <code>A</code> | * <code>AAAA</code> * </p> * <p> * Constraint: Specifying <code>type</code> without specifying * <code>name</code> returns an <a>InvalidInput</a> error. * @see RRType */ public void setStartRecordType(String startRecordType) { this.startRecordType = startRecordType; } /** * <p> * The DNS type at which to begin the listing of resource record sets. * </p> * <p> * Valid values: <code>A</code> | <code>AAAA</code> | <code>CNAME</code> | * <code>MX</code> | <code>NS</code> | <code>PTR</code> | <code>SOA</code> | * <code>SPF</code> | <code>SRV</code> | <code>TXT</code> * </p> * <p> * Values for Weighted Resource Record Sets: <code>A</code> | * <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code> * </p> * <p> * Values for Regional Resource Record Sets: <code>A</code> | * <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code> * </p> * <p> * Values for Alias Resource Record Sets: <code>A</code> | <code>AAAA</code> * </p> * <p> * Constraint: Specifying <code>type</code> without specifying * <code>name</code> returns an <a>InvalidInput</a> error. * </p> * * @return The DNS type at which to begin the listing of resource record * sets. 
</p> * <p> * Valid values: <code>A</code> | <code>AAAA</code> | * <code>CNAME</code> | <code>MX</code> | <code>NS</code> | * <code>PTR</code> | <code>SOA</code> | <code>SPF</code> | * <code>SRV</code> | <code>TXT</code> * </p> * <p> * Values for Weighted Resource Record Sets: <code>A</code> | * <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code> * </p> * <p> * Values for Regional Resource Record Sets: <code>A</code> | * <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code> * </p> * <p> * Values for Alias Resource Record Sets: <code>A</code> | * <code>AAAA</code> * </p> * <p> * Constraint: Specifying <code>type</code> without specifying * <code>name</code> returns an <a>InvalidInput</a> error. * @see RRType */ public String getStartRecordType() { return this.startRecordType; } /** * <p> * The DNS type at which to begin the listing of resource record sets. * </p> * <p> * Valid values: <code>A</code> | <code>AAAA</code> | <code>CNAME</code> | * <code>MX</code> | <code>NS</code> | <code>PTR</code> | <code>SOA</code> | * <code>SPF</code> | <code>SRV</code> | <code>TXT</code> * </p> * <p> * Values for Weighted Resource Record Sets: <code>A</code> | * <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code> * </p> * <p> * Values for Regional Resource Record Sets: <code>A</code> | * <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code> * </p> * <p> * Values for Alias Resource Record Sets: <code>A</code> | <code>AAAA</code> * </p> * <p> * Constraint: Specifying <code>type</code> without specifying * <code>name</code> returns an <a>InvalidInput</a> error. * </p> * * @param startRecordType * The DNS type at which to begin the listing of resource record * sets. 
</p> * <p> * Valid values: <code>A</code> | <code>AAAA</code> | * <code>CNAME</code> | <code>MX</code> | <code>NS</code> | * <code>PTR</code> | <code>SOA</code> | <code>SPF</code> | * <code>SRV</code> | <code>TXT</code> * </p> * <p> * Values for Weighted Resource Record Sets: <code>A</code> | * <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code> * </p> * <p> * Values for Regional Resource Record Sets: <code>A</code> | * <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code> * </p> * <p> * Values for Alias Resource Record Sets: <code>A</code> | * <code>AAAA</code> * </p> * <p> * Constraint: Specifying <code>type</code> without specifying * <code>name</code> returns an <a>InvalidInput</a> error. * @return Returns a reference to this object so that method calls can be * chained together. * @see RRType */ public ListResourceRecordSetsRequest withStartRecordType( String startRecordType) { setStartRecordType(startRecordType); return this; } /** * <p> * The DNS type at which to begin the listing of resource record sets. * </p> * <p> * Valid values: <code>A</code> | <code>AAAA</code> | <code>CNAME</code> | * <code>MX</code> | <code>NS</code> | <code>PTR</code> | <code>SOA</code> | * <code>SPF</code> | <code>SRV</code> | <code>TXT</code> * </p> * <p> * Values for Weighted Resource Record Sets: <code>A</code> | * <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code> * </p> * <p> * Values for Regional Resource Record Sets: <code>A</code> | * <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code> * </p> * <p> * Values for Alias Resource Record Sets: <code>A</code> | <code>AAAA</code> * </p> * <p> * Constraint: Specifying <code>type</code> without specifying * <code>name</code> returns an <a>InvalidInput</a> error. * </p> * * @param startRecordType * The DNS type at which to begin the listing of resource record * sets. 
</p> * <p> * Valid values: <code>A</code> | <code>AAAA</code> | * <code>CNAME</code> | <code>MX</code> | <code>NS</code> | * <code>PTR</code> | <code>SOA</code> | <code>SPF</code> | * <code>SRV</code> | <code>TXT</code> * </p> * <p> * Values for Weighted Resource Record Sets: <code>A</code> | * <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code> * </p> * <p> * Values for Regional Resource Record Sets: <code>A</code> | * <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code> * </p> * <p> * Values for Alias Resource Record Sets: <code>A</code> | * <code>AAAA</code> * </p> * <p> * Constraint: Specifying <code>type</code> without specifying * <code>name</code> returns an <a>InvalidInput</a> error. * @return Returns a reference to this object so that method calls can be * chained together. * @see RRType */ public void setStartRecordType(RRType startRecordType) { this.startRecordType = startRecordType.toString(); } /** * <p> * The DNS type at which to begin the listing of resource record sets. * </p> * <p> * Valid values: <code>A</code> | <code>AAAA</code> | <code>CNAME</code> | * <code>MX</code> | <code>NS</code> | <code>PTR</code> | <code>SOA</code> | * <code>SPF</code> | <code>SRV</code> | <code>TXT</code> * </p> * <p> * Values for Weighted Resource Record Sets: <code>A</code> | * <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code> * </p> * <p> * Values for Regional Resource Record Sets: <code>A</code> | * <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code> * </p> * <p> * Values for Alias Resource Record Sets: <code>A</code> | <code>AAAA</code> * </p> * <p> * Constraint: Specifying <code>type</code> without specifying * <code>name</code> returns an <a>InvalidInput</a> error. * </p> * * @param startRecordType * The DNS type at which to begin the listing of resource record * sets. 
</p> * <p> * Valid values: <code>A</code> | <code>AAAA</code> | * <code>CNAME</code> | <code>MX</code> | <code>NS</code> | * <code>PTR</code> | <code>SOA</code> | <code>SPF</code> | * <code>SRV</code> | <code>TXT</code> * </p> * <p> * Values for Weighted Resource Record Sets: <code>A</code> | * <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code> * </p> * <p> * Values for Regional Resource Record Sets: <code>A</code> | * <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code> * </p> * <p> * Values for Alias Resource Record Sets: <code>A</code> | * <code>AAAA</code> * </p> * <p> * Constraint: Specifying <code>type</code> without specifying * <code>name</code> returns an <a>InvalidInput</a> error. * @return Returns a reference to this object so that method calls can be * chained together. * @see RRType */ public ListResourceRecordSetsRequest withStartRecordType( RRType startRecordType) { setStartRecordType(startRecordType); return this; } /** * <p> * <i>Weighted resource record sets only:</i> If results were truncated for * a given DNS name and type, specify the value of * <code>ListResourceRecordSetsResponse$NextRecordIdentifier</code> from the * previous response to get the next resource record set that has the * current DNS name and type. * </p> * * @param startRecordIdentifier * Weighted resource record sets only:</i> If results were truncated * for a given DNS name and type, specify the value of * <code>ListResourceRecordSetsResponse$NextRecordIdentifier */ public void setStartRecordIdentifier(String startRecordIdentifier) { this.startRecordIdentifier = startRecordIdentifier; } /** * <p> * <i>Weighted resource record sets only:</i> If results were truncated for * a given DNS name and type, specify the value of * <code>ListResourceRecordSetsResponse$NextRecordIdentifier</code> from the * previous response to get the next resource record set that has the * current DNS name and type. 
* </p> * * @return Weighted resource record sets only:</i> If results were truncated * for a given DNS name and type, specify the value of * <code>ListResourceRecordSetsResponse$NextRecordIdentifier */ public String getStartRecordIdentifier() { return this.startRecordIdentifier; } /** * <p> * <i>Weighted resource record sets only:</i> If results were truncated for * a given DNS name and type, specify the value of * <code>ListResourceRecordSetsResponse$NextRecordIdentifier</code> from the * previous response to get the next resource record set that has the * current DNS name and type. * </p> * * @param startRecordIdentifier * Weighted resource record sets only:</i> If results were truncated * for a given DNS name and type, specify the value of * <code>ListResourceRecordSetsResponse$NextRecordIdentifier * @return Returns a reference to this object so that method calls can be * chained together. */ public ListResourceRecordSetsRequest withStartRecordIdentifier( String startRecordIdentifier) { setStartRecordIdentifier(startRecordIdentifier); return this; } /** * <p> * The maximum number of records you want in the response body. * </p> * * @param maxItems * The maximum number of records you want in the response body. */ public void setMaxItems(String maxItems) { this.maxItems = maxItems; } /** * <p> * The maximum number of records you want in the response body. * </p> * * @return The maximum number of records you want in the response body. */ public String getMaxItems() { return this.maxItems; } /** * <p> * The maximum number of records you want in the response body. * </p> * * @param maxItems * The maximum number of records you want in the response body. * @return Returns a reference to this object so that method calls can be * chained together. */ public ListResourceRecordSetsRequest withMaxItems(String maxItems) { setMaxItems(maxItems); return this; } /** * Returns a string representation of this object; useful for testing and * debugging. 
* * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getHostedZoneId() != null) sb.append("HostedZoneId: " + getHostedZoneId() + ","); if (getStartRecordName() != null) sb.append("StartRecordName: " + getStartRecordName() + ","); if (getStartRecordType() != null) sb.append("StartRecordType: " + getStartRecordType() + ","); if (getStartRecordIdentifier() != null) sb.append("StartRecordIdentifier: " + getStartRecordIdentifier() + ","); if (getMaxItems() != null) sb.append("MaxItems: " + getMaxItems()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof ListResourceRecordSetsRequest == false) return false; ListResourceRecordSetsRequest other = (ListResourceRecordSetsRequest) obj; if (other.getHostedZoneId() == null ^ this.getHostedZoneId() == null) return false; if (other.getHostedZoneId() != null && other.getHostedZoneId().equals(this.getHostedZoneId()) == false) return false; if (other.getStartRecordName() == null ^ this.getStartRecordName() == null) return false; if (other.getStartRecordName() != null && other.getStartRecordName().equals(this.getStartRecordName()) == false) return false; if (other.getStartRecordType() == null ^ this.getStartRecordType() == null) return false; if (other.getStartRecordType() != null && other.getStartRecordType().equals(this.getStartRecordType()) == false) return false; if (other.getStartRecordIdentifier() == null ^ this.getStartRecordIdentifier() == null) return false; if (other.getStartRecordIdentifier() != null && other.getStartRecordIdentifier().equals( this.getStartRecordIdentifier()) == false) return false; if (other.getMaxItems() == null ^ this.getMaxItems() == null) return false; if (other.getMaxItems() != null && other.getMaxItems().equals(this.getMaxItems()) == false) return 
false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getHostedZoneId() == null) ? 0 : getHostedZoneId() .hashCode()); hashCode = prime * hashCode + ((getStartRecordName() == null) ? 0 : getStartRecordName() .hashCode()); hashCode = prime * hashCode + ((getStartRecordType() == null) ? 0 : getStartRecordType() .hashCode()); hashCode = prime * hashCode + ((getStartRecordIdentifier() == null) ? 0 : getStartRecordIdentifier().hashCode()); hashCode = prime * hashCode + ((getMaxItems() == null) ? 0 : getMaxItems().hashCode()); return hashCode; } @Override public ListResourceRecordSetsRequest clone() { return (ListResourceRecordSetsRequest) super.clone(); } }
{ "content_hash": "000200bd0215d69b655773e43c16afaa", "timestamp": "", "source": "github", "line_count": 661, "max_line_length": 89, "avg_line_length": 36.680786686838125, "alnum_prop": 0.5574115317990597, "repo_name": "trasa/aws-sdk-java", "id": "6754d61cf9a2d0f6f1ecb597b926bae0040892fb", "size": "24830", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "aws-java-sdk-route53/src/main/java/com/amazonaws/services/route53/model/ListResourceRecordSetsRequest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "100011199" }, { "name": "Scilab", "bytes": "2354" } ], "symlink_target": "" }
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. /*! Higher level communication abstractions. */ #[allow(missing_doc)]; use std::comm::{GenericChan, GenericSmartChan, GenericPort}; use std::comm::{Chan, Port, Peekable}; use std::comm; /// An extension of `pipes::stream` that allows both sending and receiving. pub struct DuplexStream<T, U> { priv chan: Chan<T>, priv port: Port<U>, } // Allow these methods to be used without import: impl<T:Send,U:Send> DuplexStream<T, U> { pub fn send(&self, x: T) { self.chan.send(x) } pub fn try_send(&self, x: T) -> bool { self.chan.try_send(x) } pub fn recv(&self, ) -> U { self.port.recv() } pub fn try_recv(&self) -> Option<U> { self.port.try_recv() } pub fn peek(&self) -> bool { self.port.peek() } } impl<T:Send,U:Send> GenericChan<T> for DuplexStream<T, U> { fn send(&self, x: T) { self.chan.send(x) } } impl<T:Send,U:Send> GenericSmartChan<T> for DuplexStream<T, U> { fn try_send(&self, x: T) -> bool { self.chan.try_send(x) } } impl<T:Send,U:Send> GenericPort<U> for DuplexStream<T, U> { fn recv(&self) -> U { self.port.recv() } fn try_recv(&self) -> Option<U> { self.port.try_recv() } } impl<T:Send,U:Send> Peekable<U> for DuplexStream<T, U> { fn peek(&self) -> bool { self.port.peek() } } /// Creates a bidirectional stream. pub fn DuplexStream<T:Send,U:Send>() -> (DuplexStream<T, U>, DuplexStream<U, T>) { let (p1, c2) = comm::stream(); let (p2, c1) = comm::stream(); (DuplexStream { chan: c1, port: p1 }, DuplexStream { chan: c2, port: p2 }) } /// An extension of `pipes::stream` that provides synchronous message sending. 
pub struct SyncChan<T> { priv duplex_stream: DuplexStream<T, ()> } /// An extension of `pipes::stream` that acknowledges each message received. pub struct SyncPort<T> { priv duplex_stream: DuplexStream<(), T> } impl<T: Send> GenericChan<T> for SyncChan<T> { fn send(&self, val: T) { assert!(self.try_send(val), "SyncChan.send: receiving port closed"); } } impl<T: Send> GenericSmartChan<T> for SyncChan<T> { /// Sends a message, or report if the receiver has closed the connection before receiving. fn try_send(&self, val: T) -> bool { self.duplex_stream.try_send(val) && self.duplex_stream.try_recv().is_some() } } impl<T: Send> GenericPort<T> for SyncPort<T> { fn recv(&self) -> T { self.try_recv().expect("SyncPort.recv: sending channel closed") } fn try_recv(&self) -> Option<T> { self.duplex_stream.try_recv().map(|val| { self.duplex_stream.try_send(()); val }) } } impl<T: Send> Peekable<T> for SyncPort<T> { fn peek(&self) -> bool { self.duplex_stream.peek() } } /// Creates a stream whose channel, upon sending a message, blocks until the message is received. 
pub fn rendezvous<T: Send>() -> (SyncPort<T>, SyncChan<T>) { let (chan_stream, port_stream) = DuplexStream(); (SyncPort { duplex_stream: port_stream }, SyncChan { duplex_stream: chan_stream }) } #[cfg(test)] mod test { use comm::{DuplexStream, rendezvous}; use std::rt::test::run_in_uv_task; #[test] pub fn DuplexStream1() { let (left, right) = DuplexStream(); left.send(~"abc"); right.send(123); assert!(left.recv() == 123); assert!(right.recv() == ~"abc"); } #[test] pub fn basic_rendezvous_test() { let (port, chan) = rendezvous(); do spawn { chan.send("abc"); } assert!(port.recv() == "abc"); } #[test] fn recv_a_lot() { // Rendezvous streams should be able to handle any number of messages being sent do run_in_uv_task { let (port, chan) = rendezvous(); do spawn { 1000000.times(|| { chan.send(()) }) } 1000000.times(|| { port.recv() }) } } #[test] fn send_and_fail_and_try_recv() { let (port, chan) = rendezvous(); do spawn { chan.duplex_stream.send(()); // Can't access this field outside this module fail!() } port.recv() } #[test] fn try_send_and_recv_then_fail_before_ack() { let (port, chan) = rendezvous(); do spawn { port.duplex_stream.recv(); fail!() } chan.try_send(()); } #[test] #[should_fail] fn send_and_recv_then_fail_before_ack() { let (port, chan) = rendezvous(); do spawn { port.duplex_stream.recv(); fail!() } chan.send(()); } }
{ "content_hash": "60c984e38eb01c239088c8dfa09b53df", "timestamp": "", "source": "github", "line_count": 206, "max_line_length": 97, "avg_line_length": 25.393203883495147, "alnum_prop": 0.5694895813419996, "repo_name": "fabricedesre/rust", "id": "42287736ffa5f2a46ad172dd96fa34c62967a6f3", "size": "5231", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/libextra/comm.rs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "20065" }, { "name": "C", "bytes": "526873" }, { "name": "C++", "bytes": "33090" }, { "name": "CSS", "bytes": "12875" }, { "name": "Emacs Lisp", "bytes": "34614" }, { "name": "JavaScript", "bytes": "42047" }, { "name": "Perl", "bytes": "619" }, { "name": "Puppet", "bytes": "5358" }, { "name": "Python", "bytes": "48672" }, { "name": "Rust", "bytes": "11603116" }, { "name": "Shell", "bytes": "2065" }, { "name": "VimL", "bytes": "22082" } ], "symlink_target": "" }
The first way Brain Monkey offers to monkey patch a function is `Functions\when()`. This function has to be used to **set a behavior** for functions. `when()` and 5 related methods are used to define functions \(if not defined yet\) and: * make them return a specific value * make them return one of the received arguments * make them echo a specific value * make them echo one of the received arguments * make them behave just like another callback For the sake of readability, in all the code samples below I'll assume that an `use` statement is in place: ```php use Brain\Monkey\Functions; ``` Don't forget to add it in your code as well, or use the fully qualified class name. Also be sure to read the _PHP Functions / Setup_ section that explain how setup Brain Monkey for usage in tests. ## `justReturn()` By using `when()` in combination with `justReturn()` you can make a \(maybe\) undefined function _just return_ a given value: ```php Functions\when('a_undefined_function')->justReturn('Cool!'); echo a_undefined_function(); // echoes "Cool!" ``` Without passing a value to `justReturn()` the target function will return nothing \(`null`\). ## `returnArg()` This other `when`-related method is used to make the target function return one of the received arguments, by default the first. 
```php Functions\when('give_me_the_first')->returnArg(); // is the same of ->returnArg(1) Functions\when('i_want_the_second')->returnArg(2); Functions\when('and_the_third_for_me')->returnArg(3); echo give_me_the_first('A', 'B', 'C'); // echoes "A" echo i_want_the_second('A', 'B', 'C'); // echoes "B" echo and_the_third_for_me('A', 'B', 'C'); // echoes "C" ``` Note that if the target function does not receive the desired argument, `returnArg()` throws an exception: ```php Functions\when('needs_the_third')->returnArg(3); // throws an exception because required 3rd argument, but received 2 echo needs_the_third('A', 'B'); ``` ## `justEcho()` Similar to `justReturn()`, it makes the mocked function echo some value instead of returning it. ```php Functions\when('a_undefined_function')->justEcho('Cool!'); a_undefined_function(); // echoes "Cool!" ``` ## `echoArg()` Similar to `returnArg()`, it makes the mocked function echo some received argument instead of returning it. ```php Functions\when('echo_the_first')->echoArg(); // is the same of ->echoArg(1) Functions\when('echo_the_second')->echoArg(2); echo_the_first('A', 'B', 'C'); // echoes "A" echo_the_second('A', 'B', 'C'); // echoes "B" ``` ## `alias()` The last of the when-related methods allows to make a function behave just like another callback. The replacing function can be anything that can be run: a core function or a custom one, a class method, a closure... ```php Functions\when('duplicate')->alias(function($value) { "Was ".$value.", now is ".($value * 2); }); Functions\when('bigger')->alias('strtoupper'); echo duplicate(1); // echoes "Was 1, now is 2" echo bigger('was lower'); // echoes "WAS LOWER" ```
{ "content_hash": "18f7e1f8e3346cc27cd4f7a85c8de9d7", "timestamp": "", "source": "github", "line_count": 94, "max_line_length": 215, "avg_line_length": 32.08510638297872, "alnum_prop": 0.6986074270557029, "repo_name": "Brain-WP/BrainMonkey", "id": "e5e32d37804a0f6d43d49ac54635b5e083e920d9", "size": "3052", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "docs/functions-testing-tools/functions-when.md", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "214759" } ], "symlink_target": "" }
package sh.isaac.provider.qa; import java.util.HashMap; import java.util.concurrent.atomic.AtomicInteger; import java.util.regex.Pattern; import sh.isaac.MetaData; import sh.isaac.api.Get; import sh.isaac.api.chronicle.LatestVersion; import sh.isaac.api.chronicle.Version; import sh.isaac.api.chronicle.VersionType; import sh.isaac.api.component.concept.ConceptChronology; import sh.isaac.api.component.concept.ConceptVersion; import sh.isaac.api.component.semantic.SemanticChronology; import sh.isaac.api.component.semantic.version.DescriptionVersion; import sh.isaac.api.coordinate.StampFilter; import sh.isaac.api.coordinate.StampFilterImmutable; import sh.isaac.api.qa.QAInfo; import sh.isaac.api.qa.QAResults; import sh.isaac.api.qa.Severity; import sh.isaac.utility.Frills; public class SimpleQA extends QATask { private final Pattern illegalChars = Pattern.compile(".*[\\t\\r\\n@$#\\\\].*"); private final QAResults results = new QAResults(); private HashMap<String, Integer> uniqueFQNs = new HashMap<>(); public SimpleQA(StampFilter coordinate) { super(coordinate); } @Override protected QAResults call() throws Exception { //Just a one-off rule for now, for testing the overall API flow Get.conceptService().getConceptChronologyStream(true).forEach((ConceptChronology concept) -> { LatestVersion<ConceptVersion> cv = concept.getLatestVersion(coordinate); if (cv.isPresent() && cv.get().isActive()) { final int termType = Frills.getTerminologyTypeForModule(cv.get().getModuleNid(), coordinate); if (Frills.getSCTRulesTermTypes().contains(termType)) { AtomicInteger fqnCount = new AtomicInteger(); AtomicInteger rnCount = new AtomicInteger(); Get.assemblageService().getSemanticChronologyStreamForComponent(concept.getNid(), true).forEach((SemanticChronology semantic) -> { if (semantic.getVersionType() == VersionType.DESCRIPTION) { LatestVersion<DescriptionVersion> dv = semantic.getLatestVersion(coordinate); if (dv.isPresent() && dv.get().isActive()) { checkVersion(dv.get()); if 
(dv.get().getDescriptionTypeConceptNid() == MetaData.FULLY_QUALIFIED_NAME_DESCRIPTION_TYPE____SOLOR.getNid()) { fqnCount.getAndIncrement(); Integer existing = uniqueFQNs.put(dv.get().getText() + dv.get().getLanguageConceptNid(), dv.get().getNid()); if (existing != null) { addResult(new QAInfo(Severity.ERROR, existing, "Duplicate Fully Qualified Name", dv.get().getText())); } } else if (dv.get().getDescriptionTypeConceptNid() == MetaData.REGULAR_NAME_DESCRIPTION_TYPE____SOLOR.getNid()) { rnCount.getAndIncrement(); } } } }); if (fqnCount.get() < 1) { addResult(new QAInfo(Severity.ERROR, concept.getNid(), "No active Fully Quallifed Name")); } if (rnCount.get() < 1) { addResult(new QAInfo(Severity.ERROR, concept.getNid(), "No active Regular Name")); } } } }); return results; } @Override public QAResults checkVersion(Version v) { if (v instanceof DescriptionVersion) { //TODO I need a list of parent modules that snomed QA rules should apply to final String descriptionText = ((DescriptionVersion)v).getText(); if (illegalChars.matcher(descriptionText).matches()) { addResult(new QAInfo(Severity.ERROR, v.getNid(), "An active term should not contain tabs, newlines, or characters @, $, #, \\\\.", descriptionText)); } if (descriptionText.contains(" ")) { addResult(new QAInfo(Severity.WARNING, v.getNid(), "An active term should not contain double spaces", descriptionText)); } } return results; } private void addResult(QAInfo result) { synchronized (results) { results.addResult(result); } } @Override protected StampFilterImmutable getFilter() { return coordinate.toStampFilterImmutable(); } }
{ "content_hash": "8e9bf2baa72829d74a8bc54d1e4469ce", "timestamp": "", "source": "github", "line_count": 122, "max_line_length": 135, "avg_line_length": 32.57377049180328, "alnum_prop": 0.6957725213890287, "repo_name": "OSEHRA/ISAAC", "id": "f1e653dd6d49d65233df8ca6c26560a68d840688", "size": "5190", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "provider/qa/src/main/java/sh/isaac/provider/qa/SimpleQA.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "AppleScript", "bytes": "1499" }, { "name": "CSS", "bytes": "80251" }, { "name": "HTML", "bytes": "35085" }, { "name": "Java", "bytes": "12855254" }, { "name": "Swift", "bytes": "98" }, { "name": "XSLT", "bytes": "362" } ], "symlink_target": "" }
package com.rvprg.sumi.transport; import com.google.inject.Inject; import com.rvprg.sumi.protocol.MessageConsumer; import net.jcip.annotations.Immutable; @Immutable public class MemberConnectorListenerImpl implements MemberConnectorListener { private final MessageConsumer messageConsumer; private final ChannelPipelineInitializer pipelineInitializer; @Inject public MemberConnectorListenerImpl(MessageConsumer messageConsumer, ChannelPipelineInitializer pipelineInitializer) { this.messageConsumer = messageConsumer; this.pipelineInitializer = pipelineInitializer; } @Override public void connected(ActiveMember member) { pipelineInitializer.initialize(member.getChannel().pipeline()).addLast(new MessageDispatcher(member, messageConsumer)); } @Override public void scheduledReconnect(MemberId member) { // nop } @Override public void disconnected(MemberId memberId) { // nop } @Override public void exceptionCaught(MemberId memberId, Throwable cause) { // nop } }
{ "content_hash": "8cb047496225310f5a021a4589fc1c90", "timestamp": "", "source": "github", "line_count": 39, "max_line_length": 127, "avg_line_length": 27.974358974358974, "alnum_prop": 0.7442713107241063, "repo_name": "rvprg/pet", "id": "b710c6dbee51ec36875d7c6cabbad25a928570c4", "size": "1091", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "consensus/src/main/java/com/rvprg/sumi/transport/MemberConnectorListenerImpl.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "277053" } ], "symlink_target": "" }
using System; using System.Collections.Generic; using System.Linq; using System.Security.Claims; namespace Einstein.Security.Identity { public static class IUserIdentityExtensions { public static IEnumerable<Claim> GetBaseClaims(this IUserIdentity identity) { if (identity == null) yield return new Claim(ClaimTypes.UserIdentifier, null); yield return new Claim(ClaimTypes.UserIdentifier, identity.Id.ToString()); yield return new Claim(ClaimTypes.UserName, identity.UserName); if (identity.Roles != null) { foreach (var role in identity.Roles) { yield return new Claim(ClaimTypes.Role, role.Name); if (role.Claims != null) { foreach (var roleClaim in role.Claims) yield return roleClaim; } } } } public static IEnumerable<Claim> AllClaims(this IUserIdentity identity) { return identity == null ? Enumerable.Empty<Claim>() : identity.GetBaseClaims().Union(identity.Claims ?? Enumerable.Empty<Claim>()); } public static bool HasClaim(this IUserIdentity identity, Func<Claim, bool> predicate) { return identity != null && identity.AllClaims().Any(predicate); } public static bool HasClaim(this IUserIdentity identity, string claimType, string value = null) { return identity != null && identity.HasClaim(x => x.Type == claimType && ((value == null && x.Value == null) || x.Value == value)); } public static ClaimsIdentity AsClaimsIdentity(this IUserIdentity identity, string authenticationType = null) { var claimsIdentity = new ClaimsIdentity(identity, identity.AllClaims(), authenticationType, ClaimTypes.UserName, ClaimTypes.Role); return claimsIdentity; } } }
{ "content_hash": "0e2de68e152ceef1fbc06acf57e649ce", "timestamp": "", "source": "github", "line_count": 53, "max_line_length": 143, "avg_line_length": 39.301886792452834, "alnum_prop": 0.5808929428708594, "repo_name": "PaulWeakley/Einstein", "id": "4d46acabe35e6fec7b396f17f01afe1a811d186d", "size": "2085", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Einstein/Security/Identity/IUserIdentityExtensions.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "95089" } ], "symlink_target": "" }
<?php /** * Returns the text of the error message from previous MySQL operation * * @phpstub * * @param resource $link_identifier * * @return string Returns the error text from the last MySQL function, or * (empty string) if no error occurred. */ function mysql_error($link_identifier = NULL) { }
{ "content_hash": "93c4a5a73623a88c32ec8add4cb1ad8a", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 73, "avg_line_length": 21.4, "alnum_prop": 0.6697819314641744, "repo_name": "schmittjoh/php-stubs", "id": "824bb475020c2ea5c91baf90ccd7b75ad4334838", "size": "321", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "res/php/mysql/functions/mysql-error.php", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "PHP", "bytes": "2203628" } ], "symlink_target": "" }
from hazelcast.serialization.bits import * from hazelcast.protocol.builtin import FixSizedTypesCodec from hazelcast.protocol.client_message import OutboundMessage, REQUEST_HEADER_SIZE, create_initial_buffer from hazelcast.protocol.builtin import StringCodec # hex: 0x012000 _REQUEST_MESSAGE_TYPE = 73728 # hex: 0x012001 _RESPONSE_MESSAGE_TYPE = 73729 _REQUEST_REPLACE_EXISTING_VALUES_OFFSET = REQUEST_HEADER_SIZE _REQUEST_INITIAL_FRAME_SIZE = _REQUEST_REPLACE_EXISTING_VALUES_OFFSET + BOOLEAN_SIZE_IN_BYTES def encode_request(name, replace_existing_values): buf = create_initial_buffer(_REQUEST_INITIAL_FRAME_SIZE, _REQUEST_MESSAGE_TYPE) FixSizedTypesCodec.encode_boolean(buf, _REQUEST_REPLACE_EXISTING_VALUES_OFFSET, replace_existing_values) StringCodec.encode(buf, name, True) return OutboundMessage(buf, False)
{ "content_hash": "5d0faa9b156f76f78e4784c088b41181", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 108, "avg_line_length": 43.89473684210526, "alnum_prop": 0.802158273381295, "repo_name": "hazelcast/hazelcast-python-client", "id": "b0879d678144963dead8c5cfbed1d7ea276ab8a6", "size": "834", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "hazelcast/protocol/codec/map_load_all_codec.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "2300326" }, { "name": "Shell", "bytes": "1900" } ], "symlink_target": "" }
package io.reinert.requestor.gwtjackson.rebind.codegen; import com.squareup.javapoet.CodeBlock; import com.squareup.javapoet.MethodSpec; /** * A method schema that can be assembled to later get its spec. * * @author Danilo Reinert */ public abstract class MethodAssembler { private MethodSpec spec; protected abstract MethodSpec.Builder getSignature(); public MethodSpec assemble(CodeBlock codeBlock) { final MethodSpec.Builder builder = getSignature(); if (codeBlock != null) builder.addCode(codeBlock); spec = builder.build(); return spec; } public MethodSpec spec() { if (spec == null) throw new IllegalStateException("Method spec has not been built yet."); return spec; } }
{ "content_hash": "24a2ad0c05cb9abf52d32439d6a710d5", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 83, "avg_line_length": 25.766666666666666, "alnum_prop": 0.684346701164295, "repo_name": "reinert/requestor", "id": "6cbb9554050801065ddea64a7269826893b75003", "size": "1370", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "requestor/ext/requestor-gwtjackson/src/main/java/io/reinert/requestor/gwtjackson/rebind/codegen/MethodAssembler.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "415" }, { "name": "Java", "bytes": "1324878" }, { "name": "Shell", "bytes": "1907" } ], "symlink_target": "" }
'use strict'; import React from 'react'; import PropTypes from 'prop-types'; import radium from 'radium'; import areEqual from 'fbjs/lib/areEqual'; import moment from 'moment'; const timeTypes = { hour: PropTypes.number, minute: PropTypes.number, second: PropTypes.number, millisecond: PropTypes.number }; @radium export default class VideoSubtitle extends React.Component { static propTypes = { subtitle: PropTypes.arrayOf( PropTypes.shape({ ...timeTypes, content: PropTypes.func.isRequired }) ).isRequired, now: PropTypes.shape(timeTypes).isRequired } constructor(props) { super(props); this.subtitle = this.sortSubtitle(props.subtitle); } componentWillReceiveProps(nextProps) { /* istanbul ignore if */ if(!areEqual(this.props.subtitle, nextProps.subtitle)) this.subtitle = this.sortSubtitle(nextProps.subtitle); } render() { const {now, ...props} = this.props; const nowTime = moment(now).format('x'); delete props.subtitle; return ( <div {...props}> {this.subtitle.map(({content, time}, index) => ( React.cloneElement(content( nowTime === time || (nowTime > time && nowTime < (this.subtitle[index + 1] || /* istanbul ignore next */ {}).time) ), { key: index }) ))} </div> ); } sortSubtitle(subtitle) { return subtitle.map(({content, ...time}) => ({ content, originTime: time, time: moment(time).format('x') })).sort((a, b) => { return a.time - b.time; }); } }
{ "content_hash": "26eae055fc086e9d46e0681b3b6c6288", "timestamp": "", "source": "github", "line_count": 68, "max_line_length": 106, "avg_line_length": 23.735294117647058, "alnum_prop": 0.6028500619578686, "repo_name": "HsuTing/cat-components", "id": "d257cc2f44ff6048988eed49ff8802314479ca0a", "size": "1614", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/video-subtitle.js", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "4889" }, { "name": "JavaScript", "bytes": "133601" } ], "symlink_target": "" }
// Code generated by client-gen. DO NOT EDIT. // This package has the automatically generated fake clientset. package fake
{ "content_hash": "ed92df6bb5d48a5bfab8ff029afee901", "timestamp": "", "source": "github", "line_count": 6, "max_line_length": 63, "avg_line_length": 21, "alnum_prop": 0.7619047619047619, "repo_name": "kubeflow/katib", "id": "d2e3339ea31a8d5aa0254a54007b0ca5e58b55d3", "size": "693", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pkg/client/controller/clientset/versioned/fake/doc.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "892" }, { "name": "Dockerfile", "bytes": "17663" }, { "name": "Go", "bytes": "762583" }, { "name": "HTML", "bytes": "112818" }, { "name": "JavaScript", "bytes": "325581" }, { "name": "Makefile", "bytes": "5703" }, { "name": "Python", "bytes": "884171" }, { "name": "SCSS", "bytes": "4261" }, { "name": "Shell", "bytes": "60643" }, { "name": "TypeScript", "bytes": "193459" } ], "symlink_target": "" }
<?php namespace Store\Entity; use Doctrine\ORM\Mapping as ORM; use Doctrine\Common\Collections\ArrayCollection; use Doctrine\Common\Collections\Collection; use Zend\Form\Annotation; /** * Users * * @ORM\Table(name="categories") * @ORM\Entity(repositoryClass="Store\Entity\Repository\CategoryRepository") * @Annotation\Name("category") * @Annotation\Hydrator("Zend\Stdlib\Hydrator\ClassMethods") */ class Category { //fields------------------ /** * @ORM\Id * @ORM\Column(type="integer"); * @ORM\GeneratedValue(strategy="AUTO") */ protected $id; /** * @var string * * @ORM\Column(name="name", type="text", nullable=false) */ private $name; public function __get($property) { return $this->$property; } /** * Magic setter to save protected properties. * * @param string $property * @param mixed $value */ public function __set($property, $value) { $this->$property = $value; } public function getArrayCopy() { return get_object_vars($this); } }
{ "content_hash": "2c2edd58b9ffc6d4be53191d6f6753e2", "timestamp": "", "source": "github", "line_count": 53, "max_line_length": 76, "avg_line_length": 21.79245283018868, "alnum_prop": 0.5731601731601732, "repo_name": "slby/Store", "id": "489fad6b12ecb25d90f8cc0d1dbcd3b3baf9a0d0", "size": "1155", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "module/Store/src/Store/Entity/Category.php", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "1042" }, { "name": "JavaScript", "bytes": "1618" }, { "name": "PHP", "bytes": "222129" } ], "symlink_target": "" }
![][logo] # Chatable Web Client The official Chatable Web Chat client [logo]: https://raw.githubusercontent.com/Chatable/chatable.github.io/master/img/chatableclienticon-small.png "Chatable" [![Build Status](https://drone.io/github.com/Chatable/chatable-client-web/status.png)](https://drone.io/github.com/Chatable/chatable-client-web/latest) &nbsp; [![Stories in Ready](https://badge.waffle.io/Chatable/chatable-client-web.png?label=ready&title=Ready)](https://waffle.io/Chatable/chatable-client-web) ## Instructions ### 1. Compilation `mvn gwt:compile` Then copy web.html into the root of the created dir ### 2. Integration Create an instace of Jetty >Or get it via `wget http://download.eclipse.org/jetty/stable-9/dist/jetty-distribution-9.2.9.v20150224.tar.gz` for Jetty v9.2.9 Copy the created target folder from 1. into the webapps/ folder of the Jetty root ### 3. Deployment In the Jetty root: `java -jar start.jar jetty.port=<Your Exposed Port>`
{ "content_hash": "aa58b7169416ed86c7b10434350d1759", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 151, "avg_line_length": 31.322580645161292, "alnum_prop": 0.752832131822863, "repo_name": "Chatable/chatable-client-web", "id": "896be9e6434ad977777b27578a01df2eb5d33ba2", "size": "972", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "184" }, { "name": "HTML", "bytes": "820" }, { "name": "Java", "bytes": "8431" }, { "name": "JavaScript", "bytes": "2467" } ], "symlink_target": "" }
//------------------------------------------------------------------------------ // <auto-generated> // This code was generated by a tool. // // Changes to this file may cause incorrect behavior and will be lost if // the code is regenerated. // </auto-generated> //------------------------------------------------------------------------------ namespace YAF.Pages.Admin { public partial class topicstatus_edit { /// <summary> /// PageLinks control. /// </summary> /// <remarks> /// Auto-generated field. /// To modify move field declaration from designer file to code-behind file. /// </remarks> protected global::VZF.Controls.PageLinks PageLinks; /// <summary> /// Adminmenu1 control. /// </summary> /// <remarks> /// Auto-generated field. /// To modify move field declaration from designer file to code-behind file. /// </remarks> protected global::VZF.Controls.AdminMenu Adminmenu1; /// <summary> /// LocalizedLabel1 control. /// </summary> /// <remarks> /// Auto-generated field. /// To modify move field declaration from designer file to code-behind file. /// </remarks> protected global::VZF.Controls.LocalizedLabel LocalizedLabel1; /// <summary> /// HelpLabel1 control. /// </summary> /// <remarks> /// Auto-generated field. /// To modify move field declaration from designer file to code-behind file. /// </remarks> protected global::VZF.Controls.HelpLabel HelpLabel1; /// <summary> /// TopicStatusName control. /// </summary> /// <remarks> /// Auto-generated field. /// To modify move field declaration from designer file to code-behind file. /// </remarks> protected global::System.Web.UI.WebControls.TextBox TopicStatusName; /// <summary> /// HelpLabel2 control. /// </summary> /// <remarks> /// Auto-generated field. /// To modify move field declaration from designer file to code-behind file. /// </remarks> protected global::VZF.Controls.HelpLabel HelpLabel2; /// <summary> /// DefaultDescription control. /// </summary> /// <remarks> /// Auto-generated field. 
/// To modify move field declaration from designer file to code-behind file. /// </remarks> protected global::System.Web.UI.WebControls.TextBox DefaultDescription; /// <summary> /// save control. /// </summary> /// <remarks> /// Auto-generated field. /// To modify move field declaration from designer file to code-behind file. /// </remarks> protected global::System.Web.UI.WebControls.Button save; /// <summary> /// cancel control. /// </summary> /// <remarks> /// Auto-generated field. /// To modify move field declaration from designer file to code-behind file. /// </remarks> protected global::System.Web.UI.WebControls.Button cancel; /// <summary> /// SmartScroller1 control. /// </summary> /// <remarks> /// Auto-generated field. /// To modify move field declaration from designer file to code-behind file. /// </remarks> protected global::VZF.Controls.SmartScroller SmartScroller1; } }
{ "content_hash": "a7fea55d30b9cedf77345427f82f3b33", "timestamp": "", "source": "github", "line_count": 105, "max_line_length": 84, "avg_line_length": 34.6, "alnum_prop": 0.5312413982934214, "repo_name": "vzrus/VZF", "id": "cf33d6fc7455879bc4ee7d782ae303ff9a462c25", "size": "3635", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "vzfsrc/VZF.NET/pages/admin/topicstatus_edit.ascx.designer.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ASP", "bytes": "1892975" }, { "name": "Batchfile", "bytes": "2894" }, { "name": "C#", "bytes": "8971363" }, { "name": "CSS", "bytes": "1682229" }, { "name": "HTML", "bytes": "46740" }, { "name": "JavaScript", "bytes": "5407447" }, { "name": "PLpgSQL", "bytes": "1799027" }, { "name": "SQLPL", "bytes": "4870" }, { "name": "XSLT", "bytes": "7052" } ], "symlink_target": "" }
-- MySQL dump 10.13 Distrib 5.7.11, for osx10.11 (x86_64) -- -- Host: localhost Database: amdv_error -- ------------------------------------------------------ -- Server version 5.7.11 /*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; /*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; /*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; /*!40101 SET NAMES eucjpms */; /*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; /*!40103 SET TIME_ZONE='+00:00' */; /*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */; /*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; /*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; /*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; -- -- Table structure for table `error_data` -- DROP TABLE IF EXISTS `error_data`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `error_data` ( `id` int(10) unsigned NOT NULL AUTO_INCREMENT, `keycode` varchar(16) NOT NULL, `description` text NOT NULL, `updated_at` datetime DEFAULT NULL, `created_at` datetime DEFAULT NULL, PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; /*!40101 SET SQL_MODE=@OLD_SQL_MODE */; /*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */; /*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */; /*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; /*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; /*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */; /*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */; -- Dump completed on 2016-03-30 20:01:03
{ "content_hash": "a8c831db7d9bdf037b5b9f176ca0bf2b", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 83, "avg_line_length": 40, "alnum_prop": 0.6659090909090909, "repo_name": "am1tanaka/send-error", "id": "2e0028d81fb8ec010579e380845c7b79726a8e0b", "size": "1760", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "server/schema/error.sql", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "3921" }, { "name": "JavaScript", "bytes": "5978" }, { "name": "PHP", "bytes": "65427" }, { "name": "TSQL", "bytes": "4670" } ], "symlink_target": "" }
<?php namespace Acme\StoreBundle\Form\Type; use Symfony\Component\Form\AbstractType; use Symfony\Component\Form\FormBuilder; use Symfony\Component\Form\FormBuilderInterface; use Symfony\Component\OptionsResolver\OptionsResolver; use Symfony\Component\OptionsResolver\OptionsResolverInterface; use Doctrine\ORM\EntityRepository; use Symfony\Component\Routing\Router; // these import Field Types use Symfony\Component\Form\Extension\Core\Type\ChoiceType; use Symfony\Component\Form\Extension\Core\Type\TextType; use Symfony\Component\Form\Extension\Core\ChoiceList\ChoiceList; class GoodFilterType extends AbstractType { public function buildForm(FormBuilderInterface $builder, array $options) { $builder->add('title', 'text', array('required' => false)) ->add('active', 'choice', array( 'choices' => array( '1' => 'Yes', '0' => 'No', ), 'empty_value' => 'All', 'required' => false ) ) ->add('Search', 'submit', array( 'attr' => array('class' => 'btn btn-default') )) ->setMethod("GET"); } public function getName() { return 'goods_filter'; } public function setDefaultOptions(OptionsResolverInterface $resolver) { $resolver->setDefaults(array( 'data_class' => 'Acme\StoreBundle\Entity\Good', )); } }
{ "content_hash": "b030a4f85c4a0b7fac22f72ffc978502", "timestamp": "", "source": "github", "line_count": 49, "max_line_length": 76, "avg_line_length": 30.693877551020407, "alnum_prop": 0.605718085106383, "repo_name": "rozmarin-2013/catalog", "id": "c04b3d6e14cc40320b6dd08aeba72489c89b4927", "size": "1504", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Acme/StoreBundle/Form/Type/GoodFilterType.php", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "3073" }, { "name": "CSS", "bytes": "2805" }, { "name": "HTML", "bytes": "32046" }, { "name": "PHP", "bytes": "116034" } ], "symlink_target": "" }
using System; using Microsoft.VisualStudio.TestTools.UnitTesting; namespace NewRelicSharp.Tests.Commands.Get { [TestClass] public class GetApplicationIdsTest : TestCommandsBase { [TestMethod] public void TestGetApplicationIds() { var command = testCommandBuilder.CreateGetApplicationIds(); CheckCommandMethod(command); var response = accessor.GetApplicationIds(); Assert.IsNotNull(response, String.Format("ApplicationIds is null")); } } }
{ "content_hash": "8101d4d7a50e8c5814b42f345d86c165", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 80, "avg_line_length": 24.59090909090909, "alnum_prop": 0.6654343807763401, "repo_name": "akuryan/New-Relic-Rest-Api", "id": "3fe0b29d99d3b6b1f66c5cd3a787462daee82ee1", "size": "543", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "NewRelicSharp.Tests/Commands/Get/GetApplicationIdsTest.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "36794" } ], "symlink_target": "" }
<?php namespace lithium\tests\mocks\net\http; use lithium\core\Libraries; class MockSocket extends \lithium\net\Socket { public $data = null; public $configs = []; public function __construct(array $config = []) { parent::__construct((array) $config); } public function open(array $options = []) { parent::open($options); return true; } public function close() { return true; } public function eof() { return true; } public function read() { if ($this->data->path === '/http_auth/') { if (is_array($this->data->auth)) { $request = $this->data->to('array'); $data = $this->data->auth; $data['nc'] = '00000001'; $data['cnonce'] = md5(time()); $username = $this->data->username; $password = $this->data->password; $part1 = md5("{$username}:{$data['realm']}:{$password}"); $part2 = "{$data['nonce']}:{$data['nc']}:{$data['cnonce']}:{$data['qop']}"; $part3 = md5($this->data->method . ':' . $this->data->path); $hash = md5("{$part1}:{$part2}:{$part3}"); preg_match('/response="(.*?)"/', $this->data->headers('Authorization'), $matches); list($match, $response) = $matches; if ($hash === $response) { return 'success'; } } $header = 'Digest realm="app",qop="auth",nonce="4bca0fbca7bd0",'; $header .= 'opaque="d3fb67a7aa4d887ec4bf83040a820a46";'; $this->data->headers('WWW-Authenticate', $header); $status = "GET HTTP/1.1 401 Authorization Required"; $response = [$status, join("\r\n", $this->data->headers()), "", "not authorized"]; return join("\r\n", $response); } return (string) $this->data; } public function write($data) { if (!is_object($data)) { $data = Libraries::instance( null, 'request', (array) $data + $this->_config, $this->_classes ); } $this->data = $data; return true; } public function timeout($time) { return true; } public function encoding($charset) { return true; } public function config() { return $this->_config; } } ?>
{ "content_hash": "fd921e65820e207c00687d731eec82e8", "timestamp": "", "source": "github", "line_count": 84, "max_line_length": 86, "avg_line_length": 23.821428571428573, "alnum_prop": 0.5932033983008496, "repo_name": "UnionOfRAD/lithium", "id": "db8c3cc921c907463d3e5bc3b8ac2b4a5c182dfc", "size": "2265", "binary": false, "copies": "2", "ref": "refs/heads/1.3", "path": "tests/mocks/net/http/MockSocket.php", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Batchfile", "bytes": "311" }, { "name": "Dockerfile", "bytes": "1709" }, { "name": "Hack", "bytes": "264" }, { "name": "PHP", "bytes": "3145434" }, { "name": "Shell", "bytes": "34982" } ], "symlink_target": "" }
from handlers.index import IndexHandler from handlers.command import CommandListHandler, CommandWSHandler from handlers.share import ShareHandler from handlers.marathon import MarathonEventsHandler url_patterns = [ (r"/", IndexHandler), (r"/share/(.*)", ShareHandler), (r"/ws/invoke", CommandWSHandler), (r"/api/commands", CommandListHandler), (r"/api/marathon/callback", MarathonEventsHandler), ]
{ "content_hash": "a2fc2e57605526ec00798f33d2cb9db0", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 65, "avg_line_length": 35, "alnum_prop": 0.7476190476190476, "repo_name": "shizhz/tutu", "id": "74fbb9a0e4c3304a476f24bd8509ab814ac72f52", "size": "445", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "urls.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "592491" }, { "name": "HTML", "bytes": "5763" }, { "name": "JavaScript", "bytes": "10442" }, { "name": "Python", "bytes": "94027" }, { "name": "Shell", "bytes": "548" } ], "symlink_target": "" }
<?php /** * Short description * * Long Description * * @license http://opensource.org/licenses/BSD-3-Clause BSD-3-Clause */ namespace Pharc; class Pharc { const VERSION = '@package_version@'; const BRANCH_ALIAS_VERSION = '@package_branch_alias_version@'; const RELEASE_DATE = '@release_date@'; }
{ "content_hash": "90c63478d6f2359e04df7d35efb71e50", "timestamp": "", "source": "github", "line_count": 18, "max_line_length": 70, "avg_line_length": 17.77777777777778, "alnum_prop": 0.659375, "repo_name": "theshadow/pharc", "id": "218da6f5068999e2296cf8a83712611da54117d5", "size": "320", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Pharc.php", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "61691" } ], "symlink_target": "" }
PEAR_PackageFileManager_File->_setupIgnore, empty array --SKIPIF-- --FILE-- <?php require_once dirname(dirname(__FILE__)) . DIRECTORY_SEPARATOR . 'setup.php.inc'; $pfm->_setupIgnore(array(), 1); $phpunit->assertFalse($pfm->ignore[1], 'should be false if not an array'); echo 'tests done'; ?> --EXPECT-- tests done
{ "content_hash": "e8338255577c35d717a0ef3726415062", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 80, "avg_line_length": 28.454545454545453, "alnum_prop": 0.6996805111821086, "repo_name": "pear/PEAR_PackageFileManager", "id": "d22a647e0989702a6b7d61896e124df189cd8b39", "size": "322", "binary": false, "copies": "1", "ref": "refs/heads/trunk", "path": "tests/PEAR_PackageFileManager_File/setupIgnore/test_emptyarray.phpt", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "PHP", "bytes": "385178" } ], "symlink_target": "" }
//CHECKSTYLE:FileLength:OFF package org.pentaho.di.repository.kdr; import java.util.Date; import java.util.Hashtable; import java.util.List; import java.util.Map; import org.pentaho.di.core.Const; import org.pentaho.di.core.ProgressMonitorListener; import org.pentaho.di.core.RowMetaAndData; import org.pentaho.di.core.database.Database; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.encryption.Encr; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.core.logging.LogLevel; import org.pentaho.di.core.plugins.DatabasePluginType; import org.pentaho.di.core.plugins.JobEntryPluginType; import org.pentaho.di.core.plugins.PluginInterface; import org.pentaho.di.core.plugins.PluginRegistry; import org.pentaho.di.core.plugins.StepPluginType; import org.pentaho.di.core.row.RowMeta; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMeta; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.repository.LongObjectId; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.kdr.delegates.KettleDatabaseRepositoryConnectionDelegate; public class KettleDatabaseRepositoryCreationHelper { private KettleDatabaseRepository repository; private LogChannelInterface log; private DatabaseMeta databaseMeta; private Database database; private PluginRegistry pluginRegistry; public KettleDatabaseRepositoryCreationHelper( KettleDatabaseRepository repository ) { this.repository = repository; this.databaseMeta = this.repository.getDatabaseMeta(); this.database = this.repository.getDatabase(); this.log = repository.getLog(); this.pluginRegistry = PluginRegistry.getInstance(); } /** * Create or upgrade repository tables & fields, populate lookup tables, ... * * @param monitor * The progress monitor to use, or null if no monitor is present. 
* @param upgrade * True if you want to upgrade the repository, false if you want to create it. * @param statements * the list of statements to populate * @param dryrun * true if we don't actually execute the statements * * @throws KettleException * in case something goes wrong! */ public synchronized void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade, List<String> statements, boolean dryrun ) throws KettleException { RowMetaInterface table; String sql; String tablename; String schemaTable; String indexname; String[] keyfield; String[] user, pass, code, desc; int KEY = 9; // integer, no need for bigint! log.logBasic( "Starting to create or modify the repository tables..." ); String message = ( upgrade ? "Upgrading " : "Creating" ) + " the Kettle repository..."; if ( monitor != null ) { monitor.beginTask( message, 31 ); } repository.connectionDelegate.setAutoCommit( true ); // //////////////////////////////////////////////////////////////////////////////// // R_LOG // // Log the operations we do in the repository. 
// table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_REPOSITORY_LOG; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table .addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_ID_REPOSITORY_LOG, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_REP_VERSION, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_LOG_DATE, ValueMetaInterface.TYPE_DATE ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_LOG_USER, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_OPERATION_DESC, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_REPOSITORY_LOG_ID_REPOSITORY_LOG, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { try { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created/altered table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to create or modify table " + schemaTable, dbe ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( !dryrun ) { repository.insertLogEntry( ( upgrade ? 
"Upgrade" : "Creation" ) + " of the Kettle repository" ); } // //////////////////////////////////////////////////////////////////////////////// // R_VERSION // // Let's start with the version table // table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_VERSION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_VERSION_ID_VERSION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_VERSION_MAJOR_VERSION, ValueMetaInterface.TYPE_INTEGER, 3, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_VERSION_MINOR_VERSION, ValueMetaInterface.TYPE_INTEGER, 3, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_VERSION_UPGRADE_DATE, ValueMetaInterface.TYPE_DATE, 0, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_VERSION_IS_UPGRADE, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_VERSION_ID_VERSION, false ); boolean create = false; if ( !Const.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { try { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created/altered table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to create or modify table " + schemaTable, dbe ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } // Insert an extra record in R_VERSION every time we pass here... // try { // if the table doesn't exist, don't try to grab an ID from it... 
LongObjectId nextId; if ( sql.toUpperCase().indexOf( "CREATE TABLE" ) < 0 ) { nextId = repository.connectionDelegate.getNextID( schemaTable, KettleDatabaseRepository.FIELD_VERSION_ID_VERSION ); } else { nextId = new LongObjectId( 1L ); } Object[] data = new Object[] { nextId.longValue(), Long.valueOf( KettleDatabaseRepositoryConnectionDelegate.REQUIRED_MAJOR_VERSION ), Long.valueOf( KettleDatabaseRepositoryConnectionDelegate.REQUIRED_MINOR_VERSION ), new Date(), Boolean.valueOf( upgrade ), }; if ( dryrun ) { sql = database.getSQLOutput( null, KettleDatabaseRepository.TABLE_R_VERSION, table, data, null ); statements.add( sql ); } else { database.execStatement( "INSERT INTO " + databaseMeta.getQuotedSchemaTableCombination( null, KettleDatabaseRepository.TABLE_R_VERSION ) + " VALUES(?, ?, ?, ?, ?)", table, data ); } } catch ( KettleException e ) { throw new KettleException( "Unable to insert new version log record into " + schemaTable, e ); } // //////////////////////////////////////////////////////////////////////////////// // R_DATABASE_TYPE // // Create table... 
// boolean ok_database_type = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_TYPE_ID_DATABASE_TYPE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_TYPE_CODE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_TYPE_DESCRIPTION, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_TYPE_ID_DATABASE_TYPE, false ); create = false; if ( !Const.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { try { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created/altered table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to create or modify table " + schemaTable, dbe ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( ok_database_type ) { // // Populate... // updateDatabaseTypes( statements, dryrun, create ); } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_DATABASE_CONTYPE // // Create table... 
// boolean ok_database_contype = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE_CONTYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_CODE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_DESCRIPTION, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } // If it's creating the table, go ahead and populate below... // ok_database_contype = sql.toUpperCase().contains( "CREATE TABLE" ); if ( ok_database_contype ) { // // Populate with data... 
// code = DatabaseMeta.dbAccessTypeCode; desc = DatabaseMeta.dbAccessTypeDesc; if ( !dryrun ) { database.prepareInsert( table, null, tablename ); } for ( int i = 0; i < code.length; i++ ) { RowMetaAndData lookup = null; if ( upgrade ) { lookup = database.getOneRow( "SELECT " + repository.quote( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE ) + " FROM " + schemaTable + " WHERE " + repository.quote( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_CODE ) + " = '" + code[i] + "'" ); } if ( lookup == null ) { ObjectId nextid = new LongObjectId( i + 1 ); if ( !create ) { nextid = repository.connectionDelegate.getNextDatabaseConnectionTypeID(); } Object[] tableData = new Object[] { new LongObjectId( nextid ).longValue(), code[i], desc[i], }; if ( dryrun ) { sql = database.getSQLOutput( null, tablename, table, tableData, null ); statements.add( sql ); } else { database.setValuesInsert( table, tableData ); database.insertRow(); } } } try { if ( !dryrun ) { database.closeInsert(); } if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to close insert after populating table " + schemaTable, dbe ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_NOTE // // Create table... 
table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_NOTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_NOTE_ID_NOTE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_NOTE_VALUE_STR, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_X, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_Y, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_WIDTH, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_HEIGHT, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_NOTE_FONT_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_NOTE_FONT_SIZE, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_NOTE_FONT_BOLD, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_NOTE_FONT_ITALIC, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_NOTE_COLOR_RED, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_NOTE_COLOR_GREEN, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_NOTE_COLOR_BLUE, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) ); table.addValueMeta( new 
ValueMeta( KettleDatabaseRepository.FIELD_NOTE_BACK_GROUND_COLOR_RED, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_NOTE_BACK_GROUND_COLOR_GREEN, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_NOTE_BACK_GROUND_COLOR_BLUE, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_NOTE_BORDER_COLOR_RED, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_NOTE_BORDER_COLOR_GREEN, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_NOTE_BORDER_COLOR_BLUE, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_NOTE_DRAW_SHADOW, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_NOTE_ID_NOTE, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_DATABASE // // Create table... 
table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_TYPE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_CONTYPE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_HOST_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_DATABASE_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_PORT, ValueMetaInterface.TYPE_INTEGER, 7, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_USERNAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_PASSWORD, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_SERVERNAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_DATA_TBS, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); 
table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_INDEX_TBS, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_DATABASE_ATTRIBUTE // // Create table... table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE_ATTRIBUTE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_CODE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_VALUE_STR, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE_ATTRIBUTE, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { 
log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = "IDX_" + schemaTable.replace( databaseMeta.getStartQuote(), "" ).replace( databaseMeta.getEndQuote(), "" ) + "_AK"; keyfield = new String[] { KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE, KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_CODE, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { // Ignore this one: index is not properly detected, it already exists... } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_DIRECTORY // // Create table... 
table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DIRECTORY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY_PARENT, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DIRECTORY_DIRECTORY_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = "IDX_" + schemaTable.replace( databaseMeta.getStartQuote(), "" ).replace( databaseMeta.getEndQuote(), "" ) + "_AK"; keyfield = new String[] { KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY_PARENT, KettleDatabaseRepository.FIELD_DIRECTORY_DIRECTORY_NAME }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { // Ignore this one: index is not properly detected, it already exists... 
} } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_TRANSFORMATION // // Create table... table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANSFORMATION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table .addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_DESCRIPTION, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_EXTENDED_DESCRIPTION, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_TRANS_VERSION, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_TRANS_STATUS, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_READ, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_WRITE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( 
KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_INPUT, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_OUTPUT, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_UPDATE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DATABASE_LOG, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_TABLE_NAME_LOG, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_USE_BATCHID, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_USE_LOGFIELD, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); table .addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DATABASE_MAXDATE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_TABLE_NAME_MAXDATE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_FIELD_NAME_MAXDATE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_OFFSET_MAXDATE, ValueMetaInterface.TYPE_NUMBER, 12, 2 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_DIFF_MAXDATE, ValueMetaInterface.TYPE_NUMBER, 12, 2 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_CREATED_USER, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( 
new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_CREATED_DATE, ValueMetaInterface.TYPE_DATE, 20, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_MODIFIED_USER, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_MODIFIED_DATE, ValueMetaInterface.TYPE_DATE, 20, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_SIZE_ROWSET, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_TRANSFORMATION, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } // In case of an update, the added column R_TRANSFORMATION.ID_DIRECTORY == NULL!!! // if ( database.checkTableExists( schemaTable ) ) { sql = "SELECT * FROM " + schemaTable + " WHERE " + repository.quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ) + " IS NULL"; List<Object[]> rows = database.getRows( sql, 1 ); if ( rows != null && rows.size() > 0 ) { sql = "UPDATE " + schemaTable + " SET " + repository.quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ) + "=0 WHERE " + repository.quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ) + " IS NULL"; statements.add( sql ); if ( !dryrun ) { database.execStatement( sql ); } } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_TRANS_ATTRIBUTE // // Create table... 
table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table .addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANS_ATTRIBUTE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table .addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_NR, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_CODE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_VALUE_NUM, ValueMetaInterface.TYPE_INTEGER, 18, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_VALUE_STR, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANS_ATTRIBUTE, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = "IDX_TRANS_ATTRIBUTE_LOOKUP"; keyfield = new String[] { KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANSFORMATION, KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_CODE, KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_NR }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); 
statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { // Ignore this one: index is not properly detected, it already exists... } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_JOB_ATTRIBUTE // // Create table... table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB_ATTRIBUTE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_NR, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_CODE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_VALUE_NUM, ValueMetaInterface.TYPE_INTEGER, 18, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_VALUE_STR, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB_ATTRIBUTE, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( 
log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = "IDX_JOB_ATTRIBUTE_LOOKUP"; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB, KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_CODE, KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_NR }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { // Ignore this one: index is not properly detected, it already exists... } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_DEPENDENCY // // Create table... 
table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DEPENDENCY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DEPENDENCY_ID_DEPENDENCY, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DEPENDENCY_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DEPENDENCY_ID_DATABASE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DEPENDENCY_TABLE_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DEPENDENCY_FIELD_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DEPENDENCY_ID_DEPENDENCY, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_PARTITION_SCHEMA // // Create table... 
table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_PARTITION_SCHEMA; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_ID_PARTITION_SCHEMA, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table .addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_DYNAMIC_DEFINITION, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_PARTITIONS_PER_SLAVE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_ID_PARTITION_SCHEMA, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_PARTITION // // Create table... 
table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_PARTITION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_PARTITION_ID_PARTITION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_PARTITION_ID_PARTITION_SCHEMA, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_PARTITION_PARTITION_ID, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_PARTITION_ID_PARTITION, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_TRANS_PARTITION_SCHEMA // // Create table... 
table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_PARTITION_SCHEMA; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANS_PARTITION_SCHEMA, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_PARTITION_SCHEMA, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANS_PARTITION_SCHEMA, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_CLUSTER // // Create table... 
table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_CLUSTER; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_CLUSTER_ID_CLUSTER, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_CLUSTER_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_CLUSTER_BASE_PORT, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_BUFFER_SIZE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_FLUSH_INTERVAL, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_COMPRESSED, ValueMetaInterface.TYPE_BOOLEAN, 0, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_CLUSTER_DYNAMIC, ValueMetaInterface.TYPE_BOOLEAN, 0, 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_CLUSTER_ID_CLUSTER, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_SLAVE // // Create table... table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_SLAVE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_SLAVE_ID_SLAVE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_SLAVE_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_SLAVE_HOST_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_SLAVE_PORT, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_SLAVE_WEB_APP_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_SLAVE_USERNAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_SLAVE_PASSWORD, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_SLAVE_PROXY_HOST_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_SLAVE_PROXY_PORT, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_SLAVE_NON_PROXY_HOSTS, 
ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_SLAVE_MASTER, ValueMetaInterface.TYPE_BOOLEAN ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_SLAVE_ID_SLAVE, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_CLUSTER_SLAVE // // Create table... table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_CLUSTER_SLAVE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER_SLAVE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_SLAVE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER_SLAVE, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { 
log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_TRANS_SLAVE // // Create table... table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_SLAVE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANS_SLAVE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_SLAVE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANS_SLAVE, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_TRANS_CLUSTER // // Create table... 
table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_CLUSTER; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANS_CLUSTER, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_CLUSTER, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANS_CLUSTER, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // // R_TRANS_HOP // // Create table... 
table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_HOP; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_TRANS_HOP, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_STEP_FROM, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_STEP_TO, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_HOP_ENABLED, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_HOP_ID_TRANS_HOP, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } // ///////////////////////////////////////////////////////////////////////////// // R_TRANS_STEP_CONDITION // table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_STEP_CONDITION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_STEP, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table .addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_CONDITION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Const.isEmpty( sql ) ) { // Doesn't exists: create the table... statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } // ///////////////////////////////////////////////////////////////////////////// // R_CONDITION // table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_CONDITION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_CONDITION_ID_CONDITION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_CONDITION_ID_CONDITION_PARENT, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_CONDITION_NEGATED, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_CONDITION_OPERATOR, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_CONDITION_LEFT_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_CONDITION_CONDITION_FUNCTION, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_CONDITION_RIGHT_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_CONDITION_ID_VALUE_RIGHT, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_CONDITION_ID_CONDITION, false ); if ( !Const.isEmpty( sql ) ) { // Doesn't exist: create the table... 
statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // ///////////////////////////////////////////////////////////////////////////// // R_VALUE // tablename = KettleDatabaseRepository.TABLE_R_VALUE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_VALUE_ID_VALUE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_VALUE_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_VALUE_VALUE_TYPE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_VALUE_VALUE_STR, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_VALUE_IS_NULL, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_VALUE_ID_VALUE, false ); if ( !Const.isEmpty( sql ) ) { // Doesn't exists: create the table... 
statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_STEP_TYPE // // Create table... boolean ok_step_type = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_STEP_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_TYPE_ID_STEP_TYPE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_TYPE_CODE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_TYPE_DESCRIPTION, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_TYPE_HELPTEXT, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, "ID_STEP_TYPE", false ); create = false; if ( !Const.isEmpty( sql ) ) { // Doesn't exists: create the table... 
create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( ok_step_type ) { updateStepTypes( statements, dryrun, create ); if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_STEP // // Create table table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_STEP; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_ID_STEP, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_DESCRIPTION, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_ID_STEP_TYPE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_DISTRIBUTE, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_COPIES, ValueMetaInterface.TYPE_INTEGER, 3, 0 ) ); table.addValueMeta( new ValueMeta( 
KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_X, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_Y, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_GUI_DRAW, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_COPIES_STRING, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_STEP_ID_STEP, false ); if ( !Const.isEmpty( sql ) ) { // Doesn't exists: create the table... statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_STEP_ATTRIBUTE // // Create table... 
tablename = KettleDatabaseRepository.TABLE_R_STEP_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table .addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP_ATTRIBUTE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table .addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_NR, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_CODE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_VALUE_NUM, ValueMetaInterface.TYPE_INTEGER, 18, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_VALUE_STR, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP_ATTRIBUTE, false ); if ( !Const.isEmpty( sql ) ) { // Doesn't exist: create the table... 
statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = "IDX_" + schemaTable.replace( databaseMeta.getStartQuote(), "" ).replace( databaseMeta.getEndQuote(), "" ) + "_LOOKUP"; keyfield = new String[] { KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP, KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_CODE, KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_NR, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { // Ignore this one: index is not properly detected, it already exists... } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_STEP_DATABASE // // Keeps the links between transformation steps and databases. // That way investigating dependencies becomes easier to program. // // Create table... 
tablename = KettleDatabaseRepository.TABLE_R_STEP_DATABASE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_STEP, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_DATABASE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Const.isEmpty( sql ) ) { // Doesn't exist: create the table... statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = "IDX_" + schemaTable.replace( databaseMeta.getStartQuote(), "" ).replace( databaseMeta.getEndQuote(), "" ) + "_LU1"; keyfield = new String[] { KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_TRANSFORMATION, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { // Ignore this one: index is not properly detected, it already exists... 
} try { indexname = "IDX_" + schemaTable.replace( databaseMeta.getStartQuote(), "" ).replace( databaseMeta.getEndQuote(), "" ) + "_LU2"; keyfield = new String[] { KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_DATABASE, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { // Ignore this one: index is not properly detected, it already exists... } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_TRANS_NOTE // // Create table... table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_NOTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_NOTE_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_NOTE_ID_NOTE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Const.isEmpty( sql ) ) { // Doesn't exist: create the table... 
statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_LOGLEVEL // // Create table... boolean ok_loglevel = true; tablename = KettleDatabaseRepository.TABLE_R_LOGLEVEL; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_LOGLEVEL_CODE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_LOGLEVEL_DESCRIPTION, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL, false ); create = false; if ( !Const.isEmpty( sql ) ) { // Doesn't exist: create the table... create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( ok_loglevel ) { // // Populate with data... 
// code = LogLevel.logLogLevelCodes(); desc = LogLevel.getLogLevelDescriptions(); if ( !dryrun ) { database.prepareInsert( table, null, tablename ); } for ( int i = 1; i < code.length; i++ ) { RowMetaAndData lookup = null; if ( upgrade ) { lookup = database.getOneRow( "SELECT " + repository.quote( KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL ) + " FROM " + schemaTable + " WHERE " + database.getDatabaseMeta().quoteField( "CODE" ) + " = '" + code[i] + "'" ); } if ( lookup == null ) { ObjectId nextid = new LongObjectId( i ); if ( !create ) { nextid = repository.connectionDelegate.getNextLoglevelID(); } RowMetaAndData tableData = new RowMetaAndData(); tableData.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL, ValueMetaInterface.TYPE_INTEGER ), nextid ); tableData.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_LOGLEVEL_CODE, ValueMetaInterface.TYPE_STRING ), code[i] ); tableData.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_LOGLEVEL_DESCRIPTION, ValueMetaInterface.TYPE_STRING ), desc[i] ); if ( dryrun ) { sql = database.getSQLOutput( null, tablename, tableData.getRowMeta(), tableData.getData(), null ); statements.add( sql ); } else { database.setValuesInsert( tableData.getRowMeta(), tableData.getData() ); database.insertRow(); } } } try { if ( !dryrun ) { database.closeInsert(); } if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to close insert after populating table " + schemaTable, dbe ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_LOG // // Create table... 
table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_LOG; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_LOG_ID_LOG, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_LOG_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_LOG_ID_LOGLEVEL, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_LOG_LOGTYPE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_LOG_FILENAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_LOG_FILEEXTENTION, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_LOG_ADD_DATE, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_LOG_ADD_TIME, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_LOG_ID_DATABASE_LOG, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_LOG_TABLE_NAME_LOG, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_LOG_ID_LOG, false ); if ( !Const.isEmpty( sql ) ) { // Doesn't exist: create the table... 
statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_JOB // // Create table... table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_ID_JOB, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_ID_DIRECTORY, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_DESCRIPTION, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_EXTENDED_DESCRIPTION, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_JOB_VERSION, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_JOB_STATUS, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_ID_DATABASE_LOG, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( 
KettleDatabaseRepository.FIELD_JOB_TABLE_NAME_LOG, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_CREATED_USER, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_CREATED_DATE, ValueMetaInterface.TYPE_DATE, 20, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_MODIFIED_USER, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_MODIFIED_DATE, ValueMetaInterface.TYPE_DATE, 20, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_USE_BATCH_ID, ValueMetaInterface.TYPE_BOOLEAN, 0, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_PASS_BATCH_ID, ValueMetaInterface.TYPE_BOOLEAN, 0, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_USE_LOGFIELD, ValueMetaInterface.TYPE_BOOLEAN, 0, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_SHARED_FILE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); // 255 max length for now. sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_ID_JOB, false ); if ( !Const.isEmpty( sql ) ) { // Doesn't exist: create the table... statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_JOBENTRY_DATABASE // // Keeps the links between job entries and databases. // That way investigating dependencies becomes easier to program. // // Create table... table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_DATABASE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOB, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOBENTRY, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_DATABASE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_ID_JOB, false ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Const.isEmpty( sql ) ) { // Doesn't exist: create the table... 
statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = "IDX_" + schemaTable.replace( databaseMeta.getStartQuote(), "" ).replace( databaseMeta.getEndQuote(), "" ) + "_LU1"; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOB, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { // Ignore this one: index is not properly detected, it already exists... } try { indexname = "IDX_" + schemaTable.replace( databaseMeta.getStartQuote(), "" ).replace( databaseMeta.getEndQuote(), "" ) + "_LU2"; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_DATABASE, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { // Ignore this one: index is not properly detected, it already exists... } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_JOBENTRY_TYPE // // Create table... boolean ok_jobentry_type = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_ID_JOBENTRY_TYPE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_CODE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_DESCRIPTION, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_ID_JOBENTRY_TYPE, false ); create = false; if ( !Const.isEmpty( sql ) ) { // Doesn't exist: create the table... create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( ok_jobentry_type ) { // // Populate with data... // updateJobEntryTypes( statements, dryrun, create ); if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_JOBENTRY // // Create table... 
table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOBENTRY, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOB, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOBENTRY_TYPE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_DESCRIPTION, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOBENTRY, false ); if ( !Const.isEmpty( sql ) ) { // Doesn't exist: create the table... statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_JOBENTRY_COPY // // Create table... 
table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_COPY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_COPY, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOB, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_TYPE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_NR, ValueMetaInterface.TYPE_INTEGER, 4, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_X, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_Y, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_DRAW, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_PARALLEL, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_COPY, false ); if ( !Const.isEmpty( sql ) ) { // Doesn't exist: create the table... 
statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_JOBENTRY_ATTRIBUTE // // Create table... table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOB, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_NR, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_CODE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_VALUE_NUM, ValueMetaInterface.TYPE_NUMBER, 13, 2 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_VALUE_STR, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE, false ); if ( 
!Const.isEmpty( sql ) ) { // Doesn't exist: create the table... statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = "IDX_" + schemaTable.replace( databaseMeta.getStartQuote(), "" ).replace( databaseMeta.getEndQuote(), "" ) + "_LOOKUP"; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE, KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_CODE, KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_NR, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { // Ignore this one: index is not properly detected, it already exists... } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_JOB_HOP // // Create table... 
table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_HOP; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB_HOP, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOBENTRY_COPY_FROM, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOBENTRY_COPY_TO, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_HOP_ENABLED, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_HOP_EVALUATION, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_HOP_UNCONDITIONAL, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB_HOP, false ); if ( !Const.isEmpty( sql ) ) { // Doesn't exist: create the table... statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_JOB_NOTE // // Create table... 
table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_NOTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_NOTE_ID_JOB, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_NOTE_ID_NOTE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Const.isEmpty( sql ) ) { // Doesn't exist: create the table... statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // // R_TRANS_LOCK // // Create table... 
table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_LOCK; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_TRANS_LOCK, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_USER, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_LOCK_LOCK_MESSAGE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_LOCK_LOCK_DATE, ValueMetaInterface.TYPE_DATE, 0, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_TRANS_LOCK, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // // R_JOB_LOCK // // Create table... 
table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_LOCK; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_LOCK_ID_JOB_LOCK, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_LOCK_ID_JOB, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_LOCK_ID_USER, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_LOCK_LOCK_MESSAGE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_LOCK_LOCK_DATE, ValueMetaInterface.TYPE_DATE, 0, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_LOCK_ID_JOB_LOCK, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // ///////////////////////////////////////////////////////////////////////////////// // // MetaStore tables... 
// // ///////////////////////////////////////////////////////////////////////////////// // //////////////////////////////////////////////////////////////////////////////// // // R_NAMESPACE // table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_NAMESPACE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_NAMESPACE_ID_NAMESPACE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_NAMESPACE_NAME, ValueMetaInterface.TYPE_STRING, ( database.getDatabaseMeta().getDatabaseInterface().getMaxVARCHARLength() - 1 > 0 ? database.getDatabaseMeta().getDatabaseInterface().getMaxVARCHARLength() - 1 : KettleDatabaseRepository.REP_ORACLE_STRING_LENGTH ), 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_NAMESPACE_ID_NAMESPACE, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_ELEMENT_TYPE // table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_ELEMENT_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_ID_ELEMENT_TYPE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_ID_NAMESPACE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_DESCRIPTION, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_ELEMENT_TYPE_ID_ELEMENT_TYPE, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_ELEMENT // table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_ELEMENT; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT_TYPE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_ELEMENT_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_ELEMENT_ATTRIBUTE // table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_ELEMENT_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE_PARENT, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_KEY, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_VALUE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // ///////////////////////////////////////////////////////////////////////////////// // // User tables... 
// // ///////////////////////////////////////////////////////////////////////////////// // //////////////////////////////////////////////////////////////////////////////// // // R_USER // // Keep a mapping between the user login and the object id // Map<String, ObjectId> users = new Hashtable<String, ObjectId>(); // Create table... // boolean ok_user = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_USER; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_USER_ID_USER, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_USER_LOGIN, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_USER_PASSWORD, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_USER_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_USER_DESCRIPTION, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_USER_ENABLED, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_USER_ID_USER, false ); create = false; if ( !Const.isEmpty( sql ) ) { // Doesn't exist: create the table... 
create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( ok_user ) { // // Populate with data... // user = new String[] { "admin", "guest" }; pass = new String[] { "admin", "guest" }; code = new String[] { "Administrator", "Guest account" }; desc = new String[] { "User manager", "Read-only guest account" }; // prof = new String[] { "Administrator", "Read-only" }; if ( !dryrun ) { database.prepareInsert( table, null, tablename ); } for ( int i = 0; i < user.length; i++ ) { RowMetaAndData lookup = null; if ( upgrade ) { lookup = database.getOneRow( "SELECT " + repository.quote( KettleDatabaseRepository.FIELD_USER_ID_USER ) + " FROM " + schemaTable + " WHERE " + repository.quote( KettleDatabaseRepository.FIELD_USER_LOGIN ) + " = '" + user[i] + "'" ); } if ( lookup == null ) { ObjectId nextid = new LongObjectId( i + 1 ); if ( !create ) { nextid = repository.connectionDelegate.getNextUserID(); } String password = Encr.encryptPassword( pass[i] ); RowMetaAndData tableData = new RowMetaAndData(); tableData.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_USER_ID_USER, ValueMetaInterface.TYPE_INTEGER ), nextid ); tableData.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_USER_LOGIN, ValueMetaInterface.TYPE_STRING ), user[i] ); tableData.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_USER_PASSWORD, ValueMetaInterface.TYPE_STRING ), password ); tableData.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_USER_NAME, ValueMetaInterface.TYPE_STRING ), code[i] ); tableData.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_USER_DESCRIPTION, ValueMetaInterface.TYPE_STRING ), desc[i] ); 
          // Enable the account by default.
          tableData.addValue( new ValueMeta(
            KettleDatabaseRepository.FIELD_USER_ENABLED, ValueMetaInterface.TYPE_BOOLEAN ), Boolean.TRUE );

          if ( dryrun ) {
            // Dry run: only collect the generated INSERT statement, don't execute it.
            sql = database.getSQLOutput( null, tablename, tableData.getRowMeta(), tableData.getData(), null );
            statements.add( sql );
          } else {
            // Real run: bind the row to the prepared insert and execute it.
            database.setValuesInsert( tableData );
            database.insertRow();
          }
          // Remember the object id assigned to this login for later lookups.
          users.put( user[i], nextid );
        }
      }
      try {
        if ( !dryrun ) {
          database.closeInsert();
        }
        if ( log.isDetailed() ) {
          log.logDetailed( "Populated table " + schemaTable );
        }
      } catch ( KettleException dbe ) {
        throw new KettleException( "Unable to close insert after populating table " + schemaTable, dbe );
      }
    }

    if ( monitor != null ) {
      monitor.worked( 1 );
    }
    if ( monitor != null ) {
      // All tables were processed: finish the progress monitor.
      monitor.done();
    }

    log.logBasic( ( upgrade ? "Upgraded" : "Created" )
      + " " + KettleDatabaseRepository.repositoryTableNames.length + " repository tables." );
  }

  /**
   * Update the list in R_STEP_TYPE using the StepLoader StepPlugin entries.
   * Every registered step plugin that does not yet have a row in R_STEP_TYPE gets one added.
   *
   * @param statements
   *          the list that collects the generated SQL statements
   * @param dryrun
   *          if true, only collect the INSERT statements instead of executing them
   * @param create
   *          if true, the table was freshly created and ids can be assigned sequentially
   *          without consulting the repository
   * @return the (same) list of collected SQL statements
   * @throws KettleException
   *           if the update didn't go as planned.
   */
  public List<String> updateStepTypes( List<String> statements, boolean dryrun, boolean create ) throws KettleException {
    synchronized ( repository ) {
      // We should only do an update if something has changed...
      //
      List<PluginInterface> plugins = pluginRegistry.getPlugins( StepPluginType.class );
      for ( int i = 0; i < plugins.size(); i++ ) {
        PluginInterface sp = plugins.get( i );
        ObjectId id = null;
        if ( !create ) {
          // Look up an existing row for this step type by its primary plugin id.
          id = repository.stepDelegate.getStepTypeID( sp.getIds()[0] );
        }
        if ( id == null ) {
          // Not found: we need to add this step type.
          id = new LongObjectId( i + 1 );
          if ( !create ) {
            // Existing repository: ask for the next free id instead of assuming i + 1.
            id = repository.connectionDelegate.getNextStepTypeID();
          }
          RowMetaAndData table = new RowMetaAndData();
          table.addValue( new ValueMeta(
            KettleDatabaseRepository.FIELD_STEP_TYPE_ID_STEP_TYPE, ValueMetaInterface.TYPE_INTEGER ), id );
          table.addValue( new ValueMeta(
            KettleDatabaseRepository.FIELD_STEP_TYPE_CODE, ValueMetaInterface.TYPE_STRING ), sp.getIds()[0] );
          table
            .addValue(
              new ValueMeta( KettleDatabaseRepository.FIELD_STEP_TYPE_DESCRIPTION, ValueMetaInterface.TYPE_STRING ), sp
                .getName() );
          table.addValue(
            new ValueMeta( KettleDatabaseRepository.FIELD_STEP_TYPE_HELPTEXT, ValueMetaInterface.TYPE_STRING ), sp
              .getDescription() );
          if ( dryrun ) {
            // Dry run: only collect the INSERT statement.
            String sql =
              database.getSQLOutput( null, KettleDatabaseRepository.TABLE_R_STEP_TYPE, table.getRowMeta(), table
                .getData(), null );
            statements.add( sql );
          } else {
            database.prepareInsert( table.getRowMeta(), null, KettleDatabaseRepository.TABLE_R_STEP_TYPE );
            database.setValuesInsert( table );
            database.insertRow();
            database.closeInsert();
          }
        }
      }
    }
    return statements;
  }

  /**
   * Update the list in R_DATABASE_TYPE using the database plugin entries.
   * Every registered database plugin that does not yet have a row in R_DATABASE_TYPE gets one added.
   *
   * @param statements
   *          the list that collects the generated SQL statements
   * @param dryrun
   *          if true, only collect the INSERT statements instead of executing them
   * @param create
   *          if true, the table was freshly created and ids can be assigned sequentially
   *          without consulting the repository
   * @return the (same) list of collected SQL statements
   * @throws KettleException
   *           if the update didn't go as planned.
   */
  public List<String> updateDatabaseTypes( List<String> statements, boolean dryrun, boolean create ) throws KettleException {
    synchronized ( repository ) {
      // We should only do an update if something has changed...
      //
      List<PluginInterface> plugins = pluginRegistry.getPlugins( DatabasePluginType.class );
      for ( int i = 0; i < plugins.size(); i++ ) {
        PluginInterface plugin = plugins.get( i );
        ObjectId id = null;
        if ( !create ) {
          // Look up an existing row for this database type by its primary plugin id.
          id = repository.databaseDelegate.getDatabaseTypeID( plugin.getIds()[0] );
        }
        if ( id == null ) {
          // Not found: we need to add this database type.
          id = new LongObjectId( i + 1 );
          if ( !create ) {
            // Existing repository: ask for the next free id instead of assuming i + 1.
            id = repository.connectionDelegate.getNextDatabaseTypeID();
          }
          RowMetaAndData table = new RowMetaAndData();
          table
            .addValue(
              new ValueMeta(
                KettleDatabaseRepository.FIELD_DATABASE_TYPE_ID_DATABASE_TYPE, ValueMetaInterface.TYPE_INTEGER ), id );
          table.addValue( new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_TYPE_CODE, ValueMetaInterface.TYPE_STRING ), plugin.getIds()[0] );
          table.addValue(
            new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_TYPE_DESCRIPTION, ValueMetaInterface.TYPE_STRING ),
            plugin
              .getName() );
          if ( dryrun ) {
            // Dry run: only collect the INSERT statement.
            String sql =
              database.getSQLOutput( null, KettleDatabaseRepository.TABLE_R_DATABASE_TYPE, table.getRowMeta(), table
                .getData(), null );
            statements.add( sql );
          } else {
            database.prepareInsert( table.getRowMeta(), null, KettleDatabaseRepository.TABLE_R_DATABASE_TYPE );
            database.setValuesInsert( table );
            database.insertRow();
            database.closeInsert();
          }
        }
      }
    }
    return statements;
  }

  /**
   * Update the list in R_JOBENTRY_TYPE using the job entry plugin entries.
   * Every registered job entry plugin that does not yet have a row in R_JOBENTRY_TYPE gets one added.
   *
   * @param statements
   *          the list that collects the generated SQL statements
   * @param dryrun
   *          if true, only collect the INSERT statements instead of executing them
   * @param create
   *          if true, the table was freshly created and ids can be assigned sequentially
   *          without consulting the repository
   * @exception KettleException
   *              if something went wrong during the update.
   */
  public void updateJobEntryTypes( List<String> statements, boolean dryrun, boolean create ) throws KettleException {
    synchronized ( repository ) {
      // We should only do an update if something has changed...
      PluginRegistry registry = PluginRegistry.getInstance();
      List<PluginInterface> jobPlugins = registry.getPlugins( JobEntryPluginType.class );
      for ( int i = 0; i < jobPlugins.size(); i++ ) {
        PluginInterface jobPlugin = jobPlugins.get( i );
        String type_desc = jobPlugin.getIds()[0];
        String type_desc_long = jobPlugin.getName();
        ObjectId id = null;
        if ( !create ) {
          // Look up an existing row for this job entry type by its plugin id.
          id = repository.jobEntryDelegate.getJobEntryTypeID( type_desc );
        }
        if ( id == null ) {
          // Not found: we need to add this job entry type.
          id = new LongObjectId( i + 1 );
          if ( !create ) {
            // Existing repository: ask for the next free id instead of assuming i + 1.
            id = repository.connectionDelegate.getNextJobEntryTypeID();
          }
          RowMetaAndData table = new RowMetaAndData();
          table
            .addValue(
              new ValueMeta(
                KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_ID_JOBENTRY_TYPE, ValueMetaInterface.TYPE_INTEGER ), id );
          table.addValue( new ValueMeta(
            KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_CODE, ValueMetaInterface.TYPE_STRING ), type_desc );
          table.addValue(
            new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_DESCRIPTION, ValueMetaInterface.TYPE_STRING ),
            type_desc_long );
          if ( dryrun ) {
            // Dry run: only collect the INSERT statement.
            String sql =
              database.getSQLOutput( null, KettleDatabaseRepository.TABLE_R_JOBENTRY_TYPE, table.getRowMeta(), table
                .getData(), null );
            statements.add( sql );
          } else {
            database.prepareInsert( table.getRowMeta(), null, KettleDatabaseRepository.TABLE_R_JOBENTRY_TYPE );
            database.setValuesInsert( table );
            database.insertRow();
            database.closeInsert();
          }
        }
      }
    }
  }
}
{ "content_hash": "4892b119638b11c7555945154175af67", "timestamp": "", "source": "github", "line_count": 3132, "max_line_length": 115, "avg_line_length": 41.061621966794384, "alnum_prop": 0.6190039267524591, "repo_name": "rfellows/pentaho-kettle", "id": "a2e1d70864d98f67b955586033fca9b595c008d4", "size": "129509", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "engine/src/org/pentaho/di/repository/kdr/KettleDatabaseRepositoryCreationHelper.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "18738" }, { "name": "Java", "bytes": "31392666" }, { "name": "JavaScript", "bytes": "24319" }, { "name": "Shell", "bytes": "45217" } ], "symlink_target": "" }
using System;
using System.Reactive.Concurrency;
using ReactiveUI;

namespace Akavache
{
    /// <summary>
    /// Central registry for the blob caches used by an application: a
    /// local-machine cache, a per-user-account cache, a secure (encrypted)
    /// cache and a purely in-memory per-session cache. Each cache can be
    /// replaced via its setter (e.g. for testing).
    /// </summary>
    public static class BlobCache
    {
        static string appName;
        static IBlobCache localMachineOverride;
        static IBlobCache userAccountOverride;
        static ISecureBlobCache inMemoryCache = new TestBlobCache(Scheduler.Immediate);

        static BlobCache()
        {
            // When running under a unit test runner, substitute in-memory
            // test caches so tests never touch persistent storage.
            if (RxApp.InUnitTestRunner())
            {
                localMachineOverride = new TestBlobCache(RxApp.TaskpoolScheduler);
                userAccountOverride = new TestBlobCache(RxApp.TaskpoolScheduler);
#if !SILVERLIGHT
                secureOverride = new TestBlobCache(RxApp.TaskpoolScheduler);
#endif
            }
        }

        /// <summary>
        /// The name of the host application. Must be assigned during startup,
        /// before the caches are used; reading it beforehand throws.
        /// </summary>
        public static string ApplicationName
        {
            get
            {
                if (appName != null)
                {
                    return appName;
                }

                throw new Exception("Make sure to set BlobCache.ApplicationName on startup");
            }
            set { appName = value; }
        }

        /// <summary>
        /// The blob cache scoped to the local machine. Falls back to the
        /// persistent default when no replacement has been assigned.
        /// </summary>
        public static IBlobCache LocalMachine
        {
            get
            {
                if (localMachineOverride != null)
                {
                    return localMachineOverride;
                }

                return PersistentBlobCache.LocalMachine;
            }
            set { localMachineOverride = value; }
        }

        /// <summary>
        /// The blob cache scoped to the current user account. Falls back to
        /// the persistent default when no replacement has been assigned.
        /// </summary>
        public static IBlobCache UserAccount
        {
            get
            {
                if (userAccountOverride != null)
                {
                    return userAccountOverride;
                }

                return PersistentBlobCache.UserAccount;
            }
            set { userAccountOverride = value; }
        }

#if !SILVERLIGHT
        static ISecureBlobCache secureOverride;

        /// <summary>
        /// The encrypted blob cache for sensitive data. Falls back to the
        /// default encrypted cache when no replacement has been assigned.
        /// Not available on Silverlight.
        /// </summary>
        public static ISecureBlobCache Secure
        {
            get
            {
                if (secureOverride != null)
                {
                    return secureOverride;
                }

                return EncryptedBlobCache.Current;
            }
            set { secureOverride = value; }
        }
#endif

        /// <summary>
        /// A purely in-memory cache whose contents live only for the current
        /// session.
        /// </summary>
        public static ISecureBlobCache InMemory
        {
            get { return inMemoryCache; }
            set { inMemoryCache = value; }
        }
    }
}
{ "content_hash": "021966b8af3eb8bf55ec405a3cb7099e", "timestamp": "", "source": "github", "line_count": 83, "max_line_length": 97, "avg_line_length": 25.46987951807229, "alnum_prop": 0.5179754020813624, "repo_name": "jorik041/Akavache", "id": "7994ffe69b23ff16321bc3cc8a78c1526bffcf3e", "size": "2114", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Akavache/BlobCache.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "130506" } ], "symlink_target": "" }
<!-- Global site tag (gtag.js) - Google Analytics -->
<script async src="https://www.googletagmanager.com/gtag/js?id=G-BV53D27H52"></script>
<script>
  window.dataLayer = window.dataLayer || [];
  function gtag(){dataLayer.push(arguments);}
  gtag('js', new Date());

  gtag('config', 'G-BV53D27H52');
</script>
<script data-ad-client="ca-pub-7037508450653487" async src="https://pagead2.googlesyndication.com/pagead/js/adsbygoogle.js"></script>
<meta charset="utf-8">
<title>{% if page.title %}{{ page.title }} &#8211; {% endif %}{{ site.title }}</title>
{% if page.excerpt %}<meta name="description" content="{{ page.excerpt | strip_html }}">{% endif %}
<meta name="keywords" content="{{ page.tags | join: ', ' }}">
{% if page.author %}
  {% assign author = site.data.authors[page.author] %}{% else %}{% assign author = site.owner %}
{% endif %}
{% include _open-graph.html %}
{% if site.owner.google.verify %}<!-- Webmaster Tools verification -->
<meta name="google-site-verification" content="{{ site.owner.google.verify }}">{% endif %}
{% if site.owner.bing-verify %}<meta name="msvalidate.01" content="{{ site.owner.bing-verify }}">{% endif %}
{% capture canonical %}{{ site.url }}{% if site.permalink contains '.html' %}{{ page.url }}{% else %}{{ page.url | remove:'index.html' | strip_slash }}{% endif %}{% endcapture %}
<link rel="canonical" href="{{ canonical }}">
<link href="{{ site.url }}/feed.xml" type="application/atom+xml" rel="alternate" title="{{ site.title }} Feed">
{% if site.owner.google.plus %}<link rel="author" href="http://plus.google.com/+{{ site.owner.google.plus }}?rel=author">{% endif %}

<!-- http://t.co/dKP3o1e -->
<meta name="HandheldFriendly" content="True">
<meta name="MobileOptimized" content="320">
<meta name="viewport" content="width=device-width, initial-scale=1.0">

<!-- For all browsers -->
<link rel="stylesheet" href="{{ site.url }}/assets/css/main.css">
<meta http-equiv="cleartype" content="on">

<!-- HTML5 Shiv and Media Query Support -->
<!--[if lt IE 9]>
<script src="{{ site.url }}/assets/js/vendor/html5shiv.min.js"></script> <script src="{{ site.url }}/assets/js/vendor/respond.min.js"></script> <![endif]--> <!-- Modernizr --> <script src="{{ site.url }}/assets/js/vendor/modernizr-2.7.1.custom.min.js"></script> <link href='//fonts.googleapis.com/css?family=PT+Sans+Narrow:400,700%7CPT+Serif:400,700,400italic' rel='stylesheet' type='text/css'> <!-- Icons --> <!-- 16x16 --> <link rel="shortcut icon" href="{{ site.url }}/favicon.ico"> <!-- 32x32 --> <link rel="shortcut icon" href="{{ site.url }}/favicon.png"> <!-- 57x57 (precomposed) for iPhone 3GS, pre-2011 iPod Touch and older Android devices --> <link rel="apple-touch-icon-precomposed" href="{{ site.url }}/images/apple-touch-icon-precomposed.png"> <!-- 72x72 (precomposed) for 1st generation iPad, iPad 2 and iPad mini --> <link rel="apple-touch-icon-precomposed" sizes="72x72" href="{{ site.url }}/images/apple-touch-icon-72x72-precomposed.png"> <!-- 114x114 (precomposed) for iPhone 4, 4S, 5 and post-2011 iPod Touch --> <link rel="apple-touch-icon-precomposed" sizes="114x114" href="{{ site.url }}/images/apple-touch-icon-114x114-precomposed.png"> <!-- 144x144 (precomposed) for iPad 3rd and 4th generation --> <link rel="apple-touch-icon-precomposed" sizes="144x144" href="{{ site.url }}/images/apple-touch-icon-144x144-precomposed.png">
{ "content_hash": "87aad866cdc1b715250936057be77030", "timestamp": "", "source": "github", "line_count": 63, "max_line_length": 178, "avg_line_length": 53.476190476190474, "alnum_prop": 0.6708222024339566, "repo_name": "ashrafMageed/ashrafMageed.github.io", "id": "ddc5455ae527deff382183dde851fba23fed28e1", "size": "3369", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "_includes/_head.html", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "20283" }, { "name": "JavaScript", "bytes": "53389" }, { "name": "Ruby", "bytes": "98" }, { "name": "SCSS", "bytes": "43038" } ], "symlink_target": "" }