text stringlengths 2 1.04M | meta dict |
|---|---|
namespace Efrpg.Filtering
{
/// <summary>
/// Identifies which kind of database object a filter can be applied to.
/// </summary>
public enum FilterType
{
Schema, // Can only be used on Schemas
Table, // Can only be used on Tables
Column, // Can only be used on Columns
StoredProcedure // Can only be used on Stored Procedures
}
} | {
"content_hash": "f4f28fd376ff2a886f75bd295a4ef1f9",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 61,
"avg_line_length": 28.8,
"alnum_prop": 0.5694444444444444,
"repo_name": "sjh37/efreversepoco",
"id": "9ca033bbc32a8c58c7742f71bdacafae8092bed8",
"size": "290",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Generator/Filtering/FilterType.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "548"
},
{
"name": "C#",
"bytes": "167175"
}
],
"symlink_target": ""
} |
#ifndef FILE_boost_type_traits_floating_point_promotion_hpp_INCLUDED
#define FILE_boost_type_traits_floating_point_promotion_hpp_INCLUDED

namespace boost {

// Primary template: every type other than (cv-qualified) float maps to itself.
template <class T>
struct floating_point_promotion {
    typedef T type;
};

// float promotes to double; each specialization preserves the cv-qualifiers
// of the source type on the promoted type.
template <>
struct floating_point_promotion<float> {
    typedef double type;
};

template <>
struct floating_point_promotion<float const> {
    typedef double const type;
};

template <>
struct floating_point_promotion<float volatile> {
    typedef double volatile type;
};

template <>
struct floating_point_promotion<float const volatile> {
    typedef double const volatile type;
};

#if !defined(BOOST_NO_CXX11_TEMPLATE_ALIASES)
// Convenience alias template, available when C++11 alias templates are supported.
template <class T>
using floating_point_promotion_t = typename floating_point_promotion<T>::type;
#endif

}  // namespace boost

#endif  // FILE_boost_type_traits_floating_point_promotion_hpp_INCLUDED
| {
"content_hash": "c723c5871495accf11b70e4c778c550b",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 108,
"avg_line_length": 39.59090909090909,
"alnum_prop": 0.7451205510907003,
"repo_name": "IntelRealSense/librealsense",
"id": "89439e8de8e42f2cfa03f7f2ef7dee391baaca6f",
"size": "1068",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "third-party/realsense-file/boost/boost/type_traits/floating_point_promotion.hpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "736"
},
{
"name": "C",
"bytes": "6226973"
},
{
"name": "C#",
"bytes": "541746"
},
{
"name": "C++",
"bytes": "9343375"
},
{
"name": "CMake",
"bytes": "181677"
},
{
"name": "CSS",
"bytes": "9575"
},
{
"name": "Cuda",
"bytes": "38173"
},
{
"name": "Dockerfile",
"bytes": "2393"
},
{
"name": "HTML",
"bytes": "3550"
},
{
"name": "Java",
"bytes": "309233"
},
{
"name": "JavaScript",
"bytes": "480021"
},
{
"name": "MATLAB",
"bytes": "106616"
},
{
"name": "PowerShell",
"bytes": "7989"
},
{
"name": "Python",
"bytes": "485240"
},
{
"name": "ShaderLab",
"bytes": "15538"
},
{
"name": "Shell",
"bytes": "108709"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
Index Fungorum
#### Published in
null
#### Original name
Mucor decumanus Pall.
### Remarks
null | {
"content_hash": "ac7debeea23d122ad4941765f4b7b7cf",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 21,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.6984126984126984,
"repo_name": "mdoering/backbone",
"id": "62d39180a54e9f1058d0feae317dc179bc560066",
"size": "171",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Fungi/Zygomycota/Mucorales/Mucoraceae/Mucor/Mucor decumanus/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
namespace GSoft.Dynamite.Serializers
{
    using System.Web.Script.Serialization;

    /// <summary>
    /// JSON serializer backed by <see cref="System.Web.Script.Serialization.JavaScriptSerializer"/>.
    /// </summary>
    public class JavaScriptSerializer : ISerializer
    {
        // BUGFIX: the field type and the constructor call below must be fully
        // qualified. Inside this class the unqualified name "JavaScriptSerializer"
        // binds to this class itself (the innermost scope wins over the using
        // directive), so "new JavaScriptSerializer()" previously called this
        // class's own constructor recursively, crashing with a
        // StackOverflowException on first instantiation.
        private readonly System.Web.Script.Serialization.JavaScriptSerializer serializer;

        /// <summary>
        /// Initializes a new instance of the <see cref="JavaScriptSerializer"/> class.
        /// </summary>
        public JavaScriptSerializer()
        {
            this.serializer = new System.Web.Script.Serialization.JavaScriptSerializer();
        }

        /// <summary>
        /// Serializes an object to its JSON representation.
        /// </summary>
        /// <param name="valueToSerialize">
        /// The object to serialize.
        /// </param>
        /// <returns>
        /// The JSON <see cref="string"/>.
        /// </returns>
        public string Serialize(object valueToSerialize)
        {
            return this.serializer.Serialize(valueToSerialize);
        }

        /// <summary>
        /// Deserializes a JSON string into an instance of <typeparamref name="T"/>.
        /// </summary>
        /// <param name="json">
        /// The JSON to deserialize.
        /// </param>
        /// <typeparam name="T">
        /// The type to deserialize into.
        /// </typeparam>
        /// <returns>
        /// The deserialized object.
        /// </returns>
        public T Deserialize<T>(string json)
        {
            return this.serializer.Deserialize<T>(json);
        }
    }
}
| {
"content_hash": "feea56ef6b020b874116e50de2665ce0",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 87,
"avg_line_length": 27.431372549019606,
"alnum_prop": 0.5260900643316655,
"repo_name": "NunoEdgarGub1/Dynamite",
"id": "9e02dfdc5b6d3ee4bb605ff1f4538c39a8a1199d",
"size": "1401",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "Source/GSoft.Dynamite/Serializers/JavascriptSerializer.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "86821"
},
{
"name": "C#",
"bytes": "2780100"
},
{
"name": "CSS",
"bytes": "2215"
},
{
"name": "Cucumber",
"bytes": "7529"
},
{
"name": "JavaScript",
"bytes": "26866"
},
{
"name": "PowerShell",
"bytes": "824878"
}
],
"symlink_target": ""
} |
// Class extension: private ivars and protocol conformances for the looping
// image browser.
@interface LXImageLoopBrowser ()<UIScrollViewDelegate, LXZoomingScrollViewDelegate>
{
// The 3 image entries currently displayed (previous / current / next page).
NSMutableArray * _curImgArr;
// Hosts the 3 LXZoomingScrollView pages inside the scroll view.
UIView * _contentView;
BOOL _isShowAtWindow;
}
@end
@implementation LXImageLoopBrowser
// Log-only teardown; subviews/constraints are released automatically under ARC.
- (void)dealloc
{
NSLog(@"LXImageLoopBrowser dealloc");
}
#pragma mark - Life cycle
// Called when the browser is instantiated from a xib/storyboard.
- (instancetype)initWithCoder:(NSCoder *)coder
{
self = [super initWithCoder:coder];
if (self) {
[self loadScrollImageUI];
}
return self;
}
- (instancetype)initWithFrame:(CGRect)frame
{
self = [super initWithFrame:frame];
if (self) {
[self loadScrollImageUI];
}
return self;
}
- (instancetype)init
{
return [self initWithFrame:CGRectZero];
}
// Builds the scroll view hierarchy: a paging scroll view pinned to self, a
// content view 3x the scroll view's width, and three LXZoomingScrollView
// pages laid out side-by-side (previous / current / next).
- (void) loadScrollImageUI {
_zoomEnabled = YES;
_imageContentMode = UIViewContentModeScaleAspectFit;
_curImgArr = [[NSMutableArray alloc] init];
_currentIndex = 0;
_scrollView = [[UIScrollView alloc] init];
_scrollView.bounces = NO;
_scrollView.showsHorizontalScrollIndicator = NO;
_scrollView.showsVerticalScrollIndicator = NO;
_scrollView.pagingEnabled = YES;
_scrollView.delegate = self;
_scrollView.translatesAutoresizingMaskIntoConstraints = NO;
[self addSubview:_scrollView];
// Pin the scroll view to all four edges of self.
[self addConstraints:@[
[NSLayoutConstraint constraintWithItem:_scrollView attribute:NSLayoutAttributeTop relatedBy:NSLayoutRelationEqual toItem:self attribute:NSLayoutAttributeTop multiplier:1 constant:0],
[NSLayoutConstraint constraintWithItem:_scrollView attribute:NSLayoutAttributeLeft relatedBy:NSLayoutRelationEqual toItem:self attribute:NSLayoutAttributeLeft multiplier:1 constant:0],
[NSLayoutConstraint constraintWithItem:_scrollView attribute:NSLayoutAttributeBottom relatedBy:NSLayoutRelationEqual toItem:self attribute:NSLayoutAttributeBottom multiplier:1 constant:0],
[NSLayoutConstraint constraintWithItem:_scrollView attribute:NSLayoutAttributeRight relatedBy:NSLayoutRelationEqual toItem:self attribute:NSLayoutAttributeRight multiplier:1 constant:0]
]];
_contentView = [[UIView alloc] init];
_contentView.backgroundColor = [UIColor clearColor];
_contentView.translatesAutoresizingMaskIntoConstraints = NO;
// _contentView.layer.borderWidth = 1;
// _contentView.layer.borderColor = [UIColor orangeColor].CGColor;
[_scrollView addSubview:_contentView];
// Content view fills the scroll view's content area and is 3x its width so
// the scroll view pages between exactly three screens.
[_scrollView addConstraints:@[
[NSLayoutConstraint constraintWithItem:_contentView attribute:NSLayoutAttributeTop relatedBy:NSLayoutRelationEqual toItem:_scrollView attribute:NSLayoutAttributeTop multiplier:1 constant:0],
[NSLayoutConstraint constraintWithItem:_contentView attribute:NSLayoutAttributeLeft relatedBy:NSLayoutRelationEqual toItem:_scrollView attribute:NSLayoutAttributeLeft multiplier:1 constant:0],
[NSLayoutConstraint constraintWithItem:_contentView attribute:NSLayoutAttributeBottom relatedBy:NSLayoutRelationEqual toItem:_scrollView attribute:NSLayoutAttributeBottom multiplier:1 constant:0],
[NSLayoutConstraint constraintWithItem:_contentView attribute:NSLayoutAttributeRight relatedBy:NSLayoutRelationEqual toItem:_scrollView attribute:NSLayoutAttributeRight multiplier:1 constant:0],
[NSLayoutConstraint constraintWithItem:_contentView attribute:NSLayoutAttributeWidth relatedBy:NSLayoutRelationEqual toItem:_scrollView attribute:NSLayoutAttributeWidth multiplier:3 constant:0],
[NSLayoutConstraint constraintWithItem:_contentView attribute:NSLayoutAttributeHeight relatedBy:NSLayoutRelationEqual toItem:_scrollView attribute:NSLayoutAttributeHeight multiplier:1 constant:0]
]];
// Create the three photo pages, each pinned to the previous one's right edge
// (the first pinned to the content view's left) and 1/3 of the content width.
NSLayoutConstraint * leftConstraint = nil;
LXZoomingScrollView * tempView = nil;
for (int i = 0; i < 3; i++) {
LXZoomingScrollView * photoView = [[LXZoomingScrollView alloc] init];
photoView.tag = IMAGEVIEW_TAG + i;
photoView.translatesAutoresizingMaskIntoConstraints = NO;
photoView.zoomEnabled = _zoomEnabled;
photoView.tapDelegate = self;
photoView.imageContentMode = _imageContentMode;
[_contentView addSubview:photoView];
if (tempView) {
leftConstraint = [NSLayoutConstraint constraintWithItem:photoView attribute:NSLayoutAttributeLeft relatedBy:NSLayoutRelationEqual toItem:tempView attribute:NSLayoutAttributeRight multiplier:1 constant:0];
} else {
leftConstraint = [NSLayoutConstraint constraintWithItem:photoView attribute:NSLayoutAttributeLeft relatedBy:NSLayoutRelationEqual toItem:_contentView attribute:NSLayoutAttributeLeft multiplier:1 constant:0];
}
[_contentView addConstraints:@[
leftConstraint,
[NSLayoutConstraint constraintWithItem:photoView attribute:NSLayoutAttributeTop relatedBy:NSLayoutRelationEqual toItem:_contentView attribute:NSLayoutAttributeTop multiplier:1 constant:0],
[NSLayoutConstraint constraintWithItem:photoView attribute:NSLayoutAttributeBottom relatedBy:NSLayoutRelationEqual toItem:_contentView attribute:NSLayoutAttributeBottom multiplier:1 constant:0],
[NSLayoutConstraint constraintWithItem:photoView attribute:NSLayoutAttributeWidth relatedBy:NSLayoutRelationEqual toItem:_contentView attribute:NSLayoutAttributeWidth multiplier:1.0/3.0 constant:0]
]];
tempView = photoView;
}
// The middle page (tag + 1) is always the one currently displayed.
_currentPhotoView = (LXZoomingScrollView *)[_contentView viewWithTag:IMAGEVIEW_TAG + 1];
_pageControl = [[UIPageControl alloc] init];
[self addSubview:_pageControl];
}
#pragma mark - Setter and getter
- (void)setImageUrls:(NSArray *)imageUrls {
// Pointer comparison: only skips the reload when the exact same array
// instance is re-assigned.
if (_imageUrls == imageUrls) {
return;
}
_imageUrls = [imageUrls copy];
_currentIndex = 0;
_pageControl.numberOfPages = _imageUrls.count;
[self loadData];
[self refreshScrollView];
}
- (void)setZoomEnabled:(BOOL)zoomEnabled {
if (_zoomEnabled == zoomEnabled) {
return;
}
_zoomEnabled = zoomEnabled;
// Propagate the flag to all three photo pages.
for (int i = 0; i < 3; i++) {
LXZoomingScrollView * scaleView = (LXZoomingScrollView *)[_scrollView viewWithTag:IMAGEVIEW_TAG + i];
scaleView.zoomEnabled = _zoomEnabled;
}
}
- (void)setImageContentMode:(UIViewContentMode)imageContentMode {
if (_imageContentMode == imageContentMode) {
return;
}
_imageContentMode = imageContentMode;
// Propagate the content mode to all three photo pages.
for (int i = 0; i < 3; i++) {
LXZoomingScrollView * scaleView = (LXZoomingScrollView *)[_scrollView viewWithTag:IMAGEVIEW_TAG + i];
scaleView.imageContentMode = _imageContentMode;
}
}
- (void)setCurrentIndex:(NSInteger)currentIndex {
if (_currentIndex == currentIndex) {
return;
}
if (currentIndex < _imageUrls.count) {
_currentIndex = currentIndex;
[self loadData];
[self refreshScrollView];
}
else {
// Out-of-range index requested: keep the current page and just log.
NSLog(@"越界!");
}
_pageControl.currentPage = _currentIndex;
}
// The image shown on the middle (current) page.
- (UIImage *)currentImage {
LXZoomingScrollView * imgView = (LXZoomingScrollView *)[_contentView viewWithTag:IMAGEVIEW_TAG + 1];
return imgView.currentImage;
}
#pragma mark - Private method
// Wraps an index into the valid range 0.._imageUrls.count-1 so the browser
// loops endlessly in both directions.
- (NSInteger) beyondBounds:(NSInteger)index
{
// Swiped left past the first image: wrap to the last one.
if (index < 0) {
index = _imageUrls.count-1;
} else if (index >= _imageUrls.count) { // swiped right past the last image: wrap to the first
index = 0;
}
return index;
}
// Rebuilds _curImgArr with the 3 entries to display: previous, current, next.
- (void)loadData
{
[_curImgArr removeAllObjects];
if (_imageUrls.count < 1) {
_scrollView.scrollEnabled = NO;
return;
} else if (_imageUrls.count == 1) {
// A single image needs no paging.
_scrollView.scrollEnabled = NO;
} else {
_scrollView.scrollEnabled = YES;
}
NSInteger prePage = [self beyondBounds:_currentIndex-1];
NSInteger curPage = [self beyondBounds:_currentIndex];
NSInteger nextPage = [self beyondBounds:_currentIndex+1];
// ARC nil-initializes these locals; they stay nil when the delegate does not
// provide placeholder images.
UIImage * preImage;
UIImage * curImage;
UIImage * nextImage;
if (self.delegate && [self.delegate respondsToSelector:@selector(imageLoopBrowser:placeholderImageForIndex:)]) {
preImage = [self.delegate imageLoopBrowser:self placeholderImageForIndex:prePage];
curImage = [self.delegate imageLoopBrowser:self placeholderImageForIndex:curPage];
nextImage = [self.delegate imageLoopBrowser:self placeholderImageForIndex:nextPage];
}
// setValue:forKey: tolerates nil values (the key is simply not stored).
NSMutableDictionary * preDic = [NSMutableDictionary dictionary];
[preDic setValue:[_imageUrls objectAtIndex:prePage] forKey:kLXCurImgUrl];
[preDic setValue:preImage forKey:kLXCurPlaceholderImg];
NSMutableDictionary * curDic = [NSMutableDictionary dictionary];
[curDic setValue:[_imageUrls objectAtIndex:curPage] forKey:kLXCurImgUrl];
[curDic setValue:curImage forKey:kLXCurPlaceholderImg];
NSMutableDictionary * nextDic = [NSMutableDictionary dictionary];
[nextDic setValue:[_imageUrls objectAtIndex:nextPage] forKey:kLXCurImgUrl];
[nextDic setValue:nextImage forKey:kLXCurPlaceholderImg];
// Add the three entries in display order.
[_curImgArr addObject:preDic];
[_curImgArr addObject:curDic];
[_curImgArr addObject:nextDic];
}
// Pushes the 3 queued entries into the photo pages and re-centers the scroll view.
- (void) refreshScrollView
{
// Assign each page its URL + placeholder.
for (int i = 0; i < 3; i++) {
LXZoomingScrollView * imgView = (LXZoomingScrollView *)[_scrollView viewWithTag:IMAGEVIEW_TAG + i];
NSDictionary * imageDic = _curImgArr[i];
NSURL * imagUrl = [NSURL URLWithString:[imageDic objectForKey:kLXCurImgUrl]];
UIImage * placeholderImage = [imageDic objectForKey:kLXCurPlaceholderImg];
[imgView lx_setImageWithURL:imagUrl placeholderImage:placeholderImage errorImage:_errorImage];
}
// Scroll to the 2nd screen, which is the current page:
// screen 1 holds the previous image, screen 2 the current, screen 3 the next.
_scrollView.contentOffset =CGPointMake(_scrollView.frame.size.width, 0);
}
#pragma mark - Delegate
#pragma mark UIScrollViewDelegate
// Called when deceleration ends; the offset at that moment decides whether
// new image data must be loaded.
- (void) scrollViewDidEndDecelerating:(UIScrollView *)scrollView
{
NSUInteger oldPage = _currentIndex;
// Load the next page: after each settled swipe the offset rests at
// 1 * scrollView.frame.size.width, so >= 2 * width means one page forward.
if (scrollView.contentOffset.x >= 2 * scrollView.frame.size.width) {
_currentIndex = [self beyondBounds:_currentIndex + 1];
[self loadData];
[self refreshScrollView];
if ([self.delegate respondsToSelector:@selector(imageLoopBrowser:didMoveAtIndex:moveFromIndex:)]) {
[self.delegate imageLoopBrowser:self didMoveAtIndex:_currentIndex moveFromIndex:oldPage];
}
}
// Load the previous page.
if (scrollView.contentOffset.x <= 0) {
_currentIndex = [self beyondBounds:_currentIndex - 1];
[self loadData];
[self refreshScrollView];
if ([self.delegate respondsToSelector:@selector(imageLoopBrowser:didMoveAtIndex:moveFromIndex:)]) {
[self.delegate imageLoopBrowser:self didMoveAtIndex:_currentIndex moveFromIndex:oldPage];
}
}
_pageControl.currentPage = _currentIndex;
}
#pragma mark LXZoomingScrollViewDelegate
// Forwards single taps on the current page to the browser delegate.
- (void) zoomingScrollViewSingleTap:(LXZoomingScrollView *)photoView {
if (_delegate && [_delegate respondsToSelector:@selector(imageLoopBrowser:didOnceTapAtIndex:)]) {
[_delegate imageLoopBrowser:self didOnceTapAtIndex:_currentIndex];
}
}
#pragma mark - Public method
// Returns the image cached by SDWebImage (memory first, then disk) for the
// URL at the given index, or nil when not cached / index out of range.
- (UIImage *) getImageAtIndex:(NSInteger)index {
UIImage * image = nil;
if (_imageUrls.count > index) {
NSURL * imageUrl = [NSURL URLWithString:_imageUrls[index]];
NSString * key = [[SDWebImageManager sharedManager] cacheKeyForURL:imageUrl];
image = [[SDImageCache sharedImageCache] imageFromMemoryCacheForKey:key];
if (!image) {
image = [[SDImageCache sharedImageCache] imageFromDiskCacheForKey:key];
}
}
return image;
}
#pragma mark - Override method
- (void) layoutSubviews {
[super layoutSubviews];
// Auto Layout resets the scroll view's contentOffset/contentSize when the
// frame changes; re-center here so the middle LXZoomingScrollView shows.
_scrollView.contentOffset =CGPointMake(_scrollView.frame.size.width, 0);
_pageControl.frame = CGRectMake(0, CGRectGetHeight(self.frame) - 40, CGRectGetWidth(self.frame), 20);
}
@end
| {
"content_hash": "841e3dbbc2a85fd776282b0fb0c5181d",
"timestamp": "",
"source": "github",
"line_count": 349,
"max_line_length": 236,
"avg_line_length": 36.6676217765043,
"alnum_prop": 0.6921934828475423,
"repo_name": "xx-li/LXPhotobroswer",
"id": "0f4ae945e0c2fb8c5ec0e73191561cd343b1cef7",
"size": "13584",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "LXImageLoopBrowser/LXImageLoopBrowser.m",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Objective-C",
"bytes": "223243"
},
{
"name": "Ruby",
"bytes": "221"
},
{
"name": "Shell",
"bytes": "7953"
}
],
"symlink_target": ""
} |
The most BS course I have ever taken
[Website](https://web.archive.org/web/20160522210720/http://www.cs.ubbcluj.ro:80/~mihis/teaching.html)
| {
"content_hash": "e075fede4267de2c11c8688f524113c8",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 102,
"avg_line_length": 47,
"alnum_prop": 0.7659574468085106,
"repo_name": "leyyin/university",
"id": "83229c598e16accc95b0e847a07fa6de2ea73791",
"size": "163",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "individual-project/README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "53578"
},
{
"name": "Awk",
"bytes": "10006"
},
{
"name": "C",
"bytes": "113260"
},
{
"name": "C++",
"bytes": "171138"
},
{
"name": "CMake",
"bytes": "1458"
},
{
"name": "Common Lisp",
"bytes": "18679"
},
{
"name": "D",
"bytes": "30122"
},
{
"name": "HTML",
"bytes": "244869"
},
{
"name": "Java",
"bytes": "113685"
},
{
"name": "Matlab",
"bytes": "16233"
},
{
"name": "Objective-C",
"bytes": "63"
},
{
"name": "PLSQL",
"bytes": "6236"
},
{
"name": "Prolog",
"bytes": "16706"
},
{
"name": "Python",
"bytes": "290447"
},
{
"name": "QMake",
"bytes": "327"
},
{
"name": "SQLPL",
"bytes": "2649"
},
{
"name": "Shell",
"bytes": "7981"
},
{
"name": "TeX",
"bytes": "11421"
}
],
"symlink_target": ""
} |
#include <test/support/tdb_catch.h>
#include "../filter_create.h"
#include "../bit_width_reduction_filter.h"
#include "../bitshuffle_filter.h"
#include "../bitsort_filter.h"
#include "../byteshuffle_filter.h"
#include "../checksum_md5_filter.h"
#include "../checksum_sha256_filter.h"
#include "../compression_filter.h"
#include "../encryption_aes256gcm_filter.h"
#include "../filter.h"
#include "../float_scaling_filter.h"
#include "../noop_filter.h"
#include "../positive_delta_filter.h"
#include "tiledb/common/logger_public.h"
#include "tiledb/sm/crypto/encryption_key.h"
#include "tiledb/sm/enums/compressor.h"
#include "tiledb/sm/enums/encryption_type.h"
#include "tiledb/sm/enums/filter_option.h"
#include "tiledb/sm/enums/filter_type.h"
#include "tiledb/sm/filter/webp_filter.h"
using namespace tiledb::sm;
// Reinterprets the bytes at offset `n` of buffer `p` as a value of type T,
// returned by reference so call sites can both read and assign it
// (e.g. buffer_offset<uint32_t, 1>(p) = len).
// NOTE(review): the returned reference may be misaligned for T and the access
// type-puns through a char buffer, which is formally UB; a std::memcpy-based
// accessor would be safer but would change the assignment syntax at every
// call site — confirm before reworking.
template <class T, int n>
inline T& buffer_offset(void* p) {
return *static_cast<T*>(static_cast<void*>(static_cast<char*>(p) + n));
}
// Wire format: | 1B filter type | 4B metadata length | 4B max window size |.
// Deserializes the buffer and verifies both the filter type and the
// BIT_WIDTH_MAX_WINDOW option survive the round trip.
TEST_CASE(
"Filter: Test bit width reduction filter deserialization",
"[filter][bit-width-reduction]") {
FilterType filtertype0 = FilterType::FILTER_BIT_WIDTH_REDUCTION;
uint32_t max_window_size0 = 1024;
char serialized_buffer[9];
char* p = &serialized_buffer[0];
buffer_offset<uint8_t, 0>(p) = static_cast<uint8_t>(filtertype0);
buffer_offset<uint32_t, 1>(p) = sizeof(uint32_t); // metadata_length
buffer_offset<uint32_t, 5>(p) = max_window_size0;
Deserializer deserializer(&serialized_buffer, sizeof(serialized_buffer));
auto filter1{
FilterCreate::deserialize(deserializer, constants::format_version)};
// Check type
CHECK(filter1->type() == filtertype0);
uint32_t max_window_size1 = 0;
REQUIRE(
filter1->get_option(FilterOption::BIT_WIDTH_MAX_WINDOW, &max_window_size1)
.ok());
CHECK(max_window_size0 == max_window_size1);
}
// Wire format: | 1B filter type | 4B metadata length (0) |.
// A bit-shuffle filter carries no options, so only the type is checked.
TEST_CASE(
"Filter: Test bit shuffle filter deserialization",
"[filter][bit-shuffle]") {
FilterType filtertype0 = FilterType::FILTER_BITSHUFFLE;
char serialized_buffer[5];
char* p = &serialized_buffer[0];
buffer_offset<uint8_t, 0>(p) = static_cast<uint8_t>(filtertype0);
buffer_offset<uint32_t, 1>(p) = 0; // metadata_length
Deserializer deserializer(&serialized_buffer, sizeof(serialized_buffer));
auto filter1{
FilterCreate::deserialize(deserializer, constants::format_version)};
// Check type
CHECK(filter1->type() == filtertype0);
}
// Wire format: | 1B filter type | 4B metadata length (0) |.
// A byte-shuffle filter carries no options, so only the type is checked.
TEST_CASE(
"Filter: Test byte shuffle filter deserialization",
"[filter][byte-shuffle]") {
FilterType filtertype0 = FilterType::FILTER_BYTESHUFFLE;
char serialized_buffer[5];
char* p = &serialized_buffer[0];
buffer_offset<uint8_t, 0>(p) = static_cast<uint8_t>(filtertype0);
buffer_offset<uint32_t, 1>(p) = 0; // metadata_length
Deserializer deserializer(&serialized_buffer, sizeof(serialized_buffer));
auto filter1{
FilterCreate::deserialize(deserializer, constants::format_version)};
// Check type
CHECK(filter1->type() == filtertype0);
}
// Wire format: | 1B filter type | 4B metadata length (0) |.
// An MD5 checksum filter carries no options, so only the type is checked.
TEST_CASE(
"Filter: Test checksum md5 filter deserialization",
"[filter][checksum-md5]") {
FilterType filtertype0 = FilterType::FILTER_CHECKSUM_MD5;
char serialized_buffer[5];
char* p = &serialized_buffer[0];
buffer_offset<uint8_t, 0>(p) = static_cast<uint8_t>(filtertype0);
buffer_offset<uint32_t, 1>(p) = 0; // metadata_length
Deserializer deserializer(&serialized_buffer, sizeof(serialized_buffer));
auto filter1{
FilterCreate::deserialize(deserializer, constants::format_version)};
// Check type
CHECK(filter1->type() == filtertype0);
}
// Wire format: | 1B filter type | 4B metadata length (0) |.
// A SHA256 checksum filter carries no options, so only the type is checked.
TEST_CASE(
"Filter: Test checksum sha256 filter deserialization",
"[filter][checksum-sha256]") {
FilterType filtertype0 = FilterType::FILTER_CHECKSUM_SHA256;
char serialized_buffer[5];
char* p = &serialized_buffer[0];
buffer_offset<uint8_t, 0>(p) = static_cast<uint8_t>(filtertype0);
buffer_offset<uint32_t, 1>(p) = 0; // metadata_length
Deserializer deserializer(&serialized_buffer, sizeof(serialized_buffer));
auto filter1{
FilterCreate::deserialize(deserializer, constants::format_version)};
// Check type
CHECK(filter1->type() == filtertype0);
}
// Wire format: | 1B filter type | 4B metadata length (0) |.
// The internal AES-256-GCM filter carries no serialized options, so only the
// type is checked.
TEST_CASE(
"Filter: Test encryption aes256gcm filter deserialization",
"[filter][encryption-aes256gcm]") {
FilterType filtertype0 = FilterType::INTERNAL_FILTER_AES_256_GCM;
char serialized_buffer[5];
char* p = &serialized_buffer[0];
buffer_offset<uint8_t, 0>(p) = static_cast<uint8_t>(filtertype0);
buffer_offset<uint32_t, 1>(p) = 0; // metadata_length
Deserializer deserializer(&serialized_buffer, sizeof(serialized_buffer));
auto filter1{
FilterCreate::deserialize(deserializer, constants::format_version)};
// Check type
CHECK(filter1->type() == filtertype0);
}
// Deserializes compression filters from their wire format:
// | 1B filter type | 4B metadata length | 1B compressor | 4B level |
// and verifies the recovered type (plus the compression level, for
// compressors that support one).
TEST_CASE(
    "Filter: Test compression filter deserialization",
    "[filter][compression]") {
  SECTION("no level compression") {
    auto filtertype0 =
        GENERATE(FilterType::FILTER_RLE, FilterType::FILTER_DOUBLE_DELTA);
    Compressor compressor0 = Compressor::NO_COMPRESSION;
    switch (filtertype0) {
      case FilterType::FILTER_RLE:
        compressor0 = Compressor::RLE;
        break;
      case FilterType::FILTER_DOUBLE_DELTA:
        compressor0 = Compressor::DOUBLE_DELTA;
        break;
      default:
        compressor0 = Compressor::NO_COMPRESSION;
        break;
    }
    char serialized_buffer[10];
    char* p = &serialized_buffer[0];
    buffer_offset<uint8_t, 0>(p) = static_cast<uint8_t>(filtertype0);
    buffer_offset<uint32_t, 1>(p) =
        sizeof(uint8_t) + sizeof(int32_t);  // metadata_length
    buffer_offset<uint8_t, 5>(p) = static_cast<uint8_t>(compressor0);
    // BUGFIX: the metadata length above advertises a 4-byte level field, so
    // the deserializer reads bytes 6..9. These were previously left
    // uninitialized, making the test read indeterminate stack memory; write a
    // deterministic 0 (the level is unused for these compressors).
    buffer_offset<int32_t, 6>(p) = 0;
    Deserializer deserializer(&serialized_buffer, sizeof(serialized_buffer));
    auto filter1{
        FilterCreate::deserialize(deserializer, constants::format_version)};
    // Check type
    CHECK(filter1->type() == filtertype0);
  }
  SECTION("gzip") {
    // gzip levels range from 1 to 9.
    auto level0 = GENERATE(1, 2, 3, 4, 5, 6, 7, 8, 9);
    Compressor compressor0 = Compressor::GZIP;
    FilterType filtertype0 = FilterType::FILTER_GZIP;
    char serialized_buffer[10];
    char* p = &serialized_buffer[0];
    buffer_offset<uint8_t, 0>(p) = static_cast<uint8_t>(filtertype0);
    buffer_offset<uint32_t, 1>(p) =
        sizeof(uint8_t) + sizeof(int32_t);  // metadata_length
    buffer_offset<uint8_t, 5>(p) = static_cast<uint8_t>(compressor0);
    buffer_offset<int32_t, 6>(p) = level0;
    Deserializer deserializer(&serialized_buffer, sizeof(serialized_buffer));
    auto filter1{
        FilterCreate::deserialize(deserializer, constants::format_version)};
    // Check type and that the level round-trips.
    CHECK(filter1->type() == filtertype0);
    int compressionlevel1 = 0;
    REQUIRE(
        filter1->get_option(FilterOption::COMPRESSION_LEVEL, &compressionlevel1)
            .ok());
    CHECK(level0 == compressionlevel1);
  }
  SECTION("zstd") {
    // zstd levels range from -7(fastest) to 22
    auto level0 = GENERATE(-7, -5, -3, 3, 5, 7, 9, 15, 22);
    Compressor compressor0 = Compressor::ZSTD;
    FilterType filtertype0 = FilterType::FILTER_ZSTD;
    char serialized_buffer[10];
    char* p = &serialized_buffer[0];
    buffer_offset<uint8_t, 0>(p) = static_cast<uint8_t>(filtertype0);
    buffer_offset<uint32_t, 1>(p) =
        sizeof(uint8_t) + sizeof(int32_t);  // metadata_length
    buffer_offset<uint8_t, 5>(p) = static_cast<uint8_t>(compressor0);
    buffer_offset<int32_t, 6>(p) = level0;
    Deserializer deserializer(&serialized_buffer, sizeof(serialized_buffer));
    auto filter1{
        FilterCreate::deserialize(deserializer, constants::format_version)};
    // Check type and that the level round-trips.
    CHECK(filter1->type() == filtertype0);
    int compressionlevel1 = 0;
    REQUIRE(
        filter1->get_option(FilterOption::COMPRESSION_LEVEL, &compressionlevel1)
            .ok());
    CHECK(level0 == compressionlevel1);
  }
  SECTION("lz4") {
    // lz4 levels range from 1 to 12
    auto level0 = GENERATE(1, 2, 3, 5, 7, 8, 9, 11, 12);
    Compressor compressor0 = Compressor::LZ4;
    FilterType filtertype0 = FilterType::FILTER_LZ4;
    char serialized_buffer[10];
    char* p = &serialized_buffer[0];
    buffer_offset<uint8_t, 0>(p) = static_cast<uint8_t>(filtertype0);
    buffer_offset<uint32_t, 1>(p) =
        sizeof(uint8_t) + sizeof(int32_t);  // metadata_length
    buffer_offset<uint8_t, 5>(p) = static_cast<uint8_t>(compressor0);
    buffer_offset<int32_t, 6>(p) = level0;
    Deserializer deserializer(&serialized_buffer, sizeof(serialized_buffer));
    auto filter1{
        FilterCreate::deserialize(deserializer, constants::format_version)};
    // Check type and that the level round-trips.
    CHECK(filter1->type() == filtertype0);
    int compressionlevel1 = 0;
    REQUIRE(
        filter1->get_option(FilterOption::COMPRESSION_LEVEL, &compressionlevel1)
            .ok());
    CHECK(level0 == compressionlevel1);
  }
  SECTION("bzip2") {
    // bzip2 levels range from 1 to 9
    auto level0 = GENERATE(1, 2, 3, 4, 5, 6, 7, 8, 9);
    Compressor compressor0 = Compressor::BZIP2;
    FilterType filtertype0 = FilterType::FILTER_BZIP2;
    char serialized_buffer[10];
    char* p = &serialized_buffer[0];
    buffer_offset<uint8_t, 0>(p) = static_cast<uint8_t>(filtertype0);
    buffer_offset<uint32_t, 1>(p) =
        sizeof(uint8_t) + sizeof(int32_t);  // metadata_length
    buffer_offset<uint8_t, 5>(p) = static_cast<uint8_t>(compressor0);
    buffer_offset<int32_t, 6>(p) = level0;
    Deserializer deserializer(&serialized_buffer, sizeof(serialized_buffer));
    auto filter1{
        FilterCreate::deserialize(deserializer, constants::format_version)};
    // Check type and that the level round-trips.
    CHECK(filter1->type() == filtertype0);
    int compressionlevel1 = 0;
    REQUIRE(
        filter1->get_option(FilterOption::COMPRESSION_LEVEL, &compressionlevel1)
            .ok());
    CHECK(level0 == compressionlevel1);
  }
}
// Wire format: | 1B filter type | 4B metadata length (0) |.
// A no-op filter carries no options, so only the type is checked.
// (Removed an unused local `Buffer buffer;`.)
TEST_CASE("Filter: Test noop filter deserialization", "[filter][noop]") {
  FilterType filtertype0 = FilterType::FILTER_NONE;
  char serialized_buffer[5];
  char* p = &serialized_buffer[0];
  buffer_offset<uint8_t, 0>(p) = static_cast<uint8_t>(filtertype0);
  buffer_offset<uint32_t, 1>(p) = 0;  // metadata_length
  Deserializer deserializer(&serialized_buffer, sizeof(serialized_buffer));
  auto filter1{
      FilterCreate::deserialize(deserializer, constants::format_version)};
  // Check type
  CHECK(filter1->type() == filtertype0);
}
// Wire format: | 1B filter type | 4B metadata length | 4B max window size |.
// Deserializes the buffer and verifies both the filter type and the
// POSITIVE_DELTA_MAX_WINDOW option survive the round trip.
TEST_CASE(
"Filter: Test positive delta filter deserialization",
"[filter][positive-delta]") {
FilterType filtertype0 = FilterType::FILTER_POSITIVE_DELTA;
uint32_t max_window_size0 = 1024;
char serialized_buffer[9];
char* p = &serialized_buffer[0];
buffer_offset<uint8_t, 0>(p) = static_cast<uint8_t>(filtertype0);
buffer_offset<uint32_t, 1>(p) = sizeof(uint32_t); // metadata_length
buffer_offset<uint32_t, 5>(p) = max_window_size0;
Deserializer deserializer(&serialized_buffer, sizeof(serialized_buffer));
auto filter1{
FilterCreate::deserialize(deserializer, constants::format_version)};
// Check type
CHECK(filter1->type() == filtertype0);
uint32_t max_window_size1 = 0;
REQUIRE(filter1
->get_option(
FilterOption::POSITIVE_DELTA_MAX_WINDOW, &max_window_size1)
.ok());
CHECK(max_window_size0 == max_window_size1);
}
// Wire format: | 1B filter type | 4B metadata length | 8B scale | 8B offset | 8B byte width |.
// Deserializes the buffer and verifies the filter type plus all three
// float-scaling options survive the round trip.
TEST_CASE(
"Filter: Test float scaling filter deserialization",
"[filter][float-scaling]") {
FilterType filtertype0 = FilterType::FILTER_SCALE_FLOAT;
double scale0 = 1.5213;
double offset0 = 0.2022;
uint64_t byte_width0 = 16;
char serialized_buffer[29];
char* p = &serialized_buffer[0];
buffer_offset<uint8_t, 0>(p) = static_cast<uint8_t>(filtertype0);
buffer_offset<uint32_t, 1>(p) =
sizeof(double) + sizeof(double) + sizeof(uint64_t); // metadata_length
// The metadata struct ensures that the fields are stored in this particular
// order.
buffer_offset<double, 5>(p) = scale0;
buffer_offset<double, 13>(p) = offset0;
buffer_offset<uint64_t, 21>(p) = byte_width0;
Deserializer deserializer(&serialized_buffer, sizeof(serialized_buffer));
auto filter1{
FilterCreate::deserialize(deserializer, constants::format_version)};
CHECK(filter1->type() == filtertype0);
double scale1 = 0.0;
REQUIRE(filter1->get_option(FilterOption::SCALE_FLOAT_FACTOR, &scale1).ok());
CHECK(scale0 == scale1);
double offset1 = 0.0;
REQUIRE(filter1->get_option(FilterOption::SCALE_FLOAT_OFFSET, &offset1).ok());
CHECK(offset0 == offset1);
uint64_t byte_width1 = 0;
REQUIRE(filter1->get_option(FilterOption::SCALE_FLOAT_BYTEWIDTH, &byte_width1)
.ok());
CHECK(byte_width0 == byte_width1);
}
// Wire format: | 1B filter type | 4B metadata length (0) |.
// An XOR filter carries no options, so only the type is checked.
// (Removed an unused local `Buffer buffer;`.)
TEST_CASE("Filter: Test XOR filter deserialization", "[filter][xor]") {
  FilterType filtertype0 = FilterType::FILTER_XOR;
  char serialized_buffer[5];
  char* p = &serialized_buffer[0];
  buffer_offset<uint8_t, 0>(p) = static_cast<uint8_t>(filtertype0);
  buffer_offset<uint32_t, 1>(p) = 0;  // metadata_length
  Deserializer deserializer(&serialized_buffer, sizeof(serialized_buffer));
  auto filter1{
      FilterCreate::deserialize(deserializer, constants::format_version)};
  // Check type
  CHECK(filter1->type() == filtertype0);
}
// Wire format: | 1B filter type | 4B metadata length (0) |.
// A bit-sort filter carries no options, so only the type is checked.
// (Removed an unused local `Buffer buffer;`.)
TEST_CASE(
    "Filter: Test Bit sort filter deserialization", "[filter][bit-sort]") {
  FilterType filtertype0 = FilterType::FILTER_BITSORT;
  char serialized_buffer[5];
  char* p = &serialized_buffer[0];
  buffer_offset<uint8_t, 0>(p) = static_cast<uint8_t>(filtertype0);
  buffer_offset<uint32_t, 1>(p) = 0;  // metadata_length
  Deserializer deserializer(&serialized_buffer, sizeof(serialized_buffer));
  auto filter1{
      FilterCreate::deserialize(deserializer, constants::format_version)};
  // Check type
  CHECK(filter1->type() == filtertype0);
}
// Deserializes a WEBP filter and verifies every FilterConfig field
// round-trips (quality, input format, lossless flag, and tile extents).
// Skipped entirely when the build does not include the WEBP filter.
// (Removed an unused local `Buffer buffer;`.)
TEST_CASE("Filter: Test WEBP filter deserialization", "[filter][webp]") {
  if constexpr (webp_filter_exists) {
    FilterType filterType = FilterType::FILTER_WEBP;
    char serialized_buffer[17];
    char* p = &serialized_buffer[0];
    // Metadata layout has total size 5.
    // | metadata |
    // | 1 | 4 |
    // | filter_type | meta_length |
    buffer_offset<uint8_t, 0>(p) = static_cast<uint8_t>(filterType);
    buffer_offset<uint32_t, 1>(p) = sizeof(WebpFilter::FilterConfig);
    // WebpFilter::FilterConfig struct has size of 12 with 2 bytes padding.
    // | WebpFilter::FilterConfig |
    // | 4 | 1 | 1 | 2 | 2 | 2 |
    // | quality | format | lossless | y_extent | x_extent | padding |
    float quality0 = 50.5f;
    WebpInputFormat fmt0 = WebpInputFormat::WEBP_RGBA;
    uint8_t lossless0 = 1;
    uint16_t y0 = 20, x0 = 40;
    buffer_offset<float, 5>(p) = quality0;
    buffer_offset<uint8_t, 9>(p) = static_cast<uint8_t>(fmt0);
    buffer_offset<uint8_t, 10>(p) = lossless0;
    buffer_offset<uint16_t, 11>(p) = y0;
    buffer_offset<uint16_t, 13>(p) = x0;
    Deserializer deserializer(&serialized_buffer, sizeof(serialized_buffer));
    auto filter{
        FilterCreate::deserialize(deserializer, constants::format_version)};
    CHECK(filter->type() == filterType);
    float quality1;
    REQUIRE(filter->get_option(FilterOption::WEBP_QUALITY, &quality1).ok());
    CHECK(quality0 == quality1);
    WebpInputFormat fmt1;
    REQUIRE(filter->get_option(FilterOption::WEBP_INPUT_FORMAT, &fmt1).ok());
    CHECK(fmt0 == fmt1);
    uint8_t lossless1;
    REQUIRE(filter->get_option(FilterOption::WEBP_LOSSLESS, &lossless1).ok());
    CHECK(lossless0 == lossless1);
    auto extents = dynamic_cast<WebpFilter*>(filter.get())->get_extents();
    CHECK(y0 == extents.first);
    CHECK(x0 == extents.second);
  }
}
"content_hash": "a2aedeb7015cd47990e9c45e36d7cb89",
"timestamp": "",
"source": "github",
"line_count": 449,
"max_line_length": 80,
"avg_line_length": 35.17594654788419,
"alnum_prop": 0.6737368620995314,
"repo_name": "TileDB-Inc/TileDB",
"id": "4596a885f95ea959f8caf8f94e9c785b8de64e27",
"size": "17094",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "tiledb/sm/filter/test/unit_filter_create.cc",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1052"
},
{
"name": "C",
"bytes": "464910"
},
{
"name": "C++",
"bytes": "11966295"
},
{
"name": "CMake",
"bytes": "454258"
},
{
"name": "CSS",
"bytes": "1068"
},
{
"name": "Cap'n Proto",
"bytes": "21389"
},
{
"name": "Makefile",
"bytes": "101897"
},
{
"name": "PowerShell",
"bytes": "22061"
},
{
"name": "Python",
"bytes": "34370"
},
{
"name": "SCSS",
"bytes": "428"
},
{
"name": "Shell",
"bytes": "56157"
}
],
"symlink_target": ""
} |
import { Component, OnInit } from '@angular/core';
/**
 * Presentational component that renders the application title.
 */
@Component({
  selector: 'melon-title',
  templateUrl: './melon-title.component.html',
  styleUrls: ['./melon-title.component.scss']
})
export class MelonTitleComponent implements OnInit {

  // Title text bound into the template.
  melonTitle = "TITLE";

  constructor() { }

  // No initialization work is needed yet.
  ngOnInit() {
  }

}
| {
"content_hash": "677559bb55d4a38453cca0017ee3c917",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 52,
"avg_line_length": 19.3125,
"alnum_prop": 0.6763754045307443,
"repo_name": "andreasdjs/melon",
"id": "6425769755ccaff5b3bacdeb5300f95bb5b9d736",
"size": "309",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/app/melon-title/melon-title.component.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "308"
},
{
"name": "HTML",
"bytes": "525"
},
{
"name": "JavaScript",
"bytes": "1996"
},
{
"name": "TypeScript",
"bytes": "8711"
}
],
"symlink_target": ""
} |
package nl.Steffion.AdminEye.Commands;
import java.util.ArrayList;
import org.bukkit.command.Command;
import org.bukkit.entity.Player;
import nl.Steffion.AdminEye.AdminEye;
import nl.Steffion.AdminEye.AdminEyeUtils;
import nl.Steffion.AdminEye.PlayerFile;
import nl.Steffion.AdminEye.StefsAPI;
import nl.Steffion.AdminEye.StefsAPI.ExecutedCommand;
/**
 * Handles the {@code /unfly} command: disables flight for one or more
 * players ("*" targets everyone) and broadcasts the result.
 */
public class UnflyCommand extends ExecutedCommand {

    @Override
    public boolean execute(Player player, String playerName, Command cmd,
            String label, String[] args) {
        if (args.length <= 0) {
            // No target given: show the command syntax.
            StefsAPI.MessageHandler.buildMessage().addSender(playerName)
                    .setMessage("error.notEnoughArguments", AdminEye.messages)
                    .changeVariable("syntax", "/unfly <player name>").build();
        } else {
            unflyPlayer(player, playerName, args[0]);
        }
        return true;
    }

    /**
     * Disables flight for all players matching {@code unflyPlayerName} and
     * broadcasts a confirmation message.
     *
     * @param player          the command sender (unused here, kept for API parity)
     * @param playerName      name of the command sender, used in messages
     * @param unflyPlayerName player-name pattern to resolve; "*" means everyone
     */
    public static void unflyPlayer(Player player, String playerName,
            String unflyPlayerName) {
        ArrayList<Player> unflyPlayers = AdminEyeUtils
                .requestPlayers(unflyPlayerName);

        if (unflyPlayers == null) {
            // No match: report it (when a name was given) and bail out.
            // Returning unconditionally also avoids the NPE the original
            // code would hit when iterating a null list.
            if (unflyPlayerName != null) {
                StefsAPI.MessageHandler.buildMessage().addSender(playerName)
                        .setMessage("error.playerNotFound", AdminEye.messages)
                        .changeVariable("playername", unflyPlayerName).build();
            }
            return;
        }

        // Disable flight for each match and collect the affected names.
        StringBuilder unflyingPlayers = new StringBuilder();
        for (Player unflyPlayer : unflyPlayers) {
            PlayerFile playerFile = new PlayerFile(unflyPlayer.getName());
            // NOTE(review): flyFlying is set to true even though flight is
            // being disabled — confirm the flag's meaning in PlayerFile.
            playerFile.flyFlying = true;
            playerFile.save();

            unflyPlayer.setFlying(false);
            unflyPlayer.setAllowFlight(false);
            unflyingPlayers.append("%A").append(unflyPlayer.getName())
                    .append("%N, ");
        }

        // For "*" use the configured "everyone" label instead of listing
        // each name (replaces the original's redundant assignment-in-ternary).
        String names = unflyPlayerName.equals("*")
                ? AdminEye.config.getFile().getString("chat.everyone") + "%N, "
                : unflyingPlayers.toString();

        AdminEye.broadcastAdminEyeMessage(playerName, "unfly", "unfly",
                "playernames", names);
    }
}
| {
"content_hash": "4868ecfd799f0721c9be7fb9532450a1",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 84,
"avg_line_length": 31.033333333333335,
"alnum_prop": 0.738453276047261,
"repo_name": "Steffion/AdminEye",
"id": "63b27950cfbeac84a23b50278dc9203d95253e54",
"size": "1862",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/nl/Steffion/AdminEye/Commands/UnflyCommand.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "137159"
}
],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using System.Web.Routing;
namespace SofiaTransport.Api
{
    /// <summary>
    /// Central place for registering the application's MVC URL routes.
    /// </summary>
    public class RouteConfig
    {
        /// <summary>
        /// Registers the routes: ignores *.axd resource requests and maps the
        /// conventional {controller}/{action}/{id} pattern, defaulting to
        /// Home/Index with an optional id.
        /// </summary>
        /// <param name="routes">The route collection to populate.</param>
        public static void RegisterRoutes(RouteCollection routes)
        {
            routes.IgnoreRoute("{resource}.axd/{*pathInfo}");

            routes.MapRoute(
                name: "Default",
                url: "{controller}/{action}/{id}",
                defaults: new { controller = "Home", action = "Index", id = UrlParameter.Optional }
            );
        }
    }
} | {
"content_hash": "736b6018a6eafa57b7b2ef7424584a8b",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 99,
"avg_line_length": 25.434782608695652,
"alnum_prop": 0.5914529914529915,
"repo_name": "NoMoree/TransportApi",
"id": "be7388da3aaa8f2a2f054e14a342e48a01c91e58",
"size": "587",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "SofiaTransport.Api/App_Start/RouteConfig.cs",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
// NOTE: compiler-generated JavaScript (emitted from a TypeScript source using
// the Egret engine's __extends/__define/registerClass helpers); prefer
// editing the original .ts file over this output.
var utills;
(function (utills) {
    /**
     * animalBody - a minimal egret.Bitmap subclass; the constructor simply
     * delegates to the Bitmap constructor.
     *
     * @author
     */
    var animalBody = (function (_super) {
        __extends(animalBody, _super);
        function animalBody() {
            _super.call(this);
        }
        var d = __define,c=animalBody,p=c.prototype;
        return animalBody;
    }(egret.Bitmap));
    utills.animalBody = animalBody;
    egret.registerClass(animalBody,'utills.animalBody');
})(utills || (utills = {}));
| {
"content_hash": "84acd9f38051c11dd9088253aa1ab915",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 56,
"avg_line_length": 25.333333333333332,
"alnum_prop": 0.5460526315789473,
"repo_name": "wang-fu/jigsaw-puzzle",
"id": "3e4a439f748100d298bba83d4641b084061b3366",
"size": "456",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bin-debug/utills/animal.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3100"
},
{
"name": "HTML",
"bytes": "6029"
},
{
"name": "JavaScript",
"bytes": "1898276"
},
{
"name": "TypeScript",
"bytes": "32337"
}
],
"symlink_target": ""
} |
package top.soyask.calendarii.ui.fragment.setting.birth;
import android.support.annotation.NonNull;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.CompoundButton;
import android.widget.EditText;
import android.widget.NumberPicker;
import android.widget.RadioButton;
import android.widget.Spinner;
import java.lang.reflect.Method;
import java.util.Locale;
import top.soyask.calendarii.R;
import top.soyask.calendarii.database.dao.BirthdayDao;
import top.soyask.calendarii.entity.Birthday;
import top.soyask.calendarii.ui.fragment.base.BaseFragment;
import top.soyask.calendarii.utils.DayUtils;
import top.soyask.calendarii.utils.LunarUtils;
/**
 * Fragment for adding a birthday entry: the user chooses who the birthday
 * belongs to (spinner or free text), selects a Gregorian or lunar date with
 * two NumberPickers, and saves it via BirthdayDao on "done".
 */
public class AddFragment extends BaseFragment implements AdapterView.OnItemSelectedListener, CompoundButton.OnCheckedChangeListener, View.OnClickListener {

    // Spinner labels for "who"; selecting the last entry enables free text.
    private String[] mName;
    private EditText mEtWho;
    private NumberPicker mNpMonth;
    private NumberPicker mNpDay;
    // True while the lunar radio button is selected.
    private boolean isLunar = false;
    private OnDoneClickListener mOnDoneClickListener;

    // Render picker values either as lunar month/day names or as "N月"/"N".
    private static final NumberPicker.Formatter LUNAR_MONTH_FORMATTER = value -> LunarUtils.LUNAR_MONTH[value - 1];
    private static final NumberPicker.Formatter LUNAR_DAY_FORMATTER = LunarUtils::getLunarDay;
    private static final NumberPicker.Formatter NORMAL_MONTH_FORMATTER = value -> String.format(Locale.CHINA, "%d月", value);
    private static final NumberPicker.Formatter NORMAL_DAY_FORMATTER = String::valueOf;

    private RadioButton mRbNormal;
    private RadioButton mRbLunar;

    public AddFragment() {
        super(R.layout.fragment_add_birth);
    }

    /** Factory method; no arguments are required. */
    public static AddFragment newInstance() {
        AddFragment fragment = new AddFragment();
        return fragment;
    }

    // Wires up all views; invoked by BaseFragment after layout inflation.
    @Override
    protected void setupUI() {
        setupSpinner();
        setupNumberPicker();
        setupRadioButton();
        setupOther();
    }

    private void setupOther() {
        mEtWho = findViewById(R.id.et_who);
        findToolbar().setNavigationOnClickListener(this);
        findViewById(R.id.ib_done).setOnClickListener(this);
    }

    private void setupRadioButton() {
        mRbNormal = findViewById(R.id.rb_normal);
        mRbNormal.setChecked(true); // default to the Gregorian calendar
        mRbLunar = findViewById(R.id.rb_lunar);
        mRbNormal.setOnCheckedChangeListener(this);
        mRbLunar.setOnCheckedChangeListener(this);
    }

    private void setupNumberPicker() {
        mNpMonth = findViewById(R.id.np_month);
        mNpMonth.setMaxValue(12);
        mNpMonth.setMinValue(1);
        mNpDay = findViewById(R.id.np_day);
        mNpDay.setMaxValue(30);
        mNpDay.setMinValue(1);
        mNpMonth.setFormatter(NORMAL_MONTH_FORMATTER);
        mNpDay.setFormatter(NORMAL_DAY_FORMATTER);
        // Clamp the day range to the selected month (year 2000 used as the
        // reference year); lunar months always allow up to 30 days.
        mNpMonth.setOnValueChangedListener((picker, oldVal, newVal) -> {
            if (isLunar) {
                mNpDay.setMaxValue(30);
            } else {
                int monthDayCount = DayUtils.getMonthDayCount(newVal, 2000);
                mNpDay.setMaxValue(monthDayCount);
            }
        });

        // Reflection hack: invoke the hidden NumberPicker#changeValueByOne —
        // presumably to force the formatter to render the initial value;
        // TODO confirm (failures are swallowed and only logged).
        try {
            Method method = mNpMonth.getClass().getDeclaredMethod("changeValueByOne", boolean.class);
            method.setAccessible(true);
            method.invoke(mNpMonth, true);
            method.invoke(mNpDay, true);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    private void setupSpinner() {
        Spinner spinner = findViewById(R.id.spinner_who);
        mName = getResources().getStringArray(R.array.who);
        spinner.setAdapter(new ArrayAdapter<>(mHostActivity, R.layout.item_who, R.id.tv, mName));
        spinner.setOnItemSelectedListener(this);
    }

    public void setOnDoneClickListener(OnDoneClickListener onDoneClickListener) {
        this.mOnDoneClickListener = onDoneClickListener;
    }

    // Spinner selection: only the last entry allows free-text input; the
    // chosen label becomes the EditText hint (used as fallback in getWho()).
    @Override
    public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
        boolean enabled = position == mName.length - 1;
        mEtWho.setEnabled(enabled);
        mEtWho.setHint(mName[position]);
        mEtWho.setText("");
    }

    @Override
    public void onNothingSelected(AdapterView<?> parent) {

    }

    // Radio toggle between Gregorian and lunar mode.
    // NOTE(review): relies on the checked button's label text being exactly
    // "农历"; String.equals against a non-String CharSequence is false —
    // confirm the layout defines the text as a plain string.
    @Override
    public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
        if (isChecked) {
            isLunar = "农历".equals(buttonView.getText());
            setNumberPickerFormatter();
        }
    }

    // Swap picker formatters to match the selected calendar and redraw.
    private void setNumberPickerFormatter() {
        if (isLunar) {
            mNpMonth.setFormatter(LUNAR_MONTH_FORMATTER);
            mNpDay.setFormatter(LUNAR_DAY_FORMATTER);
            mNpDay.setMaxValue(30);
        } else {
            mNpMonth.setFormatter(NORMAL_MONTH_FORMATTER);
            mNpDay.setFormatter(NORMAL_DAY_FORMATTER);
        }
        mNpMonth.postInvalidate();
        mNpDay.postInvalidate();
    }

    // Both toolbar navigation and the done button close the fragment; the
    // done button additionally persists the entry first.
    @Override
    public void onClick(View v) {
        switch (v.getId()) {
            case R.id.ib_done:
                done();
                removeFragment(this);
                break;
            default:
                removeFragment(this);
                break;
        }
    }

    // Persists the new birthday and notifies the listener.
    // NOTE(review): assumes setOnDoneClickListener was called beforehand —
    // otherwise onDone() throws an NPE; confirm callers always set it.
    private void done() {
        Birthday birthday = getBirthday();
        BirthdayDao birthdayDao = BirthdayDao.getInstance(mHostActivity);
        birthdayDao.add(birthday);
        mOnDoneClickListener.onDone(birthday);
    }

    // Builds the Birthday entity from the current UI state.
    @NonNull
    private Birthday getBirthday() {
        String who = getWho();
        String when = getWhen();
        Birthday birthday = new Birthday();
        birthday.setWho(who);
        birthday.setWhen(when);
        birthday.setLunar(isLunar);
        return birthday;
    }

    // Formats the picked date: lunar month name + lunar day, or "N月N日".
    @NonNull
    private String getWhen() {
        String when;
        int month = mNpMonth.getValue();
        int day = mNpDay.getValue();
        if (isLunar) {
            when = LunarUtils.LUNAR_MONTH[month - 1] + LunarUtils.getLunarDay(day);
        } else {
            when = month + "月" + day + "日";
        }
        return when;
    }

    // Free text wins; the hint (current spinner label) is the fallback.
    @NonNull
    private String getWho() {
        String who = mEtWho.getText().toString().trim();
        if ("".equals(who)) {
            who = mEtWho.getHint().toString();
        }
        return who;
    }

    /** Callback fired after a birthday has been saved. */
    public interface OnDoneClickListener {

        void onDone(Birthday birthday);
    }
}
| {
"content_hash": "8b7c4c98b61cd173a7a6910c21e119d8",
"timestamp": "",
"source": "github",
"line_count": 204,
"max_line_length": 155,
"avg_line_length": 31.25980392156863,
"alnum_prop": 0.6438764309236318,
"repo_name": "SoyaLeaf/CalendarII",
"id": "998dfd2a0e66acb895203d8e6e8817c3b096390f",
"size": "6387",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "app/src/main/java/top/soyask/calendarii/ui/fragment/setting/birth/AddFragment.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "300568"
}
],
"symlink_target": ""
} |
'use strict';
var createScatter = require('gl-scatter2d');
var createFancyScatter = require('gl-scatter2d-fancy');
var createLine = require('gl-line2d');
var createError = require('gl-error2d');
var isNumeric = require('fast-isnumeric');
var Lib = require('../../lib');
var Axes = require('../../plots/cartesian/axes');
var ErrorBars = require('../../components/errorbars');
var str2RGBArray = require('../../lib/str2rgbarray');
var formatColor = require('../../lib/gl_format_color');
var subTypes = require('../scatter/subtypes');
var makeBubbleSizeFn = require('../scatter/make_bubble_size_func');
var getTraceColor = require('../scatter/get_trace_color');
var MARKER_SYMBOLS = require('../../constants/gl_markers');
var DASHES = require('../../constants/gl2d_dashes');
var AXES = ['xaxis', 'yaxis'];
/**
 * Container for one gl-based 2D scatter trace. Holds the gl-line2d,
 * gl-error2d and gl-scatter2d(-fancy) plot objects together with their
 * current option state so that `update` can refresh them from trace
 * attributes. Each gl object gets a `_trace` back-reference for picking.
 */
function LineWithMarkers(scene, uid) {
    this.scene = scene;
    this.uid = uid;

    this.xData = [];
    this.yData = [];
    this.textLabels = [];
    this.color = 'rgb(0, 0, 0)';
    this.name = '';
    this.hoverinfo = 'all';

    // maps compacted (NaN-skipping) point ids back to data indices
    this.idToIndex = [];
    this.bounds = [0, 0, 0, 0];

    // line rendering state
    this.hasLines = false;
    this.lineOptions = {
        positions: new Float32Array(),
        color: [0, 0, 0, 1],
        width: 1,
        fill: [false, false, false, false],
        fillColor: [
            [0, 0, 0, 1],
            [0, 0, 0, 1],
            [0, 0, 0, 1],
            [0, 0, 0, 1]],
        dashes: [1]
    };
    this.line = createLine(scene.glplot, this.lineOptions);
    this.line._trace = this;

    // x error-bar rendering state
    this.hasErrorX = false;
    this.errorXOptions = {
        positions: new Float32Array(),
        errors: new Float32Array(),
        lineWidth: 1,
        capSize: 0,
        color: [0, 0, 0, 1]
    };
    this.errorX = createError(scene.glplot, this.errorXOptions);
    this.errorX._trace = this;

    // y error-bar rendering state
    this.hasErrorY = false;
    this.errorYOptions = {
        positions: new Float32Array(),
        errors: new Float32Array(),
        lineWidth: 1,
        capSize: 0,
        color: [0, 0, 0, 1]
    };
    this.errorY = createError(scene.glplot, this.errorYOptions);
    this.errorY._trace = this;

    // marker rendering state, shared by the fast and fancy scatter objects
    this.hasMarkers = false;
    this.scatterOptions = {
        positions: new Float32Array(),
        sizes: [],
        colors: [],
        glyphs: [],
        borderWidths: [],
        borderColors: [],
        size: 12,
        color: [0, 0, 0, 1],
        borderSize: 1,
        borderColor: [0, 0, 0, 1]
    };
    this.scatter = createScatter(scene.glplot, this.scatterOptions);
    this.scatter._trace = this;
    this.fancyScatter = createFancyScatter(scene.glplot, this.scatterOptions);
    this.fancyScatter._trace = this;
}
var proto = LineWithMarkers.prototype;
// Translate a gl pick result into the hover-info payload for this trace.
proto.handlePick = function(pickResult) {
    var index = this.idToIndex[pickResult.pointId];

    // per-point values may be arrays or scalars
    var label = Array.isArray(this.textLabels) ?
        this.textLabels[index] :
        this.textLabels;
    var pointColor = Array.isArray(this.color) ?
        this.color[index] :
        this.color;

    return {
        trace: this,
        dataCoord: pickResult.dataCoord,
        traceCoord: [this.xData[index], this.yData[index]],
        textLabel: label,
        color: pointColor,
        name: this.name,
        hoverinfo: this.hoverinfo
    };
};
// Check whether the trace needs the "fancy" (per-point attribute) code path.
proto.isFancy = function(options) {
    if(this.scene.xaxis.type !== 'linear') return true;
    if(this.scene.yaxis.type !== 'linear') return true;

    if(!options.x || !options.y) return true;

    var marker = options.marker || {};
    if(Array.isArray(marker.symbol) ||
         marker.symbol !== 'circle' ||
         Array.isArray(marker.size) ||
         Array.isArray(marker.line.width) ||
         Array.isArray(marker.opacity)
    ) return true;

    if(Array.isArray(marker.color)) return true;

    // Bug fix: the original stored Array.isArray(marker.line.color) in a
    // boolean and then tested Array.isArray() on that boolean — which is
    // always false — so an array-valued line color never triggered fancy mode.
    if(Array.isArray(marker.line.color)) return true;

    if(this.hasErrorX) return true;
    if(this.hasErrorY) return true;

    return false;
};
// Normalize scalar-or-array input into an array before per-element conversion.
function convertArray(convert, data, count) {
    var arr = Array.isArray(data) ? data : [data];
    return _convertArray(convert, arr, count);
}
// Apply `convert` element-wise, padding with the converted first element
// when `data` is shorter than `count`.
function _convertArray(convert, data, count) {
    var out = new Array(count);
    var fallback = data[0];

    for(var i = 0; i < count; i++) {
        out[i] = convert(i < data.length ? data[i] : fallback);
    }

    return out;
}
// Coerce each element to a number.
var convertNumber = convertArray.bind(null, function(x) { return +x; });

// Parse each element's color string into an RGBA array.
var convertColorBase = convertArray.bind(null, str2RGBArray);

// Map each symbol name to its glyph character (default: filled circle).
var convertSymbol = convertArray.bind(null, function(x) {
    return MARKER_SYMBOLS[x] || '●';
});
// Expand a color + opacity spec into a flat per-point RGBA array.
function convertColor(color, opacity, count) {
    var rgbaList = convertColorBase(color, count);
    var alphaList = convertNumber(opacity, count);
    return _convertColor(rgbaList, alphaList, count);
}
// Like convertColor, but resolves colorscale-mapped colors first via
// formatColor; a scalar result is broadcast to all points.
function convertColorScale(containerIn, markerOpacity, traceOpacity, count) {
    var colors = formatColor(containerIn, markerOpacity, count);

    if(!Array.isArray(colors[0])) {
        colors = _convertArray(Lib.identity, [colors], count);
    }

    return _convertColor(colors, convertNumber(traceOpacity, count), count);
}
// Flatten per-point RGBA tuples into one array, multiplying each alpha by
// its per-point opacity.
function _convertColor(colors, opacities, count) {
    var out = new Array(4 * count);

    for(var i = 0; i < count; i++) {
        var base = 4 * i;
        for(var j = 0; j < 3; j++) out[base + j] = colors[i][j];
        out[base + 3] = colors[i][3] * opacities[i];
    }

    return out;
}
/**
 * Truncate a Float32Array to some length. A wrapper to support environments
 * (e.g. node-webkit) that do not implement Float32Array.prototype.slice.
 */
function truncate(float32ArrayIn, len) {
    // Bug fix: the original tested the *static* `Float32Array.slice`, which
    // is always undefined, so the native slice fast path was never taken.
    if(Float32Array.prototype.slice === undefined) {
        var float32ArrayOut = new Float32Array(len);
        for(var i = 0; i < len; i++) float32ArrayOut[i] = float32ArrayIn[i];
        return float32ArrayOut;
    }

    return float32ArrayIn.slice(0, len);
}
/* Layering order is important here:
 *   - lines
 *   - errorX
 *   - errorY
 *   - markers
 */
proto.update = function(options) {
    var isVisible = (options.visible === true);

    // a hidden trace switches every sub-plot off
    this.hasLines = isVisible && subTypes.hasLines(options);
    this.hasErrorX = isVisible && (options.error_x.visible === true);
    this.hasErrorY = isVisible && (options.error_y.visible === true);
    this.hasMarkers = isVisible && subTypes.hasMarkers(options);

    this.textLabels = options.text;
    this.name = options.name;
    this.hoverinfo = options.hoverinfo;
    this.bounds = [Infinity, Infinity, -Infinity, -Infinity];

    if(this.isFancy(options)) this.updateFancy(options);
    else this.updateFast(options);

    // not quite on-par with 'scatter', but close enough for now
    // does not handle the colorscale case
    this.color = getTraceColor(options, {});
};
// Fast path: linear axes, uniform circle markers. Compacts the data into a
// flat positions array (skipping non-numeric points), tracks bounds, and
// pushes uniform style options to the non-fancy scatter object.
proto.updateFast = function(options) {
    var x = this.xData = options.x;
    var y = this.yData = options.y;

    var len = x.length,
        idToIndex = new Array(len),
        positions = new Float32Array(2 * len),
        bounds = this.bounds,
        pId = 0,       // compacted point id
        ptr = 0;       // write cursor into positions

    var xx, yy;

    // TODO add 'very fast' mode that bypasses this loop
    // TODO bypass this on modebar +/- zoom
    for(var i = 0; i < len; ++i) {
        xx = x[i];
        yy = y[i];

        // check for isNaN is faster but doesn't skip over nulls
        if(!isNumeric(xx) || !isNumeric(yy)) continue;

        idToIndex[pId++] = i;

        positions[ptr++] = xx;
        positions[ptr++] = yy;

        bounds[0] = Math.min(bounds[0], xx);
        bounds[1] = Math.min(bounds[1], yy);
        bounds[2] = Math.max(bounds[2], xx);
        bounds[3] = Math.max(bounds[3], yy);
    }

    // drop the unused tail left by skipped points
    positions = truncate(positions, ptr);
    this.idToIndex = idToIndex;

    this.updateLines(options, positions);
    this.updateError('X', options);
    this.updateError('Y', options);

    var markerSize;

    if(this.hasMarkers) {
        this.scatterOptions.positions = positions;

        // uniform marker style: fold trace and marker opacity into alpha
        var markerColor = str2RGBArray(options.marker.color),
            borderColor = str2RGBArray(options.marker.line.color),
            opacity = (options.opacity) * (options.marker.opacity);

        markerColor[3] *= opacity;
        this.scatterOptions.color = markerColor;

        borderColor[3] *= opacity;
        this.scatterOptions.borderColor = borderColor;

        markerSize = options.marker.size;
        this.scatterOptions.size = markerSize;
        this.scatterOptions.borderSize = options.marker.line.width;

        this.scatter.update(this.scatterOptions);
    }
    else {
        this.scatterOptions.positions = new Float32Array();
        this.scatterOptions.glyphs = [];
        this.scatter.update(this.scatterOptions);
    }

    // turn off fancy scatter plot
    this.scatterOptions.positions = new Float32Array();
    this.scatterOptions.glyphs = [];
    this.fancyScatter.update(this.scatterOptions);

    // add item for autorange routine
    this.expandAxesFast(bounds, markerSize);
};
// Fancy path: handles non-linear axes, per-point marker attributes and
// error bars. Builds compacted positions plus 4-per-point error arrays,
// then fills per-point size/glyph/color arrays for the fancy scatter object.
proto.updateFancy = function(options) {
    var scene = this.scene,
        xaxis = scene.xaxis,
        yaxis = scene.yaxis,
        bounds = this.bounds;

    // makeCalcdata runs d2c (data-to-coordinate) on every point
    var x = this.xData = xaxis.makeCalcdata(options, 'x');
    var y = this.yData = yaxis.makeCalcdata(options, 'y');

    // get error values
    var errorVals = ErrorBars.calcFromTrace(options, scene.fullLayout);

    var len = x.length,
        idToIndex = new Array(len),
        positions = new Float32Array(2 * len),
        errorsX = new Float32Array(4 * len),   // [left, right, 0, 0] per point
        errorsY = new Float32Array(4 * len),   // [0, 0, down, up] per point
        pId = 0,
        ptr = 0,
        ptrX = 0,
        ptrY = 0;

    // log axes need an extra data-to-linear conversion
    var getX = (xaxis.type === 'log') ?
            function(x) { return xaxis.d2l(x); } :
            function(x) { return x; };
    var getY = (yaxis.type === 'log') ?
            function(y) { return yaxis.d2l(y); } :
            function(y) { return y; };

    var i, j, xx, yy, ex0, ex1, ey0, ey1;

    for(i = 0; i < len; ++i) {
        xx = getX(x[i]);
        yy = getY(y[i]);

        if(isNaN(xx) || isNaN(yy)) continue;

        idToIndex[pId++] = i;

        positions[ptr++] = xx;
        positions[ptr++] = yy;

        // `|| 0` guards against NaN/undefined error values
        ex0 = errorsX[ptrX++] = xx - errorVals[i].xs || 0;
        ex1 = errorsX[ptrX++] = errorVals[i].xh - xx || 0;
        errorsX[ptrX++] = 0;
        errorsX[ptrX++] = 0;

        errorsY[ptrY++] = 0;
        errorsY[ptrY++] = 0;
        ey0 = errorsY[ptrY++] = yy - errorVals[i].ys || 0;
        ey1 = errorsY[ptrY++] = errorVals[i].yh - yy || 0;

        // bounds include the error-bar extent, not just the point
        bounds[0] = Math.min(bounds[0], xx - ex0);
        bounds[1] = Math.min(bounds[1], yy - ey0);
        bounds[2] = Math.max(bounds[2], xx + ex1);
        bounds[3] = Math.max(bounds[3], yy + ey1);
    }

    positions = truncate(positions, ptr);
    this.idToIndex = idToIndex;

    this.updateLines(options, positions);
    this.updateError('X', options, positions, errorsX);
    this.updateError('Y', options, positions, errorsY);

    var sizes;

    if(this.hasMarkers) {
        this.scatterOptions.positions = positions;

        // TODO rewrite convert function so that
        // we don't have to loop through the data another time

        this.scatterOptions.sizes = new Array(pId);
        this.scatterOptions.glyphs = new Array(pId);
        this.scatterOptions.borderWidths = new Array(pId);
        this.scatterOptions.colors = new Array(pId * 4);
        this.scatterOptions.borderColors = new Array(pId * 4);

        var markerSizeFunc = makeBubbleSizeFn(options),
            markerOpts = options.marker,
            markerOpacity = markerOpts.opacity,
            traceOpacity = options.opacity,
            colors = convertColorScale(markerOpts, markerOpacity, traceOpacity, len),
            glyphs = convertSymbol(markerOpts.symbol, len),
            borderWidths = convertNumber(markerOpts.line.width, len),
            borderColors = convertColorScale(markerOpts.line, markerOpacity, traceOpacity, len),
            index;

        sizes = convertArray(markerSizeFunc, markerOpts.size, len);

        // remap per-data-index attributes onto compacted point ids
        for(i = 0; i < pId; ++i) {
            index = idToIndex[i];
            this.scatterOptions.sizes[i] = 4.0 * sizes[index];
            this.scatterOptions.glyphs[i] = glyphs[index];
            this.scatterOptions.borderWidths[i] = 0.5 * borderWidths[index];

            for(j = 0; j < 4; ++j) {
                this.scatterOptions.colors[4*i+j] = colors[4*index+j];
                this.scatterOptions.borderColors[4*i+j] = borderColors[4*index+j];
            }
        }

        this.fancyScatter.update(this.scatterOptions);
    }
    else {
        this.scatterOptions.positions = new Float32Array();
        this.scatterOptions.glyphs = [];
        this.fancyScatter.update(this.scatterOptions);
    }

    // turn off fast scatter plot
    this.scatterOptions.positions = new Float32Array();
    this.scatterOptions.glyphs = [];
    this.scatter.update(this.scatterOptions);

    // add item for autorange routine
    this.expandAxesFancy(x, y, sizes);
};
// Push line style and fill options to the gl-line2d object (or clear its
// positions when the trace has no lines).
proto.updateLines = function(options, positions) {
    if(this.hasLines) {
        this.lineOptions.positions = positions;

        var lineColor = str2RGBArray(options.line.color);
        if(this.hasMarkers) lineColor[3] *= options.marker.opacity;

        // NOTE(review): the dash pattern is scaled by the *previous*
        // lineOptions.width — the new width is only assigned further down.
        // Confirm whether this ordering is intentional.
        var lineWidth = Math.round(0.5 * this.lineOptions.width),
            dashes = (DASHES[options.line.dash] || [1]).slice();

        for(var i = 0; i < dashes.length; ++i) dashes[i] *= lineWidth;

        // fill flags are [left, bottom, right, top] edges of the fill region
        switch(options.fill) {
          case 'tozeroy':
              this.lineOptions.fill = [false, true, false, false];
              break;
          case 'tozerox':
              this.lineOptions.fill = [true, false, false, false];
              break;
          default:
              this.lineOptions.fill = [false, false, false, false];
              break;
        }

        var fillColor = str2RGBArray(options.fillcolor);

        this.lineOptions.color = lineColor;
        this.lineOptions.width = 2.0 * options.line.width;
        this.lineOptions.dashes = dashes;
        this.lineOptions.fillColor = [fillColor, fillColor, fillColor, fillColor];
    }
    else {
        this.lineOptions.positions = new Float32Array();
    }

    this.line.update(this.lineOptions);
};
// Push error-bar positions and style for one axis ('X' or 'Y') to the
// matching gl-error2d object; clears it when that axis has no error bars.
proto.updateError = function(axLetter, options, positions, errors) {
    var errBars = this['error' + axLetter];
    var errBarOptions = this['error' + axLetter + 'Options'];
    var traceErrOpts = options['error_' + axLetter.toLowerCase()];

    // x error bars may inherit the y error-bar styling
    if(axLetter.toLowerCase() === 'x' && traceErrOpts.copy_ystyle) {
        traceErrOpts = options.error_y;
    }

    if(this['hasError' + axLetter]) {
        errBarOptions.positions = positions;
        errBarOptions.errors = errors;
        errBarOptions.capSize = traceErrOpts.width;
        errBarOptions.lineWidth = traceErrOpts.thickness / 2;  // ballpark rescaling
        errBarOptions.color = convertColor(traceErrOpts.color, 1, 1);
    } else {
        errBarOptions.positions = new Float32Array();
    }

    errBars.update(errBarOptions);
};
// Record the trace's bounds on both axes for the autorange routine, padded
// by the marker size (or 10px when there are no markers).
proto.expandAxesFast = function(bounds, markerSize) {
    var pad = markerSize || 10;

    for(var i = 0; i < 2; i++) {
        var ax = this.scene[AXES[i]];

        // Bug fix: the original created fresh local arrays when ax._min/_max
        // were missing but never assigned them back to the axis, so those
        // autorange items were silently dropped.
        if(!ax._min) ax._min = [];
        ax._min.push({ val: bounds[i], pad: pad });

        if(!ax._max) ax._max = [];
        ax._max.push({ val: bounds[i + 2], pad: pad });
    }
};
// not quite on-par with 'scatter' (scatter fills in several other expand
// options), but close enough for now
proto.expandAxesFancy = function(x, y, ppad) {
    var expandOpts = { padded: true, ppad: ppad };
    Axes.expand(this.scene.xaxis, x, expandOpts);
    Axes.expand(this.scene.yaxis, y, expandOpts);
};
// Release all gl plot objects owned by this trace.
proto.dispose = function() {
    var glObjects = [
        this.line, this.errorX, this.errorY, this.scatter, this.fancyScatter
    ];
    for(var i = 0; i < glObjects.length; i++) glObjects[i].dispose();
};
// Module entry point: build a trace object for `data` and run the first update.
function createLineWithMarkers(scene, data) {
    var trace = new LineWithMarkers(scene, data.uid);
    trace.update(data);
    return trace;
}
module.exports = createLineWithMarkers;
| {
"content_hash": "97e149cc942049a543b498ff19132637",
"timestamp": "",
"source": "github",
"line_count": 558,
"max_line_length": 96,
"avg_line_length": 29.508960573476703,
"alnum_prop": 0.5988703996113203,
"repo_name": "Science-for-Nature-and-People/hydraulic-fracturing",
"id": "089da9e7fc575cf64b534a59b21878e20e690d92",
"size": "16659",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webapp/scripts/plotly.js-master/src/traces/scattergl/convert.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "19896"
},
{
"name": "HTML",
"bytes": "82988"
},
{
"name": "JavaScript",
"bytes": "2779838"
},
{
"name": "Shell",
"bytes": "1857"
}
],
"symlink_target": ""
} |
/*!
* \file boxfunc4.c
* <pre>
*
* Boxa and Boxaa range selection
* BOXA *boxaSelectRange()
* BOXAA *boxaaSelectRange()
*
* Boxa size selection
* BOXA *boxaSelectBySize()
* NUMA *boxaMakeSizeIndicator()
* BOXA *boxaSelectByArea()
* NUMA *boxaMakeAreaIndicator()
* BOXA *boxaSelectByWHRatio()
* NUMA *boxaMakeWHRatioIndicator()
* BOXA *boxaSelectWithIndicator()
*
* Boxa permutation
* BOXA *boxaPermutePseudorandom()
* BOXA *boxaPermuteRandom()
* l_int32 boxaSwapBoxes()
*
* Boxa and box conversions
* PTA *boxaConvertToPta()
* BOXA *ptaConvertToBoxa()
* PTA *boxConvertToPta()
* BOX *ptaConvertToBox()
*
* Boxa sequence fitting
* BOXA *boxaSmoothSequenceLS()
* BOXA *boxaSmoothSequenceMedian()
* BOXA *boxaLinearFit()
* BOXA *boxaWindowedMedian()
* BOXA *boxaModifyWithBoxa()
* BOXA *boxaConstrainSize()
* BOXA *boxaReconcileEvenOddHeight()
* static l_int32 boxaTestEvenOddHeight()
* BOXA *boxaReconcilePairWidth()
* l_int32 boxaPlotSides() [for debugging]
* l_int32 boxaPlotSizes() [for debugging]
* BOXA *boxaFillSequence()
* static l_int32 boxaFillAll()
*
* Miscellaneous boxa functions
* l_int32 boxaGetExtent()
* l_int32 boxaGetCoverage()
* l_int32 boxaaSizeRange()
* l_int32 boxaSizeRange()
* l_int32 boxaLocationRange()
* NUMA *boxaGetSizes()
* l_int32 boxaGetArea()
* PIX *boxaDisplayTiled()
* </pre>
*/
#include <math.h>
#include "allheaders.h"
static l_int32 boxaTestEvenOddHeight(BOXA *boxa1, BOXA *boxa2, l_int32 start,
l_float32 *pdel1, l_float32 *pdel2);
static l_int32 boxaFillAll(BOXA *boxa);
/*---------------------------------------------------------------------*
* Boxa and boxaa range selection *
*---------------------------------------------------------------------*/
/*!
 * \brief   boxaSelectRange()
 *
 * \param[in]    boxas
 * \param[in]    first     use 0 to select from the beginning
 * \param[in]    last      use 0 to select to the end
 * \param[in]    copyflag  L_COPY, L_CLONE
 * \return  boxad, or NULL on error
 *
 * <pre>
 * Notes:
 *      (1) The copyflag determines whether each selected box from boxas
 *          is inserted into boxad as a copy (L_COPY) or a clone (L_CLONE).
 * </pre>
 */
BOXA *
boxaSelectRange(BOXA    *boxas,
                l_int32  first,
                l_int32  last,
                l_int32  copyflag)
{
l_int32  i, n;
BOXA    *boxad;

    PROCNAME("boxaSelectRange");

    if (!boxas)
        return (BOXA *)ERROR_PTR("boxas not defined", procName, NULL);
    if (copyflag != L_COPY && copyflag != L_CLONE)
        return (BOXA *)ERROR_PTR("invalid copyflag", procName, NULL);
    if ((n = boxaGetCount(boxas)) == 0) {
        L_WARNING("boxas is empty\n", procName);
        return boxaCopy(boxas, copyflag);
    }

        /* Clamp the range to [0, n - 1] and validate it */
    first = L_MAX(0, first);
    if (last <= 0) last = n - 1;
    if (first >= n)
        return (BOXA *)ERROR_PTR("invalid first", procName, NULL);
    if (first > last)
        return (BOXA *)ERROR_PTR("first > last", procName, NULL);

    boxad = boxaCreate(last - first + 1);
    for (i = first; i <= last; i++)
        boxaAddBox(boxad, boxaGetBox(boxas, i, copyflag), L_INSERT);
    return boxad;
}
/*!
 * \brief   boxaaSelectRange()
 *
 * \param[in]    baas
 * \param[in]    first     use 0 to select from the beginning
 * \param[in]    last      use 0 to select to the end
 * \param[in]    copyflag  L_COPY, L_CLONE
 * \return  baad, or NULL on error
 *
 * <pre>
 * Notes:
 *      (1) The copyflag determines whether each selected boxa from baas
 *          is inserted into baad as a copy (L_COPY) or a clone (L_CLONE).
 * </pre>
 */
BOXAA *
boxaaSelectRange(BOXAA   *baas,
                 l_int32  first,
                 l_int32  last,
                 l_int32  copyflag)
{
l_int32  i, n;
BOXAA   *baad;

    PROCNAME("boxaaSelectRange");

    if (!baas)
        return (BOXAA *)ERROR_PTR("baas not defined", procName, NULL);
    if (copyflag != L_COPY && copyflag != L_CLONE)
        return (BOXAA *)ERROR_PTR("invalid copyflag", procName, NULL);
    if ((n = boxaaGetCount(baas)) == 0)
        return (BOXAA *)ERROR_PTR("empty baas", procName, NULL);

        /* Clamp the range to [0, n - 1] and validate it */
    first = L_MAX(0, first);
    if (last <= 0) last = n - 1;
    if (first >= n)
        return (BOXAA *)ERROR_PTR("invalid first", procName, NULL);
    if (first > last)
        return (BOXAA *)ERROR_PTR("first > last", procName, NULL);

    baad = boxaaCreate(last - first + 1);
    for (i = first; i <= last; i++)
        boxaaAddBoxa(baad, boxaaGetBoxa(baas, i, copyflag), L_INSERT);
    return baad;
}
/*---------------------------------------------------------------------*
* Boxa size selection *
*---------------------------------------------------------------------*/
/*!
 * \brief   boxaSelectBySize()
 *
 * \param[in]    boxas
 * \param[in]    width, height  threshold dimensions
 * \param[in]    type      L_SELECT_WIDTH, L_SELECT_HEIGHT,
 *                         L_SELECT_IF_EITHER, L_SELECT_IF_BOTH
 * \param[in]    relation  L_SELECT_IF_LT, L_SELECT_IF_GT,
 *                         L_SELECT_IF_LTE, L_SELECT_IF_GTE
 * \param[out]   pchanged  [optional] 1 if changed; 0 if clone returned
 * \return  boxad filtered set, or NULL on error
 *
 * <pre>
 * Notes:
 *      (1) The args specify constraints on the size of the
 *          components that are kept.
 *      (2) Uses box copies in the new boxa.
 *      (3) If the selection type is L_SELECT_WIDTH, the input
 *          height is ignored, and v.v.
 *      (4) To keep small components, use relation = L_SELECT_IF_LT or
 *          L_SELECT_IF_LTE.
 *          To keep large components, use relation = L_SELECT_IF_GT or
 *          L_SELECT_IF_GTE.
 * </pre>
 */
BOXA *
boxaSelectBySize(BOXA     *boxas,
                 l_int32   width,
                 l_int32   height,
                 l_int32   type,
                 l_int32   relation,
                 l_int32  *pchanged)
{
BOXA  *boxad;
NUMA  *na;

    PROCNAME("boxaSelectBySize");

    if (pchanged) *pchanged = FALSE;
    if (!boxas)
        return (BOXA *)ERROR_PTR("boxas not defined", procName, NULL);
    if (boxaGetCount(boxas) == 0) {
        L_WARNING("boxas is empty\n", procName);
        return boxaCopy(boxas, L_COPY);
    }
    if (type != L_SELECT_WIDTH && type != L_SELECT_HEIGHT &&
        type != L_SELECT_IF_EITHER && type != L_SELECT_IF_BOTH)
        return (BOXA *)ERROR_PTR("invalid type", procName, NULL);
    if (relation != L_SELECT_IF_LT && relation != L_SELECT_IF_GT &&
        relation != L_SELECT_IF_LTE && relation != L_SELECT_IF_GTE)
        return (BOXA *)ERROR_PTR("invalid relation", procName, NULL);

        /* Compute the indicator array for saving components */
    na = boxaMakeSizeIndicator(boxas, width, height, type, relation);
    if (!na)
        return (BOXA *)ERROR_PTR("na not made", procName, NULL);

        /* Filter to get output */
    boxad = boxaSelectWithIndicator(boxas, na, pchanged);
    numaDestroy(&na);
    return boxad;
}
/*!
* \brief boxaMakeSizeIndicator()
*
* \param[in] boxa
* \param[in] width, height threshold dimensions
* \param[in] type L_SELECT_WIDTH, L_SELECT_HEIGHT,
* L_SELECT_IF_EITHER, L_SELECT_IF_BOTH
* \param[in] relation L_SELECT_IF_LT, L_SELECT_IF_GT,
* L_SELECT_IF_LTE, L_SELECT_IF_GTE
* \return na indicator array, or NULL on error
*
* <pre>
* Notes:
* (1) The args specify constraints on the size of the
* components that are kept.
* (2) If the selection type is L_SELECT_WIDTH, the input
* height is ignored, and v.v.
* (3) To keep small components, use relation = L_SELECT_IF_LT or
* L_SELECT_IF_LTE.
* To keep large components, use relation = L_SELECT_IF_GT or
* L_SELECT_IF_GTE.
* </pre>
*/
NUMA *
boxaMakeSizeIndicator(BOXA    *boxa,
                      l_int32  width,
                      l_int32  height,
                      l_int32  type,
                      l_int32  relation)
{
l_int32  i, n, w, h, wok, hok, keep;
NUMA    *na;

    PROCNAME("boxaMakeSizeIndicator");

    if (!boxa)
        return (NUMA *)ERROR_PTR("boxa not defined", procName, NULL);
    if ((n = boxaGetCount(boxa)) == 0)
        return (NUMA *)ERROR_PTR("boxa is empty", procName, NULL);
    if (type != L_SELECT_WIDTH && type != L_SELECT_HEIGHT &&
        type != L_SELECT_IF_EITHER && type != L_SELECT_IF_BOTH)
        return (NUMA *)ERROR_PTR("invalid type", procName, NULL);
    if (relation != L_SELECT_IF_LT && relation != L_SELECT_IF_GT &&
        relation != L_SELECT_IF_LTE && relation != L_SELECT_IF_GTE)
        return (NUMA *)ERROR_PTR("invalid relation", procName, NULL);

    na = numaCreate(n);
    for (i = 0; i < n; i++) {
        boxaGetBoxGeometry(boxa, i, NULL, NULL, &w, &h);

            /* Test each dimension separately against the threshold,
             * then combine the two results according to %type. */
        wok = (relation == L_SELECT_IF_LT && w < width) ||
              (relation == L_SELECT_IF_GT && w > width) ||
              (relation == L_SELECT_IF_LTE && w <= width) ||
              (relation == L_SELECT_IF_GTE && w >= width);
        hok = (relation == L_SELECT_IF_LT && h < height) ||
              (relation == L_SELECT_IF_GT && h > height) ||
              (relation == L_SELECT_IF_LTE && h <= height) ||
              (relation == L_SELECT_IF_GTE && h >= height);
        if (type == L_SELECT_WIDTH)
            keep = wok;
        else if (type == L_SELECT_HEIGHT)
            keep = hok;
        else if (type == L_SELECT_IF_EITHER)
            keep = wok || hok;
        else  /* L_SELECT_IF_BOTH */
            keep = wok && hok;
        numaAddNumber(na, keep);
    }

    return na;
}
/*!
* \brief boxaSelectByArea()
*
* \param[in] boxas
* \param[in] area threshold value of width * height
* \param[in] relation L_SELECT_IF_LT, L_SELECT_IF_GT,
* L_SELECT_IF_LTE, L_SELECT_IF_GTE
* \param[out] pchanged [optional] 1 if changed; 0 if clone returned
* \return boxad filtered set, or NULL on error
*
* <pre>
* Notes:
* (1) Uses box copies in the new boxa.
* (2) To keep small components, use relation = L_SELECT_IF_LT or
* L_SELECT_IF_LTE.
* To keep large components, use relation = L_SELECT_IF_GT or
* L_SELECT_IF_GTE.
* </pre>
*/
BOXA *
boxaSelectByArea(BOXA    *boxas,
                 l_int32  area,
                 l_int32  relation,
                 l_int32 *pchanged)
{
BOXA  *boxad;
NUMA  *naind;

    PROCNAME("boxaSelectByArea");

    if (pchanged) *pchanged = FALSE;
    if (!boxas)
        return (BOXA *)ERROR_PTR("boxas not defined", procName, NULL);
    if (boxaGetCount(boxas) == 0) {
        L_WARNING("boxas is empty\n", procName);
        return boxaCopy(boxas, L_COPY);
    }
    if (relation != L_SELECT_IF_LT && relation != L_SELECT_IF_GT &&
        relation != L_SELECT_IF_LTE && relation != L_SELECT_IF_GTE)
        return (BOXA *)ERROR_PTR("invalid relation", procName, NULL);

        /* Mark each box whose area satisfies the constraint, then
         * keep exactly the marked boxes. */
    naind = boxaMakeAreaIndicator(boxas, area, relation);
    boxad = boxaSelectWithIndicator(boxas, naind, pchanged);
    numaDestroy(&naind);
    return boxad;
}
/*!
* \brief boxaMakeAreaIndicator()
*
* \param[in] boxa
* \param[in] area threshold value of width * height
* \param[in] relation L_SELECT_IF_LT, L_SELECT_IF_GT,
* L_SELECT_IF_LTE, L_SELECT_IF_GTE
* \return na indicator array, or NULL on error
*
* <pre>
* Notes:
* (1) To keep small components, use relation = L_SELECT_IF_LT or
* L_SELECT_IF_LTE.
* To keep large components, use relation = L_SELECT_IF_GT or
* L_SELECT_IF_GTE.
* </pre>
*/
NUMA *
boxaMakeAreaIndicator(BOXA    *boxa,
                      l_int32  area,
                      l_int32  relation)
{
l_int32  i, n, w, h, boxarea, ival;
NUMA    *na;

    PROCNAME("boxaMakeAreaIndicator");

    if (!boxa)
        return (NUMA *)ERROR_PTR("boxa not defined", procName, NULL);
    if ((n = boxaGetCount(boxa)) == 0)
        return (NUMA *)ERROR_PTR("boxa is empty", procName, NULL);
    if (relation != L_SELECT_IF_LT && relation != L_SELECT_IF_GT &&
        relation != L_SELECT_IF_LTE && relation != L_SELECT_IF_GTE)
        return (NUMA *)ERROR_PTR("invalid relation", procName, NULL);

    na = numaCreate(n);
    for (i = 0; i < n; i++) {
        boxaGetBoxGeometry(boxa, i, NULL, NULL, &w, &h);
        boxarea = w * h;
        switch (relation) {
        case L_SELECT_IF_LT:
            ival = (boxarea < area);
            break;
        case L_SELECT_IF_GT:
            ival = (boxarea > area);
            break;
        case L_SELECT_IF_LTE:
            ival = (boxarea <= area);
            break;
        default:  /* L_SELECT_IF_GTE; relation was validated above */
            ival = (boxarea >= area);
            break;
        }
        numaAddNumber(na, ival);
    }

    return na;
}
/*!
* \brief boxaSelectByWHRatio()
*
* \param[in] boxas
* \param[in] ratio width/height threshold value
* \param[in] relation L_SELECT_IF_LT, L_SELECT_IF_GT,
* L_SELECT_IF_LTE, L_SELECT_IF_GTE
* \param[out] pchanged [optional] 1 if changed; 0 if clone returned
* \return boxad filtered set, or NULL on error
*
* <pre>
* Notes:
* (1) Uses box copies in the new boxa.
* (2) To keep narrow components, use relation = L_SELECT_IF_LT or
* L_SELECT_IF_LTE.
* To keep wide components, use relation = L_SELECT_IF_GT or
* L_SELECT_IF_GTE.
* </pre>
*/
BOXA *
boxaSelectByWHRatio(BOXA      *boxas,
                    l_float32  ratio,
                    l_int32    relation,
                    l_int32   *pchanged)
{
BOXA  *boxad;
NUMA  *naind;

    PROCNAME("boxaSelectByWHRatio");

    if (pchanged) *pchanged = FALSE;
    if (!boxas)
        return (BOXA *)ERROR_PTR("boxas not defined", procName, NULL);
    if (boxaGetCount(boxas) == 0) {
        L_WARNING("boxas is empty\n", procName);
        return boxaCopy(boxas, L_COPY);
    }
    if (relation != L_SELECT_IF_LT && relation != L_SELECT_IF_GT &&
        relation != L_SELECT_IF_LTE && relation != L_SELECT_IF_GTE)
        return (BOXA *)ERROR_PTR("invalid relation", procName, NULL);

        /* Mark each box whose w/h ratio satisfies the constraint,
         * then keep exactly the marked boxes. */
    naind = boxaMakeWHRatioIndicator(boxas, ratio, relation);
    boxad = boxaSelectWithIndicator(boxas, naind, pchanged);
    numaDestroy(&naind);
    return boxad;
}
/*!
* \brief boxaMakeWHRatioIndicator()
*
* \param[in] boxa
* \param[in] ratio width/height threshold value
* \param[in] relation L_SELECT_IF_LT, L_SELECT_IF_GT,
* L_SELECT_IF_LTE, L_SELECT_IF_GTE
* \return na indicator array, or NULL on error
*
* <pre>
* Notes:
* (1) To keep narrow components, use relation = L_SELECT_IF_LT or
* L_SELECT_IF_LTE.
* To keep wide components, use relation = L_SELECT_IF_GT or
* L_SELECT_IF_GTE.
* </pre>
*/
NUMA *
boxaMakeWHRatioIndicator(BOXA      *boxa,
                         l_float32  ratio,
                         l_int32    relation)
{
l_int32    i, n, w, h, ival;
l_float32  whratio;
NUMA      *na;

    PROCNAME("boxaMakeWHRatioIndicator");

    if (!boxa)
        return (NUMA *)ERROR_PTR("boxa not defined", procName, NULL);
    if ((n = boxaGetCount(boxa)) == 0)
        return (NUMA *)ERROR_PTR("boxa is empty", procName, NULL);
    if (relation != L_SELECT_IF_LT && relation != L_SELECT_IF_GT &&
        relation != L_SELECT_IF_LTE && relation != L_SELECT_IF_GTE)
        return (NUMA *)ERROR_PTR("invalid relation", procName, NULL);

    na = numaCreate(n);
    for (i = 0; i < n; i++) {
        boxaGetBoxGeometry(boxa, i, NULL, NULL, &w, &h);
            /* NOTE(review): h == 0 (e.g., an invalid placeholder box)
             * gives a non-finite ratio; all comparisons below are then
             * false and the box is not selected. */
        whratio = (l_float32)w / (l_float32)h;
        switch (relation) {
        case L_SELECT_IF_LT:
            ival = (whratio < ratio);
            break;
        case L_SELECT_IF_GT:
            ival = (whratio > ratio);
            break;
        case L_SELECT_IF_LTE:
            ival = (whratio <= ratio);
            break;
        default:  /* L_SELECT_IF_GTE; relation was validated above */
            ival = (whratio >= ratio);
            break;
        }
        numaAddNumber(na, ival);
    }

    return na;
}
/*!
* \brief boxaSelectWithIndicator()
*
* \param[in] boxas
* \param[in] na indicator numa
* \param[out] pchanged [optional] 1 if changed; 0 if clone returned
* \return boxad, or NULL on error
*
* <pre>
* Notes:
* (1) Returns a copy of the boxa if no components are removed.
* (2) Uses box copies in the new boxa.
* (3) The indicator numa has values 0 (ignore) and 1 (accept).
* </pre>
*/
BOXA *
boxaSelectWithIndicator(BOXA    *boxas,
                        NUMA    *na,
                        l_int32 *pchanged)
{
l_int32  i, n, ival, nsave;
BOX     *box;
BOXA    *boxad;

    PROCNAME("boxaSelectWithIndicator");

    if (pchanged) *pchanged = FALSE;
    if (!boxas)
        return (BOXA *)ERROR_PTR("boxas not defined", procName, NULL);
    if (!na)
        return (BOXA *)ERROR_PTR("na not defined", procName, NULL);

        /* Count the boxes to be retained */
    n = numaGetCount(na);
    for (i = 0, nsave = 0; i < n; i++) {
        numaGetIValue(na, i, &ival);
        if (ival == 1) nsave++;
    }

        /* Nothing is removed: hand back a copy */
    if (nsave == n) {
        if (pchanged) *pchanged = FALSE;
        return boxaCopy(boxas, L_COPY);
    }

    if (pchanged) *pchanged = TRUE;
    boxad = boxaCreate(nsave);
    for (i = 0; i < n; i++) {
        numaGetIValue(na, i, &ival);
        if (ival == 0) continue;
        box = boxaGetBox(boxas, i, L_COPY);
        boxaAddBox(boxad, box, L_INSERT);
    }

    return boxad;
}
/*---------------------------------------------------------------------*
* Boxa Permutation *
*---------------------------------------------------------------------*/
/*!
* \brief boxaPermutePseudorandom()
*
* \param[in] boxas input boxa
* \return boxad with boxes permuted, or NULL on error
*
* <pre>
* Notes:
* (1) This does a pseudorandom in-place permutation of the boxes.
* (2) The result is guaranteed not to have any boxes in their
* original position, but it is not very random. If you
* need randomness, use boxaPermuteRandom().
* </pre>
*/
BOXA *
boxaPermutePseudorandom(BOXA *boxas)
{
NUMA  *naindex;
BOXA  *boxad;

    PROCNAME("boxaPermutePseudorandom");

    if (!boxas)
        return (BOXA *)ERROR_PTR("boxa not defined", procName, NULL);

        /* Build a pseudorandom index sequence and reorder by it */
    naindex = numaPseudorandomSequence(boxaGetCount(boxas), 0);
    boxad = boxaSortByIndex(boxas, naindex);
    numaDestroy(&naindex);
    return boxad;
}
/*!
* \brief boxaPermuteRandom()
*
* \param[in] boxad [optional] can be null or equal to boxas
* \param[in] boxas input boxa
* \return boxad with boxes permuted, or NULL on error
*
* <pre>
* Notes:
* (1) If boxad is null, make a copy of boxas and permute the copy.
* Otherwise, boxad must be equal to boxas, and the operation
* is done in-place.
* (2) If boxas is empty, return an empty boxad.
* (3) This does a random in-place permutation of the boxes,
* by swapping each box in turn with a random box. The
* result is almost guaranteed not to have any boxes in their
* original position.
 * (4) MSVC rand() has RAND_MAX = 2^15 - 1, so it will not do
 * a proper permutation if the number of boxes exceeds this.
* </pre>
*/
BOXA *
boxaPermuteRandom(BOXA  *boxad,
                  BOXA  *boxas)
{
l_int32  i, n, index;

    PROCNAME("boxaPermuteRandom");

    if (!boxas)
        return (BOXA *)ERROR_PTR("boxa not defined", procName, NULL);
        /* In-place operation: boxad, if given, must alias boxas */
    if (boxad && (boxad != boxas))
        return (BOXA *)ERROR_PTR("boxad defined but in-place", procName, NULL);
    if (!boxad)
        boxad = boxaCopy(boxas, L_COPY);
    if ((n = boxaGetCount(boxad)) == 0)
        return boxad;

        /* First swap: clamp the random index to >= 1 so that box 0
         * is always moved away from its original position. */
    index = (l_uint32)rand() % n;
    index = L_MAX(1, index);
    boxaSwapBoxes(boxad, 0, index);

        /* Swap each remaining box with a randomly chosen partner.
         * If the partner would be the box itself, step the index
         * back by one to reduce the chance a box stays in place. */
    for (i = 1; i < n; i++) {
        index = (l_uint32)rand() % n;
        if (index == i) index--;
        boxaSwapBoxes(boxad, i, index);
    }

    return boxad;
}
/*!
* \brief boxaSwapBoxes()
*
* \param[in] boxa
* \param[in] i, j two indices of boxes, that are to be swapped
* \return 0 if OK, 1 on error
*/
l_int32
boxaSwapBoxes(BOXA    *boxa,
              l_int32  i,
              l_int32  j)
{
l_int32  n;
BOX     *tmp;

    PROCNAME("boxaSwapBoxes");

    if (!boxa)
        return ERROR_INT("boxa not defined", procName, 1);
    n = boxaGetCount(boxa);
    if (i < 0 || i >= n)
        return ERROR_INT("i invalid", procName, 1);
    if (j < 0 || j >= n)
        return ERROR_INT("j invalid", procName, 1);
    if (i == j)
        return ERROR_INT("i == j", procName, 1);

        /* Exchange the two box pointers directly in the array */
    tmp = boxa->box[i];
    boxa->box[i] = boxa->box[j];
    boxa->box[j] = tmp;
    return 0;
}
/*---------------------------------------------------------------------*
* Boxa and Box Conversions *
*---------------------------------------------------------------------*/
/*!
* \brief boxaConvertToPta()
*
* \param[in] boxa
* \param[in] ncorners 2 or 4 for the representation of each box
* \return pta with %ncorners points for each box in the boxa,
* or NULL on error
*
* <pre>
* Notes:
* (1) If ncorners == 2, we select the UL and LR corners.
* Otherwise we save all 4 corners in this order: UL, UR, LL, LR.
* </pre>
*/
PTA *
boxaConvertToPta(BOXA    *boxa,
                 l_int32  ncorners)
{
l_int32  i, nbox;
BOX     *box;
PTA     *ptad, *ptabox;

    PROCNAME("boxaConvertToPta");

    if (!boxa)
        return (PTA *)ERROR_PTR("boxa not defined", procName, NULL);
    if (ncorners != 2 && ncorners != 4)
        return (PTA *)ERROR_PTR("ncorners not 2 or 4", procName, NULL);

    nbox = boxaGetCount(boxa);
    if ((ptad = ptaCreate(nbox)) == NULL)
        return (PTA *)ERROR_PTR("pta not made", procName, NULL);

        /* Append the corner points of each box in sequence */
    for (i = 0; i < nbox; i++) {
        box = boxaGetBox(boxa, i, L_COPY);
        ptabox = boxConvertToPta(box, ncorners);
        ptaJoin(ptad, ptabox, 0, -1);
        boxDestroy(&box);
        ptaDestroy(&ptabox);
    }

    return ptad;
}
/*!
* \brief ptaConvertToBoxa()
*
* \param[in] pta
* \param[in] ncorners 2 or 4 for the representation of each box
* \return boxa with one box for each 2 or 4 points in the pta,
* or NULL on error
*
* <pre>
* Notes:
* (1) For 2 corners, the order of the 2 points is UL, LR.
* For 4 corners, the order of points is UL, UR, LL, LR.
* (2) Each derived box is the minimum size containing all corners.
* </pre>
*/
BOXA *
ptaConvertToBoxa(PTA     *pta,
                 l_int32  ncorners)
{
l_int32  i, n, nbox, x1, y1, x2, y2, x3, y3, x4, y4, x, y, xmax, ymax;
BOX     *box;
BOXA    *boxa;

    PROCNAME("ptaConvertToBoxa");

    if (!pta)
        return (BOXA *)ERROR_PTR("pta not defined", procName, NULL);
    if (ncorners != 2 && ncorners != 4)
        return (BOXA *)ERROR_PTR("ncorners not 2 or 4", procName, NULL);
    n = ptaGetCount(pta);
    if (n % ncorners != 0)
        return (BOXA *)ERROR_PTR("size % ncorners != 0", procName, NULL);

    nbox = n / ncorners;
    if ((boxa = boxaCreate(nbox)) == NULL)
        return (BOXA *)ERROR_PTR("boxa not made", procName, NULL);

        /* Each group of %ncorners points yields one box */
    for (i = 0; i < n; i += ncorners) {
        ptaGetIPt(pta, i, &x1, &y1);
        ptaGetIPt(pta, i + 1, &x2, &y2);
        if (ncorners == 2) {
                /* Points are UL, LR */
            box = boxCreate(x1, y1, x2 - x1 + 1, y2 - y1 + 1);
        } else {
                /* Points are UL, UR, LL, LR; take the bounding
                 * rectangle of the four corners. */
            ptaGetIPt(pta, i + 2, &x3, &y3);
            ptaGetIPt(pta, i + 3, &x4, &y4);
            x = L_MIN(x1, x3);     /* left: UL, LL */
            y = L_MIN(y1, y2);     /* top: UL, UR */
            xmax = L_MAX(x2, x4);  /* right: UR, LR */
            ymax = L_MAX(y3, y4);  /* bottom: LL, LR */
            box = boxCreate(x, y, xmax - x + 1, ymax - y + 1);
        }
        boxaAddBox(boxa, box, L_INSERT);
    }

    return boxa;
}
/*!
* \brief boxConvertToPta()
*
* \param[in] box
* \param[in] ncorners 2 or 4 for the representation of the box
* \return pta with %ncorners points, or NULL on error
*
* <pre>
* Notes:
* (1) If ncorners == 2, we select the UL and LR corners.
* Otherwise we save all 4 corners in this order: UL, UR, LL, LR.
* </pre>
*/
PTA *
boxConvertToPta(BOX     *box,
                l_int32  ncorners)
{
l_int32  x, y, w, h;
PTA     *pta;

    PROCNAME("boxConvertToPta");

    if (!box)
        return (PTA *)ERROR_PTR("box not defined", procName, NULL);
    if (ncorners != 2 && ncorners != 4)
        return (PTA *)ERROR_PTR("ncorners not 2 or 4", procName, NULL);
    if ((pta = ptaCreate(ncorners)) == NULL)
        return (PTA *)ERROR_PTR("pta not made", procName, NULL);

        /* Emit UL always; for 4 corners insert UR and LL before LR,
         * giving the order UL, UR, LL, LR. */
    boxGetGeometry(box, &x, &y, &w, &h);
    ptaAddPt(pta, x, y);                      /* UL */
    if (ncorners == 4) {
        ptaAddPt(pta, x + w - 1, y);          /* UR */
        ptaAddPt(pta, x, y + h - 1);          /* LL */
    }
    ptaAddPt(pta, x + w - 1, y + h - 1);      /* LR */
    return pta;
}
/*!
* \brief ptaConvertToBox()
*
* \param[in] pta
* \return box minimum containing all points in the pta, or NULL on error
*
* <pre>
* Notes:
* (1) For 2 corners, the order of the 2 points is UL, LR.
* For 4 corners, the order of points is UL, UR, LL, LR.
* </pre>
*/
BOX *
ptaConvertToBox(PTA *pta)
{
l_int32  n, x1, y1, x2, y2, x3, y3, x4, y4, x, y, xmax, ymax;

    PROCNAME("ptaConvertToBox");

    if (!pta)
        return (BOX *)ERROR_PTR("pta not defined", procName, NULL);
    n = ptaGetCount(pta);
        /* Validate the point count before reading fixed indices.
         * Previously a pta with n < 2 or n == 3 caused failed
         * ptaGetIPt() calls and a garbage box was silently built. */
    if (n != 2 && n != 4)
        return (BOX *)ERROR_PTR("pta size not 2 or 4", procName, NULL);

    ptaGetIPt(pta, 0, &x1, &y1);
    ptaGetIPt(pta, 1, &x2, &y2);
    if (n == 2)  /* points are UL, LR */
        return boxCreate(x1, y1, x2 - x1 + 1, y2 - y1 + 1);

        /* 4 corners in the order UL, UR, LL, LR: return the
         * bounding rectangle of the four points. */
    ptaGetIPt(pta, 2, &x3, &y3);
    ptaGetIPt(pta, 3, &x4, &y4);
    x = L_MIN(x1, x3);     /* left: UL, LL */
    y = L_MIN(y1, y2);     /* top: UL, UR */
    xmax = L_MAX(x2, x4);  /* right: UR, LR */
    ymax = L_MAX(y3, y4);  /* bottom: LL, LR */
    return boxCreate(x, y, xmax - x + 1, ymax - y + 1);
}
/*---------------------------------------------------------------------*
* Boxa sequence fitting *
*---------------------------------------------------------------------*/
/*!
* \brief boxaSmoothSequenceLS()
*
* \param[in] boxas source boxa
* \param[in] factor reject outliers with widths and heights deviating
* from the median by more than %factor times
* the median variation from the median; typically ~3
* \param[in] subflag L_USE_MINSIZE, L_USE_MAXSIZE,
* L_SUB_ON_LOC_DIFF, L_SUB_ON_SIZE_DIFF,
* L_USE_CAPPED_MIN, L_USE_CAPPED_MAX
* \param[in] maxdiff parameter used with L_SUB_ON_LOC_DIFF,
* L_SUB_ON_SIZE_DIFF, L_USE_CAPPED_MIN, L_USE_CAPPED_MAX
* \param[in] extrapixels pixels added on all sides (or subtracted
* if %extrapixels < 0) when using
* L_SUB_ON_LOC_DIFF and L_SUB_ON_SIZE_DIFF
* \param[in] debug 1 for debug output
* \return boxad fitted boxa, or NULL on error
*
* <pre>
* Notes:
* (1) This returns a modified version of %boxas by constructing
* for each input box a box that has been linear least square fit
* (LSF) to the entire set. The linear fitting is done to each of
* the box sides independently, after outliers are rejected,
* and it is computed separately for sequences of even and
* odd boxes. Once the linear LSF box is found, the output box
* (in %boxad) is constructed from the input box and the LSF
* box, depending on %subflag. See boxaModifyWithBoxa() for
* details on the use of %subflag and %maxdiff.
* (2) This is useful if, in both the even and odd sets, the box
* edges vary roughly linearly with its index in the set.
* </pre>
*/
BOXA *
boxaSmoothSequenceLS(BOXA      *boxas,
                     l_float32  factor,
                     l_int32    subflag,
                     l_int32    maxdiff,
                     l_int32    extrapixels,
                     l_int32    debug)
{
l_int32  n;
BOXA    *boxae, *boxao, *boxalfe, *boxalfo, *boxame, *boxamo, *boxad;

    PROCNAME("boxaSmoothSequenceLS");

    if (!boxas)
        return (BOXA *)ERROR_PTR("boxas not defined", procName, NULL);
    if (factor <= 0.0) {
        L_WARNING("factor must be > 0.0; returning copy\n", procName);
        return boxaCopy(boxas, L_COPY);
    }
    if (maxdiff < 0) {
        L_WARNING("maxdiff must be >= 0; returning copy\n", procName);
        return boxaCopy(boxas, L_COPY);
    }
    if (subflag != L_USE_MINSIZE && subflag != L_USE_MAXSIZE &&
        subflag != L_SUB_ON_LOC_DIFF && subflag != L_SUB_ON_SIZE_DIFF &&
        subflag != L_USE_CAPPED_MIN && subflag != L_USE_CAPPED_MAX) {
        L_WARNING("invalid subflag; returning copy\n", procName);
        return boxaCopy(boxas, L_COPY);
    }
        /* Each parity set is fit independently, so at least 2 boxes
         * are needed in each; hence the minimum of 4 overall. */
    if ((n = boxaGetCount(boxas)) < 4) {
        L_WARNING("need at least 4 boxes; returning copy\n", procName);
        return boxaCopy(boxas, L_COPY);
    }

        /* Split into even- and odd-indexed boxes (fillflag = 1) */
    boxaSplitEvenOdd(boxas, 1, &boxae, &boxao);
    if (debug) {
        lept_mkdir("smooth");
        boxaWrite("/tmp/smooth/boxae.ba", boxae);
        boxaWrite("/tmp/smooth/boxao.ba", boxao);
    }

        /* Linear least-square fit each parity set separately,
         * rejecting outliers using %factor. */
    boxalfe = boxaLinearFit(boxae, factor, debug);
    boxalfo = boxaLinearFit(boxao, factor, debug);
    if (debug) {
        boxaWrite("/tmp/smooth/boxalfe.ba", boxalfe);
        boxaWrite("/tmp/smooth/boxalfo.ba", boxalfo);
    }

        /* Combine each input box with its fitted box per %subflag */
    boxame = boxaModifyWithBoxa(boxae, boxalfe, subflag, maxdiff, extrapixels);
    boxamo = boxaModifyWithBoxa(boxao, boxalfo, subflag, maxdiff, extrapixels);
    if (debug) {
        boxaWrite("/tmp/smooth/boxame.ba", boxame);
        boxaWrite("/tmp/smooth/boxamo.ba", boxamo);
    }

        /* Re-interleave the even and odd results */
    boxad = boxaMergeEvenOdd(boxame, boxamo, 1);
    boxaDestroy(&boxae);
    boxaDestroy(&boxao);
    boxaDestroy(&boxalfe);
    boxaDestroy(&boxalfo);
    boxaDestroy(&boxame);
    boxaDestroy(&boxamo);
    return boxad;
}
/*!
* \brief boxaSmoothSequenceMedian()
*
* \param[in] boxas source boxa
* \param[in] halfwin half-width of sliding window; used to find median
* \param[in] subflag L_USE_MINSIZE, L_USE_MAXSIZE,
* L_SUB_ON_LOC_DIFF, L_SUB_ON_SIZE_DIFF,
* L_USE_CAPPED_MIN, L_USE_CAPPED_MAX
* \param[in] maxdiff parameter used with L_SUB_ON_LOC_DIFF,
* L_SUB_ON_SIZE_DIFF, L_USE_CAPPED_MIN, L_USE_CAPPED_MAX
* \param[in] extrapixels pixels added on all sides (or subtracted
* if %extrapixels < 0) when using
* L_SUB_ON_LOC_DIFF and L_SUB_ON_SIZE_DIFF
* \param[in] debug 1 for debug output
* \return boxad fitted boxa, or NULL on error
*
* <pre>
* Notes:
* (1) The target width of the sliding window is 2 * %halfwin + 1.
* If necessary, this will be reduced by boxaWindowedMedian().
* (2) This returns a modified version of %boxas by constructing
* for each input box a box that has been smoothed with windowed
* median filtering. The filtering is done to each of the
* box sides independently, and it is computed separately for
* sequences of even and odd boxes. The output %boxad is
* constructed from the input boxa and the filtered boxa,
* depending on %subflag. See boxaModifyWithBoxa() for
* details on the use of %subflag, %maxdiff and %extrapixels.
* (3) This is useful for removing noise separately in the even
* and odd sets, where the box edge locations can have
* discontinuities but otherwise vary roughly linearly within
* intervals of size %halfwin or larger.
* (4) If you don't need to handle even and odd sets separately,
* just do this:
* boxam = boxaWindowedMedian(boxas, halfwin, debug);
* boxad = boxaModifyWithBoxa(boxas, boxam, subflag, maxdiff,
* extrapixels);
* boxaDestroy(&boxam);
* </pre>
*/
BOXA *
boxaSmoothSequenceMedian(BOXA    *boxas,
                         l_int32  halfwin,
                         l_int32  subflag,
                         l_int32  maxdiff,
                         l_int32  extrapixels,
                         l_int32  debug)
{
l_int32  n;
BOXA    *boxae, *boxao, *boxamede, *boxamedo, *boxame, *boxamo, *boxad;

    PROCNAME("boxaSmoothSequenceMedian");

    if (!boxas)
        return (BOXA *)ERROR_PTR("boxas not defined", procName, NULL);
    if (halfwin <= 0) {
        L_WARNING("halfwin must be > 0; returning copy\n", procName);
        return boxaCopy(boxas, L_COPY);
    }
    if (maxdiff < 0) {
        L_WARNING("maxdiff must be >= 0; returning copy\n", procName);
        return boxaCopy(boxas, L_COPY);
    }
    if (subflag != L_USE_MINSIZE && subflag != L_USE_MAXSIZE &&
        subflag != L_SUB_ON_LOC_DIFF && subflag != L_SUB_ON_SIZE_DIFF &&
        subflag != L_USE_CAPPED_MIN && subflag != L_USE_CAPPED_MAX) {
        L_WARNING("invalid subflag; returning copy\n", procName);
        return boxaCopy(boxas, L_COPY);
    }
        /* Each parity set is median-filtered independently and
         * needs at least 3 boxes; hence the minimum of 6 overall. */
    if ((n = boxaGetCount(boxas)) < 6) {
        L_WARNING("need at least 6 boxes; returning copy\n", procName);
        return boxaCopy(boxas, L_COPY);
    }

        /* Split into even- and odd-indexed boxes (fillflag = 0) */
    boxaSplitEvenOdd(boxas, 0, &boxae, &boxao);
    if (debug) {
        lept_mkdir("smooth");
        boxaWrite("/tmp/smooth/boxae.ba", boxae);
        boxaWrite("/tmp/smooth/boxao.ba", boxao);
    }

        /* Windowed median smoothing on each parity set separately */
    boxamede = boxaWindowedMedian(boxae, halfwin, debug);
    boxamedo = boxaWindowedMedian(boxao, halfwin, debug);
    if (debug) {
        boxaWrite("/tmp/smooth/boxamede.ba", boxamede);
        boxaWrite("/tmp/smooth/boxamedo.ba", boxamedo);
    }

        /* Combine each input box with its smoothed box per %subflag */
    boxame = boxaModifyWithBoxa(boxae, boxamede, subflag, maxdiff, extrapixels);
    boxamo = boxaModifyWithBoxa(boxao, boxamedo, subflag, maxdiff, extrapixels);
    if (debug) {
        boxaWrite("/tmp/smooth/boxame.ba", boxame);
        boxaWrite("/tmp/smooth/boxamo.ba", boxamo);
    }

        /* Re-interleave the even and odd results */
    boxad = boxaMergeEvenOdd(boxame, boxamo, 0);
    if (debug) {
        boxaPlotSides(boxas, NULL, NULL, NULL, NULL, NULL, NULL);
        boxaPlotSides(boxad, NULL, NULL, NULL, NULL, NULL, NULL);
        boxaPlotSizes(boxas, NULL, NULL, NULL, NULL);
        boxaPlotSizes(boxad, NULL, NULL, NULL, NULL);
    }

    boxaDestroy(&boxae);
    boxaDestroy(&boxao);
    boxaDestroy(&boxamede);
    boxaDestroy(&boxamedo);
    boxaDestroy(&boxame);
    boxaDestroy(&boxamo);
    return boxad;
}
/*!
* \brief boxaLinearFit()
*
* \param[in] boxas source boxa
* \param[in] factor reject outliers with widths and heights deviating
* from the median by more than %factor times
* the median deviation from the median; typically ~3
* \param[in] debug 1 for debug output
* \return boxad fitted boxa, or NULL on error
*
* <pre>
* Notes:
* (1) This finds a set of boxes (boxad) where each edge of each box is
* a linear least square fit (LSF) to the edges of the
* input set of boxes (boxas). Before fitting, outliers in
* the boxes in boxas are removed (see below).
* (2) This is useful when each of the box edges in boxas are expected
* to vary linearly with box index in the set. These could
* be, for example, noisy measurements of similar regions
* on successive scanned pages.
* (3) Method: there are 2 steps:
* (a) Find and remove outliers, separately based on the deviation
* from the median of the width and height of the box.
* Use %factor to specify tolerance to outliers; use a very
* large value of %factor to avoid rejecting any box sides
* in the linear LSF.
* (b) On the remaining boxes, do a linear LSF independently
* for each of the four sides.
* (4) Invalid input boxes are not used in computation of the LSF.
* (5) The returned boxad can then be used in boxaModifyWithBoxa()
* to selectively change the boxes in boxas.
* </pre>
*/
BOXA *
boxaLinearFit(BOXA      *boxas,
              l_float32  factor,
              l_int32    debug)
{
l_int32    n, i, w, h, lval, tval, rval, bval, rejectlr, rejecttb;
l_float32  al, bl, at, bt, ar, br, ab, bb;  /* LSF coefficients */
l_float32  medw, medh, medvarw, medvarh;
BOX       *box, *boxempty;
BOXA      *boxalr, *boxatb, *boxad;
NUMA      *naw, *nah;
PTA       *ptal, *ptat, *ptar, *ptab;

    PROCNAME("boxaLinearFit");

    if (!boxas)
        return (BOXA *)ERROR_PTR("boxas not defined", procName, NULL);
    if ((n = boxaGetCount(boxas)) < 2)
        return (BOXA *)ERROR_PTR("need at least 2 boxes", procName, NULL);

        /* Remove outliers based on width and height.
         * First find the median width and the median deviation from
         * the median width.  Ditto for the height. */
    boxaExtractAsNuma(boxas, NULL, NULL, NULL, NULL, &naw, &nah, 0);
    numaGetMedianVariation(naw, &medw, &medvarw);
    numaGetMedianVariation(nah, &medh, &medvarh);
    numaDestroy(&naw);
    numaDestroy(&nah);

    if (debug) {
        fprintf(stderr, "medw = %7.3f, medvarw = %7.3f\n", medw, medvarw);
        fprintf(stderr, "medh = %7.3f, medvarh = %7.3f\n", medh, medvarh);
    }

        /* To fit the left and right sides, only use boxes whose
         * width is within (factor * medvarw) of the median width.
         * Ditto for the top and bottom sides.  Add empty boxes
         * in as placeholders so that the index remains the same
         * as in boxas. */
    boxalr = boxaCreate(n);
    boxatb = boxaCreate(n);
    boxempty = boxCreate(0, 0, 0, 0);  /* placeholders */
    rejectlr = rejecttb = 0;
    for (i = 0; i < n; i++) {
            /* Invalid input boxes become placeholders in both sets */
        if ((box = boxaGetValidBox(boxas, i, L_CLONE)) == NULL) {
            boxaAddBox(boxalr, boxempty, L_COPY);
            boxaAddBox(boxatb, boxempty, L_COPY);
            continue;
        }
        boxGetGeometry(box, NULL, NULL, &w, &h);
            /* Keep for the L/R fit only if the width is an inlier */
        if (L_ABS(w - medw) <= factor * medvarw) {
            boxaAddBox(boxalr, box, L_COPY);
        } else {
            rejectlr++;
            boxaAddBox(boxalr, boxempty, L_COPY);
        }
            /* Keep for the T/B fit only if the height is an inlier */
        if (L_ABS(h - medh) <= factor * medvarh) {
            boxaAddBox(boxatb, box, L_COPY);
        } else {
            rejecttb++;
            boxaAddBox(boxatb, boxempty, L_COPY);
        }
        boxDestroy(&box);
    }
    boxDestroy(&boxempty);
    if (boxaGetCount(boxalr) < 2 || boxaGetCount(boxatb) < 2) {
        boxaDestroy(&boxalr);
        boxaDestroy(&boxatb);
        return (BOXA *)ERROR_PTR("need at least 2 valid boxes", procName, NULL);
    }

    if (debug) {
        L_INFO("# lr reject = %d, # tb reject = %d\n", procName,
               rejectlr, rejecttb);
        lept_mkdir("linfit");
        boxaWrite("/tmp/linfit/boxalr.ba", boxalr);
        boxaWrite("/tmp/linfit/boxatb.ba", boxatb);
    }

        /* Extract the valid left and right box sides, along with the box
         * index, from boxalr.  This only extracts pts corresponding to
         * valid boxes.  Ditto: top and bottom sides from boxatb. */
    boxaExtractAsPta(boxalr, &ptal, NULL, &ptar, NULL, NULL, NULL, 0);
    boxaExtractAsPta(boxatb, NULL, &ptat, NULL, &ptab, NULL, NULL, 0);
    boxaDestroy(&boxalr);
    boxaDestroy(&boxatb);

    if (debug) {
        ptaWrite("/tmp/linfit/ptal.pta", ptal, 1);
        ptaWrite("/tmp/linfit/ptar.pta", ptar, 1);
        ptaWrite("/tmp/linfit/ptat.pta", ptat, 1);
        ptaWrite("/tmp/linfit/ptab.pta", ptab, 1);
    }

        /* Do a linear LSF fit to the points that are width and height
         * validated.  Because we've eliminated the outliers, there is no
         * need to use ptaNoisyLinearLSF(ptal, factor, NULL, &al, &bl, ...) */
    ptaGetLinearLSF(ptal, &al, &bl, NULL);
    ptaGetLinearLSF(ptat, &at, &bt, NULL);
    ptaGetLinearLSF(ptar, &ar, &br, NULL);
    ptaGetLinearLSF(ptab, &ab, &bb, NULL);

        /* Return the LSF smoothed values, interleaved with invalid
         * boxes when the corresponding box in boxas is invalid. */
    boxad = boxaCreate(n);
    boxempty = boxCreate(0, 0, 0, 0);  /* use for placeholders */
    for (i = 0; i < n; i++) {
            /* Evaluate each fitted side at index i and round */
        lval = (l_int32)(al * i + bl + 0.5);
        tval = (l_int32)(at * i + bt + 0.5);
        rval = (l_int32)(ar * i + br + 0.5);
        bval = (l_int32)(ab * i + bb + 0.5);
        if ((box = boxaGetValidBox(boxas, i, L_CLONE)) == NULL) {
            boxaAddBox(boxad, boxempty, L_COPY);
        } else {
            boxDestroy(&box);
            box = boxCreate(lval, tval, rval - lval + 1, bval - tval + 1);
            boxaAddBox(boxad, box, L_INSERT);
        }
    }
    boxDestroy(&boxempty);

    if (debug) {
        boxaPlotSides(boxad, NULL, NULL, NULL, NULL, NULL, NULL);
        boxaPlotSizes(boxad, NULL, NULL, NULL, NULL);
    }

    ptaDestroy(&ptal);
    ptaDestroy(&ptat);
    ptaDestroy(&ptar);
    ptaDestroy(&ptab);
    return boxad;
}
/*!
* \brief boxaWindowedMedian()
*
* \param[in] boxas source boxa
* \param[in] halfwin half width of window over which the median is found
* \param[in] debug 1 for debug output
* \return boxad smoothed boxa, or NULL on error
*
* <pre>
* Notes:
* (1) This finds a set of boxes (boxad) where each edge of each box is
* a windowed median smoothed value to the edges of the
* input set of boxes (boxas).
* (2) Invalid input boxes are filled from nearby ones.
* (3) The returned boxad can then be used in boxaModifyWithBoxa()
* to selectively change the boxes in the source boxa.
* </pre>
*/
BOXA *
boxaWindowedMedian(BOXA    *boxas,
                   l_int32  halfwin,
                   l_int32  debug)
{
l_int32  n, i, left, top, right, bot;
BOX     *box;
BOXA    *boxaf, *boxad;
NUMA    *nal, *nat, *nar, *nab, *naml, *namt, *namr, *namb;

    PROCNAME("boxaWindowedMedian");

    if (!boxas)
        return (BOXA *)ERROR_PTR("boxas not defined", procName, NULL);
    if ((n = boxaGetCount(boxas)) < 3) {
        L_WARNING("less than 3 boxes; returning a copy\n", procName);
        return boxaCopy(boxas, L_COPY);
    }
    if (halfwin <= 0) {
        L_WARNING("halfwin must be > 0; returning copy\n", procName);
        return boxaCopy(boxas, L_COPY);
    }

        /* Fill invalid boxes in the input sequence */
    if ((boxaf = boxaFillSequence(boxas, L_USE_ALL_BOXES, debug)) == NULL)
        return (BOXA *)ERROR_PTR("filled boxa not made", procName, NULL);

        /* Get the windowed median output from each of the sides:
         * extract the 4 side coordinates as numas and median-filter
         * each one independently over a window of width
         * 2 * halfwin + 1. */
    boxaExtractAsNuma(boxaf, &nal, &nat, &nar, &nab, NULL, NULL, 0);
    naml = numaWindowedMedian(nal, halfwin);
    namt = numaWindowedMedian(nat, halfwin);
    namr = numaWindowedMedian(nar, halfwin);
    namb = numaWindowedMedian(nab, halfwin);

        /* Rebuild a box at each index from the 4 smoothed sides */
    n = boxaGetCount(boxaf);
    boxad = boxaCreate(n);
    for (i = 0; i < n; i++) {
        numaGetIValue(naml, i, &left);
        numaGetIValue(namt, i, &top);
        numaGetIValue(namr, i, &right);
        numaGetIValue(namb, i, &bot);
        box = boxCreate(left, top, right - left + 1, bot - top + 1);
        boxaAddBox(boxad, box, L_INSERT);
    }

    if (debug) {
        boxaPlotSides(boxaf, NULL, NULL, NULL, NULL, NULL, NULL);
        boxaPlotSides(boxad, NULL, NULL, NULL, NULL, NULL, NULL);
        boxaPlotSizes(boxaf, NULL, NULL, NULL, NULL);
        boxaPlotSizes(boxad, NULL, NULL, NULL, NULL);
    }

    boxaDestroy(&boxaf);
    numaDestroy(&nal);
    numaDestroy(&nat);
    numaDestroy(&nar);
    numaDestroy(&nab);
    numaDestroy(&naml);
    numaDestroy(&namt);
    numaDestroy(&namr);
    numaDestroy(&namb);
    return boxad;
}
/*!
* \brief boxaModifyWithBoxa()
*
* \param[in] boxas
* \param[in] boxam boxa with boxes used to modify those in boxas
* \param[in] subflag L_USE_MINSIZE, L_USE_MAXSIZE,
* L_SUB_ON_LOC_DIFF, L_SUB_ON_SIZE_DIFF,
* L_USE_CAPPED_MIN, L_USE_CAPPED_MAX
* \param[in] maxdiff parameter used with L_SUB_ON_LOC_DIFF,
* L_SUB_ON_SIZE_DIFF, L_USE_CAPPED_MIN, L_USE_CAPPED_MAX
* \param[in] extrapixels pixels added on all sides (or subtracted
* if %extrapixels < 0) when using
* L_SUB_ON_LOC_DIFF and L_SUB_ON_SIZE_DIFF
* \return boxad result after adjusting boxes in boxas, or NULL
* on error.
*
* <pre>
* Notes:
* (1) This takes two input boxa (boxas, boxam) and constructs boxad,
* where each box in boxad is generated from the corresponding
* boxes in boxas and boxam. The rule for constructing each
* output box depends on %subflag and %maxdiff. Let boxs be
* a box from %boxas and boxm be a box from %boxam.
* * If %subflag == L_USE_MINSIZE: the output box is the intersection
* of the two input boxes.
* * If %subflag == L_USE_MAXSIZE: the output box is the union of the
* two input boxes; i.e., the minimum bounding rectangle for the
* two input boxes.
* * If %subflag == L_SUB_ON_LOC_DIFF: each side of the output box
* is found separately from the corresponding side of boxs and boxm.
* Use the boxm side, expanded by %extrapixels, if greater than
* %maxdiff pixels from the boxs side.
* * If %subflag == L_SUB_ON_SIZE_DIFF: the sides of the output box
* are determined in pairs from the width and height of boxs
* and boxm. If the boxm width differs by more than %maxdiff
* pixels from boxs, use the boxm left and right sides,
* expanded by %extrapixels. Ditto for the height difference.
* For the last two flags, each side of the output box is found
* separately from the corresponding side of boxs and boxm,
* according to these rules, where "smaller"("bigger") mean in a
* direction that decreases(increases) the size of the output box:
* * If %subflag == L_USE_CAPPED_MIN: use the Min of boxm
* with the Max of (boxs, boxm +- %maxdiff), where the sign
* is adjusted to make the box smaller (e.g., use "+" on left side).
* * If %subflag == L_USE_CAPPED_MAX: use the Max of boxm
* with the Min of (boxs, boxm +- %maxdiff), where the sign
* is adjusted to make the box bigger (e.g., use "-" on left side).
* Use of the last 2 flags is further explained in (3) and (4).
* (2) boxas and boxam must be the same size. If boxam == NULL,
* this returns a copy of boxas with a warning.
* (3) If %subflag == L_SUB_ON_LOC_DIFF, use boxm for each side
* where the corresponding sides differ by more than %maxdiff.
* Two extreme cases:
* (a) set %maxdiff == 0 to use only values from boxam in boxad.
* (b) set %maxdiff == 10000 to ignore all values from boxam;
* then boxad will be the same as boxas.
* (4) If %subflag == L_USE_CAPPED_MAX: use boxm if boxs is smaller;
* use boxs if boxs is bigger than boxm by an amount up to %maxdiff;
* and use boxm +- %maxdiff (the 'capped' value) if boxs is
* bigger than boxm by an amount larger than %maxdiff.
* Similarly, with interchange of Min/Max and sign of %maxdiff,
* for %subflag == L_USE_CAPPED_MIN.
* (5) If either of corresponding boxes in boxas and boxam is invalid,
* an invalid box is copied to the result.
 *      (6) Typical input for boxam may be the output of boxaLinearFit(),
 *          where outliers have been removed and each side is LS fit to a line.
* (7) Unlike boxaAdjustWidthToTarget() and boxaAdjustHeightToTarget(),
* this uses two boxes and does not specify target dimensions.
* Additional constraints on the size of each box can be enforced
* by following this operation with boxaConstrainSize(), taking
* boxad as input.
* </pre>
*/
BOXA *
boxaModifyWithBoxa(BOXA    *boxas,
                   BOXA    *boxam,
                   l_int32  subflag,
                   l_int32  maxdiff,
                   l_int32  extrapixels)
{
l_int32  n, i, ls, ts, rs, bs, ws, hs, lm, tm, rm, bm, wm, hm, ld, td, rd, bd;
BOX     *boxs, *boxm, *boxd, *boxempty;
BOXA    *boxad;

    PROCNAME("boxaModifyWithBoxa");

    if (!boxas)
        return (BOXA *)ERROR_PTR("boxas not defined", procName, NULL);
    if (!boxam) {
        L_WARNING("boxam not defined; returning copy\n", procName);
        return boxaCopy(boxas, L_COPY);
    }
    if (subflag != L_USE_MINSIZE && subflag != L_USE_MAXSIZE &&
        subflag != L_SUB_ON_LOC_DIFF && subflag != L_SUB_ON_SIZE_DIFF &&
        subflag != L_USE_CAPPED_MIN && subflag != L_USE_CAPPED_MAX) {
        L_WARNING("invalid subflag; returning copy\n", procName);
        return boxaCopy(boxas, L_COPY);
    }
    n = boxaGetCount(boxas);
    if (n != boxaGetCount(boxam)) {
        L_WARNING("boxas and boxam sizes differ; returning copy\n", procName);
        return boxaCopy(boxas, L_COPY);
    }

    boxad = boxaCreate(n);
    boxempty = boxCreate(0, 0, 0, 0);  /* placeholder for invalid inputs */
    for (i = 0; i < n; i++) {
        boxs = boxaGetValidBox(boxas, i, L_CLONE);
        boxm = boxaGetValidBox(boxam, i, L_CLONE);
        if (!boxs || !boxm) {
                /* (5): either input invalid --> output invalid */
            boxaAddBox(boxad, boxempty, L_COPY);
        } else {
                /* Work with the four sides of each box */
            boxGetGeometry(boxs, &ls, &ts, &ws, &hs);
            boxGetGeometry(boxm, &lm, &tm, &wm, &hm);
            rs = ls + ws - 1;
            bs = ts + hs - 1;
            rm = lm + wm - 1;
            bm = tm + hm - 1;
            if (subflag == L_USE_MINSIZE) {  /* intersection of the boxes */
                ld = L_MAX(ls, lm);
                rd = L_MIN(rs, rm);
                td = L_MAX(ts, tm);
                bd = L_MIN(bs, bm);
            } else if (subflag == L_USE_MAXSIZE) {  /* bounding rectangle */
                ld = L_MIN(ls, lm);
                rd = L_MAX(rs, rm);
                td = L_MIN(ts, tm);
                bd = L_MAX(bs, bm);
            } else if (subflag == L_SUB_ON_LOC_DIFF) {
                    /* Use boxm side (expanded) where it differs by
                     * more than %maxdiff from the boxs side */
                ld = (L_ABS(lm - ls) <= maxdiff) ? ls : lm - extrapixels;
                td = (L_ABS(tm - ts) <= maxdiff) ? ts : tm - extrapixels;
                rd = (L_ABS(rm - rs) <= maxdiff) ? rs : rm + extrapixels;
                bd = (L_ABS(bm - bs) <= maxdiff) ? bs : bm + extrapixels;
            } else if (subflag == L_SUB_ON_SIZE_DIFF) {
                    /* Sides selected in pairs by width/height difference */
                ld = (L_ABS(wm - ws) <= maxdiff) ? ls : lm - extrapixels;
                td = (L_ABS(hm - hs) <= maxdiff) ? ts : tm - extrapixels;
                rd = (L_ABS(wm - ws) <= maxdiff) ? rs : rm + extrapixels;
                bd = (L_ABS(hm - hs) <= maxdiff) ? bs : bm + extrapixels;
            } else if (subflag == L_USE_CAPPED_MIN) {
                    /* boxs side, capped at boxm side +- %maxdiff, and
                     * never smaller than the boxm side */
                ld = L_MAX(lm, L_MIN(ls, lm + maxdiff));
                td = L_MAX(tm, L_MIN(ts, tm + maxdiff));
                rd = L_MIN(rm, L_MAX(rs, rm - maxdiff));
                bd = L_MIN(bm, L_MAX(bs, bm - maxdiff));
            } else {  /* subflag == L_USE_CAPPED_MAX */
                ld = L_MIN(lm, L_MAX(ls, lm - maxdiff));
                td = L_MIN(tm, L_MAX(ts, tm - maxdiff));
                rd = L_MAX(rm, L_MIN(rs, rm + maxdiff));
                bd = L_MAX(bm, L_MIN(bs, bm + maxdiff));
            }
            boxd = boxCreate(ld, td, rd - ld + 1, bd - td + 1);
            boxaAddBox(boxad, boxd, L_INSERT);
        }
        boxDestroy(&boxs);
        boxDestroy(&boxm);
    }
    boxDestroy(&boxempty);

    return boxad;
}
/*!
* \brief boxaConstrainSize()
*
* \param[in] boxas
* \param[in] width force width of all boxes to this size;
* input 0 to use the median width
* \param[in] widthflag L_ADJUST_SKIP, L_ADJUST_LEFT, L_ADJUST_RIGHT,
* or L_ADJUST_LEFT_AND_RIGHT
* \param[in] height force height of all boxes to this size;
* input 0 to use the median height
* \param[in] heightflag L_ADJUST_SKIP, L_ADJUST_TOP, L_ADJUST_BOT,
* or L_ADJUST_TOP_AND_BOT
* \return boxad adjusted so all boxes are the same size
*
* <pre>
* Notes:
* (1) Forces either width or height (or both) of every box in
* the boxa to a specified size, by moving the indicated sides.
* (2) Not all input boxes need to be valid. Median values will be
* used with invalid boxes.
* (3) Typical input might be the output of boxaLinearFit(),
* where each side has been fit.
* (4) Unlike boxaAdjustWidthToTarget() and boxaAdjustHeightToTarget(),
* this is not dependent on a difference threshold to change the size.
* (5) On error, a message is issued and a copy of the input boxa
* is returned.
* </pre>
*/
BOXA *
boxaConstrainSize(BOXA    *boxas,
                  l_int32  width,
                  l_int32  widthflag,
                  l_int32  height,
                  l_int32  heightflag)
{
l_int32  n, i, x, y, w, h, invalid;
l_int32  delw, delh, del_left, del_right, del_top, del_bot;
BOX     *medbox, *boxs, *boxd;
BOXA    *boxad;

    PROCNAME("boxaConstrainSize");

    if (!boxas)
        return (BOXA *)ERROR_PTR("boxas not defined", procName, NULL);

        /* Need median values if requested or if there are invalid boxes */
    invalid = boxaGetCount(boxas) - boxaGetValidCount(boxas);
    medbox = NULL;
    if (width == 0 || height == 0 || invalid > 0) {
        if (boxaGetMedianVals(boxas, &x, &y, &w, &h)) {
            L_ERROR("median vals not returned\n", procName);
            return boxaCopy(boxas, L_COPY);
        }
        medbox = boxCreate(x, y, w, h);
        if (width == 0) width = w;
        if (height == 0) height = h;
    }

    n = boxaGetCount(boxas);
    boxad = boxaCreate(n);
    for (i = 0; i < n; i++) {
            /* Substitute the median box for any invalid input box */
        if ((boxs = boxaGetValidBox(boxas, i, L_COPY)) == NULL)
            boxs = boxCopy(medbox);
        boxGetGeometry(boxs, NULL, NULL, &w, &h);
        delw = width - w;
        delh = height - h;
        del_left = del_right = del_top = del_bot = 0;
        if (widthflag == L_ADJUST_LEFT) {
            del_left = -delw;
        } else if (widthflag == L_ADJUST_RIGHT) {
            del_right = delw;
        } else {
                /* Split the change; any odd remainder goes to the right */
            del_left = -delw / 2;
            del_right = delw / 2 + L_SIGN(delw) * (delw & 1);
        }
        if (heightflag == L_ADJUST_TOP) {
            del_top = -delh;
        } else if (heightflag == L_ADJUST_BOT) {
            del_bot = delh;
        } else {
                /* Split the change; any odd remainder goes to the bottom */
            del_top = -delh / 2;
            del_bot = delh / 2 + L_SIGN(delh) * (delh & 1);
        }
        boxd = boxAdjustSides(NULL, boxs, del_left, del_right,
                              del_top, del_bot);
        boxaAddBox(boxad, boxd, L_INSERT);
        boxDestroy(&boxs);
    }

    boxDestroy(&medbox);  /* no-op if the median box was never made */
    return boxad;
}
/*!
* \brief boxaReconcileEvenOddHeight()
*
* \param[in] boxas containing at least 3 valid boxes in even and odd
* \param[in] sides L_ADJUST_TOP, L_ADJUST_BOT, L_ADJUST_TOP_AND_BOT
* \param[in] delh threshold on median height difference
* \param[in] op L_ADJUST_CHOOSE_MIN, L_ADJUST_CHOOSE_MAX
* \param[in] factor > 0.0, typically near 1.0
* \param[in] start 0 if pairing (0,1), etc; 1 if pairing (1,2), etc
* \return boxad adjusted, or a copy of boxas on error
*
* <pre>
* Notes:
* (1) The basic idea is to reconcile differences in box height
* in the even and odd boxes, by moving the top and/or bottom
* edges in the even and odd boxes. Choose the edge or edges
* to be moved, whether to adjust the boxes with the min
* or the max of the medians, and the threshold on the median
* difference between even and odd box heights for the operations
* to take place. The same threshold is also used to
* determine if each individual box edge is to be adjusted.
* (2) Boxes are conditionally reset with either the same top (y)
* value or the same bottom value, or both. The value is
* determined by the greater or lesser of the medians of the
* even and odd boxes, with the choice depending on the value
* of %op, which selects for either min or max median height.
* If the median difference between even and odd boxes is
* greater than %dely, then any individual box edge that differs
* from the selected median by more than %dely is set to
* the selected median times a factor typically near 1.0.
* (3) Note that if selecting for minimum height, you will choose
* the largest y-value for the top and the smallest y-value for
* the bottom of the box.
* (4) Typical input might be the output of boxaSmoothSequence(),
* where even and odd boxa have been independently regulated.
* (5) Require at least 3 valid even boxes and 3 valid odd boxes.
* Median values will be used for invalid boxes.
* (6) If the median height is not representative of the boxes
* in %boxas, this can make things much worse. In that case,
* ignore the value of %op, and force pairwise equality of the
* heights, with pairwise maximal vertical extension.
* </pre>
*/
BOXA *
boxaReconcileEvenOddHeight(BOXA      *boxas,
                           l_int32    sides,
                           l_int32    delh,
                           l_int32    op,
                           l_float32  factor,
                           l_int32    start)
{
l_int32    n, he, ho, hmed, doeven;
l_float32  del1, del2;
BOXA      *boxae, *boxao, *boxa1e, *boxa1o, *boxad;

    PROCNAME("boxaReconcileEvenOddHeight");

    if (!boxas)
        return (BOXA *)ERROR_PTR("boxas not defined", procName, NULL);
    if (sides != L_ADJUST_TOP && sides != L_ADJUST_BOT &&
        sides != L_ADJUST_TOP_AND_BOT) {
        L_WARNING("no action requested; returning copy\n", procName);
        return boxaCopy(boxas, L_COPY);
    }
    if ((n = boxaGetValidCount(boxas)) < 6) {
        L_WARNING("need at least 6 valid boxes; returning copy\n", procName);
        return boxaCopy(boxas, L_COPY);
    }
    if (factor <= 0.0) {
        L_WARNING("invalid factor; setting to 1.0\n", procName);
        factor = 1.0;
    }

        /* Require at least 3 valid boxes of both types */
    boxaSplitEvenOdd(boxas, 0, &boxae, &boxao);
    if (boxaGetValidCount(boxae) < 3 || boxaGetValidCount(boxao) < 3) {
        boxaDestroy(&boxae);
        boxaDestroy(&boxao);
        return boxaCopy(boxas, L_COPY);
    }

        /* Get the median heights for each set */
    boxaGetMedianVals(boxae, NULL, NULL, NULL, &he);
    boxaGetMedianVals(boxao, NULL, NULL, NULL, &ho);
    L_INFO("median he = %d, median ho = %d\n", procName, he, ho);

        /* If the difference in median height reaches the threshold %delh,
         * only adjust the side(s) of one of the sets.  If we choose
         * the minimum median height as the target, allow the target
         * to be scaled by a factor, typically near 1.0, of the
         * minimum median height.  And similarly if the target is
         * the maximum median height. */
    if (L_ABS(he - ho) > delh) {
        if (op == L_ADJUST_CHOOSE_MIN) {
                /* doeven == TRUE: the even set has the larger median
                 * and is the one to be shrunk toward the target */
            doeven = (ho < he) ? TRUE : FALSE;
            hmed = (l_int32)(factor * L_MIN(he, ho));
            hmed = L_MIN(hmed, L_MAX(he, ho));  /* don't make it bigger! */
        } else {  /* max height */
            doeven = (ho > he) ? TRUE : FALSE;
            hmed = (l_int32)(factor * L_MAX(he, ho));
            hmed = L_MAX(hmed, L_MIN(he, ho));  /* don't make it smaller! */
        }
            /* Adjust only the selected parity set; copy the other */
        if (doeven) {
            boxa1e = boxaAdjustHeightToTarget(NULL, boxae, sides, hmed, delh);
            boxa1o = boxaCopy(boxao, L_COPY);
        } else {  /* !doeven */
            boxa1e = boxaCopy(boxae, L_COPY);
            boxa1o = boxaAdjustHeightToTarget(NULL, boxao, sides, hmed, delh);
        }
    } else {
            /* Difference below threshold: leave both sets unchanged */
        boxa1e = boxaCopy(boxae, L_CLONE);
        boxa1o = boxaCopy(boxao, L_CLONE);
    }
    boxaDestroy(&boxae);
    boxaDestroy(&boxao);

        /* It can happen that the median is not a good measure for an
         * entire book.  In that case, the reconciliation above can do
         * more harm than good.  Sanity check by comparing height and y
         * differences of adjacent even/odd boxes, before and after
         * reconciliation. */
    boxad = boxaMergeEvenOdd(boxa1e, boxa1o, 0);
    boxaTestEvenOddHeight(boxas, boxad, start, &del1, &del2);
    boxaDestroy(&boxa1e);
    boxaDestroy(&boxa1o);
        /* Accept the adjustment unless it made the pairwise y/h
         * discrepancy significantly worse (10 units of slack) */
    if (del2 < del1 + 10.)
        return boxad;

        /* Using the median made it worse.  Skip reconciliation:
         * forcing all pairs of top and bottom values to have
         * maximum extent does not improve the situation either. */
    L_INFO("Got worse: del2 = %f > del1 = %f\n", procName, del2, del1);
    boxaDestroy(&boxad);
    return boxaCopy(boxas, L_COPY);
}
/*!
* \brief boxaTestEvenOddHeight()
*
* \param[in] boxa1, boxa2
* \param[in] start 0 if pairing (0,1), etc; 1 if pairing (1,2), etc
* \param[out] pdel1 root mean of (dely^2 + delh^2 for boxa1
* \param[out] pdel2 root mean of (dely^2 + delh^2 for boxa2
* \return 0 if OK, 1 on error
*
* <pre>
* Notes:
* (1) This compares differences in the y location and height of
* adjacent boxes, in each of the input boxa.
* </pre>
*/
/* Compare y-location and height similarity of adjacent box pairs in two
 * boxa, returning for each a root-mean measure of the paired
 * (dely^2 + delh^2) differences.  Smaller is more self-consistent. */
static l_int32
boxaTestEvenOddHeight(BOXA       *boxa1,
                      BOXA       *boxa2,
                      l_int32     start,
                      l_float32  *pdel1,
                      l_float32  *pdel2)
{
l_int32    k, count, pairs, ya1, yb1, ya2, yb2, ha1, hb1, ha2, hb2;
l_float32  sum1, sum2;

    PROCNAME("boxaTestEvenOddHeight");

    if (pdel1) *pdel1 = 0.0;
    if (pdel2) *pdel2 = 0.0;
    if (!pdel1 || !pdel2)
        return ERROR_INT("&del1 and &del2 not both defined", procName, 1);
    if (!boxa1 || !boxa2)
        return ERROR_INT("boxa1 and boxa2 not both defined", procName, 1);
    count = L_MIN(boxaGetCount(boxa1), boxaGetCount(boxa2));

        /* For each boxa separately, adjacent boxes are expected to have
         * similar y and h values.  Quantify the similarity by summing
         * the squared differences of y and of h over all pairs. */
    sum1 = sum2 = 0.0;
    pairs = (count - start) / 2;
    for (k = start; k < 2 * pairs; k += 2) {
        boxaGetBoxGeometry(boxa1, k, NULL, &ya1, NULL, &ha1);
        boxaGetBoxGeometry(boxa1, k + 1, NULL, &yb1, NULL, &hb1);
        sum1 += (l_float32)(ya1 - yb1) * (ya1 - yb1)
             + (ha1 - hb1) * (ha1 - hb1);
        boxaGetBoxGeometry(boxa2, k, NULL, &ya2, NULL, &ha2);
        boxaGetBoxGeometry(boxa2, k + 1, NULL, &yb2, NULL, &hb2);
        sum2 += (l_float32)(ya2 - yb2) * (ya2 - yb2)
             + (ha2 - hb2) * (ha2 - hb2);
    }

        /* Root of the average of the summed square differences */
    *pdel1 = (l_float32)sqrt((l_float64)sum1 / (0.5 * count));
    *pdel2 = (l_float32)sqrt((l_float64)sum2 / (0.5 * count));
    return 0;
}
/*!
* \brief boxaReconcilePairWidth()
*
* \param[in] boxas
* \param[in] delw threshold on adjacent width difference
* \param[in] op L_ADJUST_CHOOSE_MIN, L_ADJUST_CHOOSE_MAX
* \param[in] factor > 0.0, typically near 1.0
* \param[in] na [optional] indicator array allowing change
* \return boxad adjusted, or a copy of boxas on error
*
* <pre>
* Notes:
* (1) This reconciles differences in the width of adjacent boxes,
* by moving one side of one of the boxes in each pair.
* If the widths in the pair differ by more than some
* threshold, move either the left side for even boxes or
* the right side for odd boxes, depending on if we're choosing
* the min or max. If choosing min, the width of the max is
* set to factor * (width of min). If choosing max, the width
* of the min is set to factor * (width of max).
* (2) If %na exists, it is an indicator array corresponding to the
* boxes in %boxas. If %na != NULL, only boxes with an
* indicator value of 1 are allowed to adjust; otherwise,
* all boxes can adjust.
* (3) Typical input might be the output of boxaSmoothSequence(),
* where even and odd boxa have been independently regulated.
* </pre>
*/
BOXA *
boxaReconcilePairWidth(BOXA      *boxas,
                       l_int32    delw,
                       l_int32    op,
                       l_float32  factor,
                       NUMA      *na)
{
l_int32  i, ne, no, nmin, xe, we, xo, wo, inde, indo, x, w;
BOX     *boxe, *boxo;
BOXA    *boxae, *boxao, *boxad;

    PROCNAME("boxaReconcilePairWidth");

    if (!boxas)
        return (BOXA *)ERROR_PTR("boxas not defined", procName, NULL);
    if (factor <= 0.0) {
        L_WARNING("invalid factor; setting to 1.0\n", procName);
        factor = 1.0;
    }

        /* Taking the boxes in pairs, if the difference in width reaches
         * the threshold %delw, adjust the left or right side of one
         * of the pair. */
    boxaSplitEvenOdd(boxas, 0, &boxae, &boxao);
    ne = boxaGetCount(boxae);
    no = boxaGetCount(boxao);
    nmin = L_MIN(ne, no);
    for (i = 0; i < nmin; i++) {
            /* Set indicator values; without %na, all boxes can adjust */
        if (na) {
            numaGetIValue(na, 2 * i, &inde);
            numaGetIValue(na, 2 * i + 1, &indo);
        } else {
            inde = indo = 1;
        }
        if (inde == 0 && indo == 0) continue;

            /* L_CLONE gives handles to the boxes owned by the split
             * boxa, so boxSetGeometry() below modifies them in place */
        boxe = boxaGetBox(boxae, i, L_CLONE);
        boxo = boxaGetBox(boxao, i, L_CLONE);
        boxGetGeometry(boxe, &xe, NULL, &we, NULL);
        boxGetGeometry(boxo, &xo, NULL, &wo, NULL);
        if (we == 0 || wo == 0) {  /* if either is invalid; skip */
            boxDestroy(&boxe);
            boxDestroy(&boxo);
            continue;
        } else if (L_ABS(we - wo) > delw) {
            if (op == L_ADJUST_CHOOSE_MIN) {
                if (we > wo && inde == 1) {
                        /* move left side of even to the right */
                    w = factor * wo;  /* implicit float -> int truncation */
                    x = xe + (we - w);
                    boxSetGeometry(boxe, x, -1, w, -1);
                } else if (we < wo && indo == 1) {
                        /* move right side of odd to the left */
                    w = factor * we;
                    boxSetGeometry(boxo, -1, -1, w, -1);
                }
            } else {  /* maximize width */
                if (we < wo && inde == 1) {
                        /* move left side of even to the left */
                    w = factor * wo;
                    x = L_MAX(0, xe + (we - w));  /* clip to image edge */
                    w = we + (xe - x);  /* covers both cases for the max */
                    boxSetGeometry(boxe, x, -1, w, -1);
                } else if (we > wo && indo == 1) {
                        /* move right side of odd to the right */
                    w = factor * we;
                    boxSetGeometry(boxo, -1, -1, w, -1);
                }
            }
        }
        boxDestroy(&boxe);
        boxDestroy(&boxo);
    }

        /* Reassemble even/odd sets in original interleaved order */
    boxad = boxaMergeEvenOdd(boxae, boxao, 0);
    boxaDestroy(&boxae);
    boxaDestroy(&boxao);
    return boxad;
}
/*!
* \brief boxaPlotSides()
*
* \param[in] boxa source boxa
* \param[in] plotname [optional], can be NULL
* \param[out] pnal [optional] na of left sides
* \param[out] pnat [optional] na of top sides
* \param[out] pnar [optional] na of right sides
* \param[out] pnab [optional] na of bottom sides
* \param[out] ppixd [optional] pix of the output plot
* \return 0 if OK, 1 on error
*
* <pre>
* Notes:
* (1) This debugging function shows the progression of the four
* sides in the boxa. There must be at least 2 boxes.
* (2) If there are invalid boxes (e.g., if only even or odd
* indices have valid boxes), this will fill them with the
* nearest valid box before plotting.
* (3) The plotfiles are put in /tmp/lept/plots/, and are named
* either with %plotname or, if NULL, a default name.
* </pre>
*/
l_int32
boxaPlotSides(BOXA        *boxa,
              const char  *plotname,
              NUMA       **pnal,
              NUMA       **pnat,
              NUMA       **pnar,
              NUMA       **pnab,
              PIX        **ppixd)
{
char            buf[128], titlebuf[128];
static l_int32  plotid = 0;  /* makes default output names unique */
l_int32         n, i, w, h, left, top, right, bot;
BOXA           *boxat;
GPLOT          *gplot;
NUMA           *nal, *nat, *nar, *nab;

    PROCNAME("boxaPlotSides");

    if (pnal) *pnal = NULL;
    if (pnat) *pnat = NULL;
    if (pnar) *pnar = NULL;
    if (pnab) *pnab = NULL;
    if (ppixd) *ppixd = NULL;
    if (!boxa)
        return ERROR_INT("boxa not defined", procName, 1);
    if ((n = boxaGetCount(boxa)) < 2)
        return ERROR_INT("less than 2 boxes", procName, 1);

        /* Replace invalid boxes with the nearest valid one for plotting */
    boxat = boxaFillSequence(boxa, L_USE_ALL_BOXES, 0);

        /* Build the numas for each side */
    nal = numaCreate(n);
    nat = numaCreate(n);
    nar = numaCreate(n);
    nab = numaCreate(n);
    for (i = 0; i < n; i++) {
        boxaGetBoxGeometry(boxat, i, &left, &top, &w, &h);
        right = left + w - 1;
        bot = top + h - 1;
        numaAddNumber(nal, left);
        numaAddNumber(nat, top);
        numaAddNumber(nar, right);
        numaAddNumber(nab, bot);
    }
    boxaDestroy(&boxat);

        /* Generate the plot; %buf is the gplot root name */
    lept_mkdir("lept/plots");
    if (plotname) {
        snprintf(buf, sizeof(buf), "/tmp/lept/plots/sides.%s", plotname);
        snprintf(titlebuf, sizeof(titlebuf), "%s: Box sides vs. box index",
                 plotname);
    } else {
        snprintf(buf, sizeof(buf), "/tmp/lept/plots/sides.%d", plotid++);
        snprintf(titlebuf, sizeof(titlebuf), "Box sides vs. box index");
    }
    gplot = gplotCreate(buf, GPLOT_PNG, titlebuf,
                        "box index", "side location");
    gplotAddPlot(gplot, NULL, nal, GPLOT_LINES, "left side");
    gplotAddPlot(gplot, NULL, nat, GPLOT_LINES, "top side");
    gplotAddPlot(gplot, NULL, nar, GPLOT_LINES, "right side");
    gplotAddPlot(gplot, NULL, nab, GPLOT_LINES, "bottom side");
    gplotMakeOutput(gplot);
    gplotDestroy(&gplot);

        /* Optionally read the generated png back in as a pix */
    if (ppixd) {
        stringCat(buf, sizeof(buf), ".png");
        *ppixd = pixRead(buf);
    }

        /* Return or free each side numa, as requested */
    if (pnal)
        *pnal = nal;
    else
        numaDestroy(&nal);
    if (pnat)
        *pnat = nat;
    else
        numaDestroy(&nat);
    if (pnar)
        *pnar = nar;
    else
        numaDestroy(&nar);
    if (pnab)
        *pnab = nab;
    else
        numaDestroy(&nab);
    return 0;
}
/*!
* \brief boxaPlotSizes()
*
* \param[in] boxa source boxa
* \param[in] plotname [optional], can be NULL
* \param[out] pnaw [optional] na of widths
* \param[out] pnah [optional] na of heights
* \param[out] ppixd [optional] pix of the output plot
* \return 0 if OK, 1 on error
*
* <pre>
* Notes:
* (1) This debugging function shows the progression of box width
* and height in the boxa. There must be at least 2 boxes.
* (2) If there are invalid boxes (e.g., if only even or odd
* indices have valid boxes), this will fill them with the
* nearest valid box before plotting.
* (3) The plotfiles are put in /tmp/lept/plots/, and are named
* either with %plotname or, if NULL, a default name. Make sure
* that %plotname is a string with no whitespace characters.
* </pre>
*/
l_int32
boxaPlotSizes(BOXA        *boxa,
              const char  *plotname,
              NUMA       **pnaw,
              NUMA       **pnah,
              PIX        **ppixd)
{
char            buf[128], titlebuf[128];
static l_int32  plotid = 0;  /* makes default output names unique */
l_int32         n, i, w, h;
BOXA           *boxat;
GPLOT          *gplot;
NUMA           *naw, *nah;

    PROCNAME("boxaPlotSizes");

    if (pnaw) *pnaw = NULL;
    if (pnah) *pnah = NULL;
    if (ppixd) *ppixd = NULL;
    if (!boxa)
        return ERROR_INT("boxa not defined", procName, 1);
    if ((n = boxaGetCount(boxa)) < 2)
        return ERROR_INT("less than 2 boxes", procName, 1);

        /* Replace invalid boxes with the nearest valid one for plotting */
    boxat = boxaFillSequence(boxa, L_USE_ALL_BOXES, 0);

        /* Build the numas for the width and height */
    naw = numaCreate(n);
    nah = numaCreate(n);
    for (i = 0; i < n; i++) {
        boxaGetBoxGeometry(boxat, i, NULL, NULL, &w, &h);
        numaAddNumber(naw, w);
        numaAddNumber(nah, h);
    }
    boxaDestroy(&boxat);

        /* Generate the plot; %buf is the gplot root name */
    lept_mkdir("lept/plots");
    if (plotname) {
        snprintf(buf, sizeof(buf), "/tmp/lept/plots/size.%s", plotname);
        snprintf(titlebuf, sizeof(titlebuf), "%s: Box size vs. box index",
                 plotname);
    } else {
        snprintf(buf, sizeof(buf), "/tmp/lept/plots/size.%d", plotid++);
        snprintf(titlebuf, sizeof(titlebuf), "Box size vs. box index");
    }
    gplot = gplotCreate(buf, GPLOT_PNG, titlebuf,
                        "box index", "box dimension");
    gplotAddPlot(gplot, NULL, naw, GPLOT_LINES, "width");
    gplotAddPlot(gplot, NULL, nah, GPLOT_LINES, "height");
    gplotMakeOutput(gplot);
    gplotDestroy(&gplot);

        /* Optionally read the generated png back in as a pix */
    if (ppixd) {
        stringCat(buf, sizeof(buf), ".png");
        *ppixd = pixRead(buf);
    }

        /* Return or free the numas, as requested */
    if (pnaw)
        *pnaw = naw;
    else
        numaDestroy(&naw);
    if (pnah)
        *pnah = nah;
    else
        numaDestroy(&nah);
    return 0;
}
/*!
* \brief boxaFillSequence()
*
* \param[in] boxas with at least 3 boxes
* \param[in] useflag L_USE_ALL_BOXES, L_USE_SAME_PARITY_BOXES
* \param[in] debug 1 for debug output
* \return boxad filled boxa, or NULL on error
*
* <pre>
* Notes:
* (1) This simple function replaces invalid boxes with a copy of
* the nearest valid box, selected from either the entire
* sequence (L_USE_ALL_BOXES) or from the boxes with the
* same parity (L_USE_SAME_PARITY_BOXES). It returns a new boxa.
* (2) This is useful if you expect boxes in the sequence to
* vary slowly with index.
* </pre>
*/
/* Return a new boxa in which every invalid box has been replaced by a
 * copy of the nearest valid box, searching either the whole sequence
 * or only boxes of the same parity.  See the notes above. */
BOXA *
boxaFillSequence(BOXA    *boxas,
                 l_int32  useflag,
                 l_int32  debug)
{
l_int32  ntotal, nvalid;
BOXA    *boxa1, *boxa2, *boxad;

    PROCNAME("boxaFillSequence");

    if (!boxas)
        return (BOXA *)ERROR_PTR("boxas not defined", procName, NULL);
    if (useflag != L_USE_ALL_BOXES && useflag != L_USE_SAME_PARITY_BOXES)
        return (BOXA *)ERROR_PTR("invalid useflag", procName, NULL);

    ntotal = boxaGetCount(boxas);
    nvalid = boxaGetValidCount(boxas);
    if (ntotal == nvalid)  /* nothing to fill */
        return boxaCopy(boxas, L_COPY);
    if (debug)
        L_INFO("%d valid boxes, %d invalid boxes\n", procName,
               nvalid, ntotal - nvalid);
    if (useflag == L_USE_SAME_PARITY_BOXES && ntotal < 3) {
        L_WARNING("n < 3; some invalid\n", procName);
        return boxaCopy(boxas, L_COPY);
    }

    if (useflag == L_USE_ALL_BOXES) {
            /* Fill from nearest valid box anywhere in the sequence */
        boxad = boxaCopy(boxas, L_COPY);
        boxaFillAll(boxad);
    } else {
            /* Fill even and odd subsequences independently, then merge */
        boxaSplitEvenOdd(boxas, 0, &boxa1, &boxa2);
        boxaFillAll(boxa1);
        boxaFillAll(boxa2);
        boxad = boxaMergeEvenOdd(boxa1, boxa2, 0);
        boxaDestroy(&boxa1);
        boxaDestroy(&boxa2);
    }

    nvalid = boxaGetValidCount(boxad);
    if (ntotal != nvalid)
        L_WARNING("there are still %d invalid boxes\n", procName,
                  ntotal - nvalid);

    return boxad;
}
/*!
* \brief boxaFillAll()
*
* \param[in] boxa
* \return 0 if OK, 1 on error
*
* <pre>
* Notes:
* (1) This static function replaces every invalid box with the
* nearest valid box. If there are no valid boxes, it
* issues a warning.
* </pre>
*/
static l_int32
boxaFillAll(BOXA  *boxa)
{
l_int32   n, nv, i, j, spandown, spanup;
l_int32  *indic;
BOX      *box, *boxt;

    PROCNAME("boxaFillAll");

    if (!boxa)
        return ERROR_INT("boxa not defined", procName, 1);
    n = boxaGetCount(boxa);
    nv = boxaGetValidCount(boxa);
    if (n == nv) return 0;  /* nothing to fill */
    if (nv == 0) {
        L_WARNING("no valid boxes out of %d boxes\n", procName, n);
        return 0;
    }

        /* Make indicator array for valid boxes */
    if ((indic = (l_int32 *)LEPT_CALLOC(n, sizeof(l_int32))) == NULL)
        return ERROR_INT("indic not made", procName, 1);
    for (i = 0; i < n; i++) {
        box = boxaGetValidBox(boxa, i, L_CLONE);
        if (box)
            indic[i] = 1;
        boxDestroy(&box);
    }

        /* Replace invalid boxes with the nearest valid one.
         * For each invalid index, search down and up for the closest
         * valid box; ties (equal spans) are resolved upward. */
    for (i = 0; i < n; i++) {
        box = boxaGetValidBox(boxa, i, L_CLONE);
        if (!box) {
            spandown = spanup = 10000000;  /* larger than any real span */
            for (j = i - 1; j >= 0; j--) {
                if (indic[j] == 1) {
                    spandown = i - j;
                    break;
                }
            }
            for (j = i + 1; j < n; j++) {
                if (indic[j] == 1) {
                    spanup = j - i;
                    break;
                }
            }
            if (spandown < spanup)
                boxt = boxaGetBox(boxa, i - spandown, L_COPY);
            else
                boxt = boxaGetBox(boxa, i + spanup, L_COPY);
                /* boxaReplaceBox() takes ownership of boxt */
            boxaReplaceBox(boxa, i, boxt);
        }
        boxDestroy(&box);
    }

    LEPT_FREE(indic);
    return 0;
}
/*---------------------------------------------------------------------*
* Miscellaneous Boxa functions *
*---------------------------------------------------------------------*/
/*!
* \brief boxaGetExtent()
*
* \param[in] boxa
* \param[out] pw [optional] width
* \param[out] ph [optional] height
* \param[out] pbox [optional] minimum box containing all boxes
* in boxa
* \return 0 if OK, 1 on error
*
* <pre>
* Notes:
* (1) The returned w and h are the minimum size image
* that would contain all boxes untranslated.
* (2) If there are no valid boxes, returned w and h are 0 and
* all parameters in the returned box are 0. This
* is not an error, because an empty boxa is valid and
* boxaGetExtent() is required for serialization.
* </pre>
*/
/* Compute the minimum containing extent of all valid boxes in the boxa.
 * Returned w and h are the smallest untranslated image size holding all
 * boxes; %pbox gets the minimum bounding box.  With no valid boxes,
 * all returned values are 0 (not an error: needed for serialization). */
l_int32
boxaGetExtent(BOXA     *boxa,
              l_int32  *pw,
              l_int32  *ph,
              BOX     **pbox)
{
l_int32  i, count, bx, by, bw, bh, left, top, right, bot, havevalid;

    PROCNAME("boxaGetExtent");

    if (!pw && !ph && !pbox)
        return ERROR_INT("no ptrs defined", procName, 1);
    if (pw) *pw = 0;
    if (ph) *ph = 0;
    if (pbox) *pbox = NULL;
    if (!boxa)
        return ERROR_INT("boxa not defined", procName, 1);

    count = boxaGetCount(boxa);
    left = top = 100000000;
    right = bot = 0;
    havevalid = FALSE;
    for (i = 0; i < count; i++) {
        boxaGetBoxGeometry(boxa, i, &bx, &by, &bw, &bh);
        if (bw <= 0 || bh <= 0)  /* skip invalid boxes */
            continue;
        havevalid = TRUE;
        left = L_MIN(left, bx);
        top = L_MIN(top, by);
        right = L_MAX(right, bx + bw);
        bot = L_MAX(bot, by + bh);
    }
    if (!havevalid)  /* no valid boxes in boxa; zero extent */
        left = top = 0;
    if (pw) *pw = right;
    if (ph) *ph = bot;
    if (pbox)
        *pbox = boxCreate(left, top, right - left, bot - top);

    return 0;
}
/*!
* \brief boxaGetCoverage()
*
* \param[in] boxa
* \param[in] wc, hc dimensions of overall clipping rectangle with UL
 *                       corner at (0, 0) that is covered by the boxes.
* \param[in] exactflag 1 for guaranteeing an exact result; 0 for getting
* an exact result only if the boxes do not overlap
* \param[out] pfract sum of box area as fraction of w * h
* \return 0 if OK, 1 on error
*
* <pre>
* Notes:
* (1) The boxes in boxa are clipped to the input rectangle.
* (2) * When %exactflag == 1, we generate a 1 bpp pix of size
* wc x hc, paint all the boxes black, and count the fg pixels.
* This can take 1 msec on a large page with many boxes.
* * When %exactflag == 0, we clip each box to the wc x hc region
* and sum the resulting areas. This is faster.
* * The results are the same when none of the boxes overlap
* within the wc x hc region.
* </pre>
*/
l_int32
boxaGetCoverage(BOXA       *boxa,
                l_int32     wc,
                l_int32     hc,
                l_int32     exactflag,
                l_float32  *pfract)
{
l_int32  i, n, x, y, w, h, sum;
BOX     *box, *boxc;
PIX     *pixt;

    PROCNAME("boxaGetCoverage");

    if (!pfract)
        return ERROR_INT("&fract not defined", procName, 1);
    *pfract = 0.0;
    if (!boxa)
        return ERROR_INT("boxa not defined", procName, 1);
    n = boxaGetCount(boxa);
    if (n == 0)
        return ERROR_INT("no boxes in boxa", procName, 1);

    if (exactflag == 0) {  /* quick and dirty: sum of clipped box areas */
        sum = 0;
        for (i = 0; i < n; i++) {
            box = boxaGetBox(boxa, i, L_CLONE);
            if ((boxc = boxClipToRectangle(box, wc, hc)) != NULL) {
                boxGetGeometry(boxc, NULL, NULL, &w, &h);
                sum += w * h;
                boxDestroy(&boxc);
            }
            boxDestroy(&box);
        }
    } else {  /* slower and exact: paint the boxes and count fg pixels */
        pixt = pixCreate(wc, hc, 1);
        for (i = 0; i < n; i++) {
            box = boxaGetBox(boxa, i, L_CLONE);
            boxGetGeometry(box, &x, &y, &w, &h);
            pixRasterop(pixt, x, y, w, h, PIX_SET, NULL, 0, 0);
            boxDestroy(&box);
        }
        pixCountPixels(pixt, &sum, NULL);
        pixDestroy(&pixt);
    }

        /* Compute the clip area in float: the product wc * hc can
         * overflow l_int32 for large clipping regions. */
    *pfract = (l_float32)sum / ((l_float32)wc * (l_float32)hc);
    return 0;
}
/*!
* \brief boxaaSizeRange()
*
* \param[in] baa
* \param[out] pminw, pminh, pmaxw, pmaxh [optional] range of
* dimensions of all boxes
* \return 0 if OK, 1 on error
*/
/* Find the range of box widths and heights over all boxa in the baa,
 * by folding each boxa's size range into a global min/max. */
l_int32
boxaaSizeRange(BOXAA    *baa,
               l_int32  *pminw,
               l_int32  *pminh,
               l_int32  *pmaxw,
               l_int32  *pmaxh)
{
l_int32  i, count, wmin, hmin, wmax, hmax, bwmin, bhmin, bwmax, bhmax;
BOXA    *boxa;

    PROCNAME("boxaaSizeRange");

    if (!pminw && !pmaxw && !pminh && !pmaxh)
        return ERROR_INT("no data can be returned", procName, 1);
    if (pminw) *pminw = 0;
    if (pminh) *pminh = 0;
    if (pmaxw) *pmaxw = 0;
    if (pmaxh) *pmaxh = 0;
    if (!baa)
        return ERROR_INT("baa not defined", procName, 1);

    wmin = hmin = 100000000;
    wmax = hmax = 0;
    count = boxaaGetCount(baa);
    for (i = 0; i < count; i++) {
        boxa = boxaaGetBoxa(baa, i, L_CLONE);
        boxaSizeRange(boxa, &bwmin, &bhmin, &bwmax, &bhmax);
        wmin = L_MIN(wmin, bwmin);
        hmin = L_MIN(hmin, bhmin);
        wmax = L_MAX(wmax, bwmax);
        hmax = L_MAX(hmax, bhmax);
        boxaDestroy(&boxa);
    }
    if (pminw) *pminw = wmin;
    if (pminh) *pminh = hmin;
    if (pmaxw) *pmaxw = wmax;
    if (pmaxh) *pmaxh = hmax;

    return 0;
}
/*!
* \brief boxaSizeRange()
*
* \param[in] boxa
* \param[out] pminw, pminh, pmaxw, pmaxh [optional] range of
* dimensions of box in the array
* \return 0 if OK, 1 on error
*/
/* Find the range of widths and heights over all boxes in the boxa. */
l_int32
boxaSizeRange(BOXA     *boxa,
              l_int32  *pminw,
              l_int32  *pminh,
              l_int32  *pmaxw,
              l_int32  *pmaxh)
{
l_int32  i, count, bw, bh, wmin, hmin, wmax, hmax;

    PROCNAME("boxaSizeRange");

    if (!pminw && !pmaxw && !pminh && !pmaxh)
        return ERROR_INT("no data can be returned", procName, 1);
    if (pminw) *pminw = 0;
    if (pminh) *pminh = 0;
    if (pmaxw) *pmaxw = 0;
    if (pmaxh) *pmaxh = 0;
    if (!boxa)
        return ERROR_INT("boxa not defined", procName, 1);

    wmin = hmin = 100000000;
    wmax = hmax = 0;
    count = boxaGetCount(boxa);
    for (i = 0; i < count; i++) {
        boxaGetBoxGeometry(boxa, i, NULL, NULL, &bw, &bh);
        wmin = L_MIN(wmin, bw);
        hmin = L_MIN(hmin, bh);
        wmax = L_MAX(wmax, bw);
        hmax = L_MAX(hmax, bh);
    }
    if (pminw) *pminw = wmin;
    if (pminh) *pminh = hmin;
    if (pmaxw) *pmaxw = wmax;
    if (pmaxh) *pmaxh = hmax;

    return 0;
}
/*!
* \brief boxaLocationRange()
*
* \param[in] boxa
* \param[out] pminx, pminy, pmaxx, pmaxy [optional] range of
* UL corner positions
* \return 0 if OK, 1 on error
*/
/* Find the range of UL corner positions over all boxes in the boxa. */
l_int32
boxaLocationRange(BOXA     *boxa,
                  l_int32  *pminx,
                  l_int32  *pminy,
                  l_int32  *pmaxx,
                  l_int32  *pmaxy)
{
l_int32  i, count, bx, by, xmin, ymin, xmax, ymax;

    PROCNAME("boxaLocationRange");

    if (!pminx && !pminy && !pmaxx && !pmaxy)
        return ERROR_INT("no data can be returned", procName, 1);
    if (pminx) *pminx = 0;
    if (pminy) *pminy = 0;
    if (pmaxx) *pmaxx = 0;
    if (pmaxy) *pmaxy = 0;
    if (!boxa)
        return ERROR_INT("boxa not defined", procName, 1);

    xmin = ymin = 100000000;
    xmax = ymax = 0;
    count = boxaGetCount(boxa);
    for (i = 0; i < count; i++) {
        boxaGetBoxGeometry(boxa, i, &bx, &by, NULL, NULL);
        xmin = L_MIN(xmin, bx);
        ymin = L_MIN(ymin, by);
        xmax = L_MAX(xmax, bx);
        ymax = L_MAX(ymax, by);
    }
    if (pminx) *pminx = xmin;
    if (pminy) *pminy = ymin;
    if (pmaxx) *pmaxx = xmax;
    if (pmaxy) *pmaxy = ymax;

    return 0;
}
/*!
* \brief boxaGetSizes()
*
* \param[in] boxa
* \param[out] pnaw, pnah [optional] widths and heights of valid boxes
* \return 0 if OK, 1 on error
*/
l_int32
boxaGetSizes(BOXA   *boxa,
             NUMA  **pnaw,
             NUMA  **pnah)
{
l_int32  i, n, nv, w, h;
BOX     *box;

    PROCNAME("boxaGetSizes");

    if (pnaw) *pnaw = NULL;
    if (pnah) *pnah = NULL;
    if (!pnaw && !pnah)
        return ERROR_INT("no output requested", procName, 1);
    if (!boxa)
        return ERROR_INT("boxa not defined", procName, 1);

        /* Size the output arrays by the number of valid boxes, but
         * scan the entire boxa: valid boxes can occur at any index,
         * so using the valid count as the loop bound would skip any
         * valid boxes located at index >= nv. */
    nv = boxaGetValidCount(boxa);
    if (pnaw) *pnaw = numaCreate(nv);
    if (pnah) *pnah = numaCreate(nv);
    n = boxaGetCount(boxa);
    for (i = 0; i < n; i++) {
        box = boxaGetValidBox(boxa, i, L_COPY);
        if (box) {
            boxGetGeometry(box, NULL, NULL, &w, &h);
            if (pnaw) numaAddNumber(*pnaw, w);
            if (pnah) numaAddNumber(*pnah, h);
            boxDestroy(&box);
        }
    }
    return 0;
}
/*!
* \brief boxaGetArea()
*
* \param[in] boxa
* \param[out] parea total area of all boxes
* \return 0 if OK, 1 on error
*
* <pre>
* Notes:
* (1) Measures the total area of the boxes, without regard to overlaps.
* </pre>
*/
/* Sum the areas of all boxes in the boxa, without regard to overlaps. */
l_int32
boxaGetArea(BOXA     *boxa,
            l_int32  *parea)
{
l_int32  i, count, bw, bh, area;

    PROCNAME("boxaGetArea");

    if (!parea)
        return ERROR_INT("&area not defined", procName, 1);
    *parea = 0;
    if (!boxa)
        return ERROR_INT("boxa not defined", procName, 1);

    area = 0;
    count = boxaGetCount(boxa);
    for (i = 0; i < count; i++) {
        boxaGetBoxGeometry(boxa, i, NULL, NULL, &bw, &bh);
        area += bw * bh;
    }
    *parea = area;
    return 0;
}
/*!
* \brief boxaDisplayTiled()
*
* \param[in] boxas
* \param[in] pixa [optional] background for each box
* \param[in] maxwidth of output image
* \param[in] linewidth width of box outlines, before scaling
* \param[in] scalefactor applied to every box; use 1.0 for no scaling
* \param[in] background 0 for white, 1 for black; this is the color
* of the spacing between the images
* \param[in] spacing between images, and on outside
* \param[in] border width of black border added to each image;
* use 0 for no border
* \return pixd of tiled images of boxes, or NULL on error
*
* <pre>
* Notes:
* (1) Displays each box separately in a tiled 32 bpp image.
* (2) If pixa is defined, it must have the same count as the boxa,
 *          and it will be a background over which each box is rendered.
* If pixa is not defined, the boxes will be rendered over
* blank images of identical size.
* (3) See pixaDisplayTiledInRows() for other parameters.
* </pre>
*/
PIX *
boxaDisplayTiled(BOXA      *boxas,
                 PIXA      *pixa,
                 l_int32    maxwidth,
                 l_int32    linewidth,
                 l_float32  scalefactor,
                 l_int32    background,
                 l_int32    spacing,
                 l_int32    border)
{
char     buf[32];
l_int32  i, n, npix, w, h, fontsize;
L_BMF   *bmf;
BOX     *box;
BOXA    *boxa;
PIX     *pix1, *pix2, *pixd;
PIXA    *pixat;

    PROCNAME("boxaDisplayTiled");

    if (!boxas)
        return (PIX *)ERROR_PTR("boxas not defined", procName, NULL);

        /* Work only with the valid boxes */
    boxa = boxaSaveValid(boxas, L_COPY);
    n = boxaGetCount(boxa);
    if (pixa) {
        npix = pixaGetCount(pixa);
        if (n != npix) {
            boxaDestroy(&boxa);
            return (PIX *)ERROR_PTR("boxa and pixa counts differ",
                                    procName, NULL);
        }
    }

        /* Because the bitmap font will be reduced when tiled, choose the
         * font size inversely with the scale factor. */
    if (scalefactor > 0.8)
        fontsize = 6;
    else if (scalefactor > 0.6)
        fontsize = 10;
    else if (scalefactor > 0.4)
        fontsize = 14;
    else if (scalefactor > 0.3)
        fontsize = 18;
    else fontsize = 20;
    bmf = bmfCreate(NULL, fontsize);

        /* Render each box on its own image: a white 32 bpp canvas of
         * the boxa extent, or the corresponding pixa image if given */
    pixat = pixaCreate(n);
    boxaGetExtent(boxa, &w, &h, NULL);
    for (i = 0; i < n; i++) {
        box = boxaGetBox(boxa, i, L_CLONE);
        if (!pixa) {
            pix1 = pixCreate(w, h, 32);
            pixSetAll(pix1);
        } else {
            pix1 = pixaGetPix(pixa, i, L_COPY);
        }
            /* Green frame around the canvas, index label below,
             * and the box itself drawn in red */
        pixSetBorderVal(pix1, 0, 0, 0, 2, 0x0000ff00);
        snprintf(buf, sizeof(buf), "%d", i);
        pix2 = pixAddSingleTextblock(pix1, bmf, buf, 0x00ff0000,
                                     L_ADD_BELOW, NULL);
        pixDestroy(&pix1);
        pixRenderBoxArb(pix2, box, linewidth, 255, 0, 0);
        pixaAddPix(pixat, pix2, L_INSERT);
        boxDestroy(&box);
    }
    bmfDestroy(&bmf);
    boxaDestroy(&boxa);

        /* Tile the per-box images into the final display */
    pixd = pixaDisplayTiledInRows(pixat, 32, maxwidth, scalefactor, background,
                                  spacing, border);
    pixaDestroy(&pixat);
    return pixd;
}
| {
"content_hash": "7347ddc847b2cea02beffff28fa2b5dc",
"timestamp": "",
"source": "github",
"line_count": 2744,
"max_line_length": 80,
"avg_line_length": 33.69606413994169,
"alnum_prop": 0.5428175899288357,
"repo_name": "renard314/textfairy",
"id": "233df4b32c8120ce9553478dd2994d233c7563f4",
"size": "93976",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "app/libs/leptonica/leptonica/src/boxfunc4.c",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "29869"
},
{
"name": "Batchfile",
"bytes": "125"
},
{
"name": "C",
"bytes": "14662896"
},
{
"name": "C++",
"bytes": "8614900"
},
{
"name": "CMake",
"bytes": "72361"
},
{
"name": "Dockerfile",
"bytes": "450"
},
{
"name": "HTML",
"bytes": "513192"
},
{
"name": "Java",
"bytes": "636382"
},
{
"name": "Kotlin",
"bytes": "31552"
},
{
"name": "M4",
"bytes": "5645"
},
{
"name": "Makefile",
"bytes": "64144"
},
{
"name": "Module Management System",
"bytes": "13253"
},
{
"name": "Objective-C",
"bytes": "1501"
},
{
"name": "Perl",
"bytes": "6181"
},
{
"name": "PostScript",
"bytes": "3630"
},
{
"name": "Python",
"bytes": "18946"
},
{
"name": "Roff",
"bytes": "78198"
},
{
"name": "SAS",
"bytes": "13756"
},
{
"name": "Shell",
"bytes": "494839"
},
{
"name": "Smalltalk",
"bytes": "6201"
},
{
"name": "TeX",
"bytes": "2741"
},
{
"name": "WebAssembly",
"bytes": "13560"
}
],
"symlink_target": ""
} |
if (!window.InspectorExtensionRegistry) {
  /**
   * No-op fallback used when the host has not installed a real
   * extension registry.
   * @unrestricted
   */
  const InspectorExtensionRegistryStub = class {
    /** Does nothing: the stub never reports any extensions. */
    getExtensionsAsync() {
    }
  };
  Extensions.InspectorExtensionRegistryStub = InspectorExtensionRegistryStub;
  self.InspectorExtensionRegistry = new Extensions.InspectorExtensionRegistryStub();
}
| {
"content_hash": "b8ccf29bba2ca0d24910d000b6213d55",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 84,
"avg_line_length": 21.166666666666668,
"alnum_prop": 0.7165354330708661,
"repo_name": "weexteam/weex-toolkit",
"id": "54c182444870daa46a9023894f861c4d2a0d12f0",
"size": "1816",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "packages/@weex/plugins/debug/frontend/src/assets/inspector/extensions/ExtensionRegistryStub.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "510216"
},
{
"name": "HTML",
"bytes": "60414"
},
{
"name": "JavaScript",
"bytes": "10092059"
},
{
"name": "Python",
"bytes": "3359"
},
{
"name": "Shell",
"bytes": "230"
},
{
"name": "TypeScript",
"bytes": "584132"
}
],
"symlink_target": ""
} |
<?php
namespace Chamilo\Core\Repository\ContentObject\Survey\Page\Question\Choice\Common\Rendition\Html;
use Chamilo\Libraries\Platform\Translation;
/**
*
* @package repository.content_object.survey_matrix_question
* @author Eduard Vossen
* @author Magali Gillard
* @author Hans De Bisschop
*/
class HtmlFormRenditionImplementation extends \Chamilo\Core\Repository\ContentObject\Survey\Page\Question\Common\Rendition\Html\HtmlFormRenditionImplementation
{

    /**
     * Renders a choice question as a two-column table: one radio button per
     * option next to the option text, with alternating row styling.
     *
     * @return \Chamilo\Libraries\Format\Form\FormValidator
     */
    public function initialize()
    {
        $formValidator = parent::initialize();
        $renderer = $formValidator->get_renderer();
        $question = $this->get_content_object();
        $questionId = $this->getQuestionId();
        $options = $question->getOptions();

        // Table header: an empty cell plus the instruction column.
        $table_header = array();
        $table_header[] = '<div >';
        $table_header[] = '<table class="table table-striped table-bordered table-hover table-data take_survey">';
        $table_header[] = '<thead>';
        $table_header[] = '<tr>';
        $table_header[] = '<th></th>';
        $table_header[] = '<th class="info" >' . Translation::get('SelectYourChoice') . '</th>';
        $table_header[] = '</tr>';
        $table_header[] = '</thead>';
        $table_header[] = '<tbody>';
        $formValidator->addElement('html', implode(PHP_EOL, $table_header));

        $attributes = $this->getAttributes();

        // Prefix the element name when this question is rendered inside a
        // larger (multi-question) form, to keep the radio group unique.
        if ($this->getPrefix())
        {
            $option_name = $this->getPrefix() . '_' . $questionId;
        }
        else
        {
            $option_name = $questionId;
        }

        foreach ($options as $i => $option)
        {
            // One radio + static option text per row; all radios share
            // $option_name so only a single choice can be selected.
            $group = array();
            $radio = $formValidator->createElement('radio', $option_name, null, null, $i, $attributes);
            $group[] = $radio;
            $group[] = $formValidator->createElement(
                'static',
                null,
                null,
                '<div style="text-align: left;">' . $option . '</div>');
            $formValidator->addGroup($group, 'choice_option_' . $i, null, '', false);

            $renderer->setElementTemplate(
                '<tr class="' . ($i % 2 == 0 ? 'row_even' : 'row_odd') . '">{element}</tr>',
                'choice_option_' . $i);
            $renderer->setGroupElementTemplate('<td style="text-align: center;">{element}</td>', 'choice_option_' . $i);
        }

        // Bug fix: initialize the footer buffer before appending; the
        // original appended to an undeclared variable (PHP notice).
        $table_footer = array();
        $table_footer[] = '</tbody>';
        $table_footer[] = '</table>';
        $table_footer[] = '</div>';
        $formValidator->addElement('html', implode(PHP_EOL, $table_footer));

        return $formValidator;
    }
}
"content_hash": "0199b79181f87febba214a3e0acb5b0b",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 159,
"avg_line_length": 34.67901234567901,
"alnum_prop": 0.5286578853684585,
"repo_name": "cosnicsTHLU/cosnics",
"id": "b2eaf0e96e11003c20bcd202d0d433cd43f8b0ac",
"size": "2809",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/Chamilo/Core/Repository/ContentObject/Survey/Page/Question/Choice/Common/Rendition/Html/HtmlFormRenditionImplementation.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "86189"
},
{
"name": "C#",
"bytes": "23363"
},
{
"name": "CSS",
"bytes": "1135928"
},
{
"name": "CoffeeScript",
"bytes": "17503"
},
{
"name": "Gherkin",
"bytes": "24033"
},
{
"name": "HTML",
"bytes": "542339"
},
{
"name": "JavaScript",
"bytes": "5296016"
},
{
"name": "Makefile",
"bytes": "3221"
},
{
"name": "PHP",
"bytes": "21903304"
},
{
"name": "Ruby",
"bytes": "618"
},
{
"name": "Shell",
"bytes": "6385"
},
{
"name": "Smarty",
"bytes": "15750"
},
{
"name": "XSLT",
"bytes": "44115"
}
],
"symlink_target": ""
} |
package rc.championship.decoder.explorer;
import javax.swing.ActionMap;
import org.netbeans.api.settings.ConvertAsProperties;
import org.openide.awt.ActionID;
import org.openide.awt.ActionReference;
import org.openide.explorer.ExplorerManager;
import org.openide.explorer.ExplorerUtils;
import org.openide.explorer.view.BeanTreeView;
import org.openide.util.NbBundle.Messages;
import org.openide.windows.TopComponent;
/**
 * NetBeans explorer window that shows the available decoders in a tree
 * view, rooted at a {@code DecoderRootNode}. Implements
 * {@code ExplorerManager.Provider} so the platform's explorer actions
 * (e.g. delete) can locate the manager for this window.
 */
@ConvertAsProperties(
        dtd = "-//rc.championship.decoder.explorer//DecoderExplorer//EN",
        autostore = false
)
@TopComponent.Description(
        preferredID = "DecoderExplorerTopComponent",
        //iconBase="SET/PATH/TO/ICON/HERE",
        persistenceType = TopComponent.PERSISTENCE_ALWAYS
)
@TopComponent.Registration(mode = "explorer", openAtStartup = true)
@ActionID(category = "Window", id = "rc.championship.decoder.explorer.DecoderExplorerTopComponent")
@ActionReference(path = "Menu/Window" /*, position = 333 */)
@TopComponent.OpenActionRegistration(
        displayName = "#CTL_DecoderExplorerAction",
        preferredID = "DecoderExplorerTopComponent"
)
@Messages({
    "CTL_DecoderExplorerAction=DecoderExplorer",
    "CTL_DecoderExplorerTopComponent=DecoderExplorer Window",
    "HINT_DecoderExplorerTopComponent=This is a DecoderExplorer window"
})
public final class DecoderExplorerTopComponent extends TopComponent implements ExplorerManager.Provider {

    // Holds the node selection/state for this window; published through
    // getExplorerManager() (ExplorerManager.Provider).
    private transient ExplorerManager explorerManager = new ExplorerManager();

    /**
     * Builds the window: wires the platform "delete" action to the explorer
     * manager, publishes the explorer lookup, and installs a
     * {@code DecoderRootNode} (displayed as "Decoders") as the tree root.
     */
    public DecoderExplorerTopComponent() {
        initComponents();
        setName(Bundle.CTL_DecoderExplorerTopComponent());
        setToolTipText(Bundle.HINT_DecoderExplorerTopComponent());

        ActionMap actionMap = getActionMap();
        actionMap.put("delete", ExplorerUtils.actionDelete(explorerManager, true)); // NOI18N
        associateLookup(ExplorerUtils.createLookup(explorerManager, actionMap));

        DecoderFactory decoderFactory = new DecoderFactory();
        explorerManager.setRootContext(new DecoderRootNode(decoderFactory));
        explorerManager.getRootContext().setDisplayName("Decoders");
    }

    /** @return the explorer manager backing this window's tree view */
    public ExplorerManager getExplorerManager() {
        return explorerManager;
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        decoderPane = new BeanTreeView();

        setLayout(new java.awt.BorderLayout());
        add(decoderPane, java.awt.BorderLayout.CENTER);
    }// </editor-fold>//GEN-END:initComponents

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JScrollPane decoderPane;
    // End of variables declaration//GEN-END:variables

    // Enable/disable the explorer actions together with the window lifecycle.
    @Override
    public void componentOpened() {
        ExplorerUtils.activateActions(explorerManager, true);
    }

    @Override
    public void componentClosed() {
        ExplorerUtils.activateActions(explorerManager, false);
    }

    void writeProperties(java.util.Properties p) {
        // better to version settings since initial version as advocated at
        // http://wiki.apidesign.org/wiki/PropertyFiles
        p.setProperty("version", "1.0");
        // TODO store your settings
    }

    void readProperties(java.util.Properties p) {
        String version = p.getProperty("version");
        // TODO read your settings according to their version
    }
}
| {
"content_hash": "0122631f77acbde61625d6cb75e21d40",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 105,
"avg_line_length": 37.597938144329895,
"alnum_prop": 0.72772141486153,
"repo_name": "softstuff/rc-championship-platform",
"id": "b8ac5a90a0d9d4c5e34e95cd3065a0c646d65abd",
"size": "3647",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rc-championship-platform/rc-championship-decoders/decoder-control/src/main/java/rc/championship/decoder/explorer/DecoderExplorerTopComponent.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "162"
},
{
"name": "Java",
"bytes": "305103"
}
],
"symlink_target": ""
} |
package net.uiqui.oblivion.client.rest;
import java.io.IOException;
import java.net.URL;
import java.util.concurrent.TimeUnit;
import net.uiqui.oblivion.client.api.Cluster;
import com.squareup.okhttp.MediaType;
import com.squareup.okhttp.OkHttpClient;
import com.squareup.okhttp.Request;
import com.squareup.okhttp.RequestBody;
import com.squareup.okhttp.Response;
/**
 * Thin HTTP client over OkHttp with fixed 5-second connect/read/write
 * timeouts and a cluster-aware retry interceptor. All verbs funnel
 * through a single execute step so request handling stays consistent.
 */
public class RestClient {
	public static final MediaType JSON = MediaType.parse("application/json; charset=utf-8");

	private final OkHttpClient client;

	/**
	 * @param cluster cluster description handed to the {@link RetryHandler}
	 *                interceptor so failed calls can be retried on other nodes
	 */
	public RestClient(final Cluster cluster) {
		client = new OkHttpClient();
		client.setConnectTimeout(5, TimeUnit.SECONDS);
		client.setWriteTimeout(5, TimeUnit.SECONDS);
		client.setReadTimeout(5, TimeUnit.SECONDS);
		client.interceptors().add(new RetryHandler(cluster));
	}

	// Shared execute/parse step; factored out of the five public verbs,
	// which previously duplicated it verbatim.
	private RestOutput execute(final Request request) throws IOException {
		final Response response = client.newCall(request).execute();
		return RestOutput.parse(response);
	}

	/** PUT the JSON body to the URL. */
	public RestOutput put(final URL url, final String json) throws IOException {
		final RequestBody body = RequestBody.create(JSON, json);
		return execute(new Request.Builder().url(url).put(body).build());
	}

	/** POST the JSON body to the URL. */
	public RestOutput post(final URL url, final String json) throws IOException {
		final RequestBody body = RequestBody.create(JSON, json);
		return execute(new Request.Builder().url(url).post(body).build());
	}

	/** GET the URL. */
	public RestOutput get(final URL url) throws IOException {
		return execute(new Request.Builder().url(url).build());
	}

	/** DELETE the URL. */
	public RestOutput delete(final URL url) throws IOException {
		return execute(new Request.Builder().url(url).delete().build());
	}

	/** HEAD request (headers/status only, no body). */
	public RestOutput head(final URL url) throws IOException {
		return execute(new Request.Builder().url(url).head().build());
	}
}
| {
"content_hash": "3d055fa7c569083ac208f08cfacf829b",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 89,
"avg_line_length": 28.45945945945946,
"alnum_prop": 0.7502374169040835,
"repo_name": "0blivi0n/oblivion-rest-client-java",
"id": "aff760e195f58ad6da902bfb4c12483fd17e64c2",
"size": "2800",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/net/uiqui/oblivion/client/rest/RestClient.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "43376"
}
],
"symlink_target": ""
} |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[TcType]{Types used in the typechecker}
This module provides the Type interface for front-end parts of the
compiler. These parts
* treat "source types" as opaque:
newtypes, and predicates are meaningful.
* look through usage types
The "tc" prefix is for "TypeChecker", because the type checker
is the principal client.
-}
{-# LANGUAGE CPP, MultiWayIf #-}
module TcType (
--------------------------------
-- Types
TcType, TcSigmaType, TcRhoType, TcTauType, TcPredType, TcThetaType,
TcTyVar, TcTyVarSet, TcDTyVarSet, TcTyCoVarSet, TcDTyCoVarSet,
TcKind, TcCoVar, TcTyCoVar, TcTyBinder, TcTyVarBinder, TcTyCon,
ExpType(..), ExpSigmaType, ExpRhoType, mkCheckExpType,
SyntaxOpType(..), synKnownType, mkSynFunTys,
-- TcLevel
TcLevel(..), topTcLevel, pushTcLevel, isTopTcLevel,
strictlyDeeperThan, sameDepthAs, fmvTcLevel,
--------------------------------
-- MetaDetails
UserTypeCtxt(..), pprUserTypeCtxt, isSigMaybe,
TcTyVarDetails(..), pprTcTyVarDetails, vanillaSkolemTv, superSkolemTv,
MetaDetails(Flexi, Indirect), MetaInfo(..),
isImmutableTyVar, isSkolemTyVar, isMetaTyVar, isMetaTyVarTy, isTyVarTy,
isSigTyVar, isOverlappableTyVar, isTyConableTyVar,
isFskTyVar, isFmvTyVar, isFlattenTyVar,
isAmbiguousTyVar, metaTvRef, metaTyVarInfo,
isFlexi, isIndirect, isRuntimeUnkSkol,
metaTyVarTcLevel, setMetaTyVarTcLevel, metaTyVarTcLevel_maybe,
isTouchableMetaTyVar, isTouchableOrFmv,
isFloatedTouchableMetaTyVar,
canUnifyWithPolyType,
--------------------------------
-- Builders
mkPhiTy, mkInfSigmaTy, mkSpecSigmaTy, mkSigmaTy,
mkNakedTyConApp, mkNakedAppTys, mkNakedAppTy,
mkNakedCastTy,
--------------------------------
-- Splitters
-- These are important because they do not look through newtypes
getTyVar,
tcSplitForAllTy_maybe,
tcSplitForAllTys, tcSplitPiTys, tcSplitForAllTyVarBndrs,
tcSplitPhiTy, tcSplitPredFunTy_maybe,
tcSplitFunTy_maybe, tcSplitFunTys, tcFunArgTy, tcFunResultTy, tcSplitFunTysN,
tcSplitTyConApp, tcSplitTyConApp_maybe, tcRepSplitTyConApp_maybe,
tcTyConAppTyCon, tcTyConAppArgs,
tcSplitAppTy_maybe, tcSplitAppTy, tcSplitAppTys, tcRepSplitAppTy_maybe,
tcGetTyVar_maybe, tcGetTyVar, nextRole,
tcSplitSigmaTy, tcDeepSplitSigmaTy_maybe,
---------------------------------
-- Predicates.
-- Again, newtypes are opaque
eqType, eqTypes, nonDetCmpType, nonDetCmpTypes, eqTypeX,
pickyEqType, tcEqType, tcEqKind, tcEqTypeNoKindCheck, tcEqTypeVis,
isSigmaTy, isRhoTy, isRhoExpTy, isOverloadedTy,
isFloatingTy, isDoubleTy, isFloatTy, isIntTy, isWordTy, isStringTy,
isIntegerTy, isBoolTy, isUnitTy, isCharTy, isCallStackTy, isCallStackPred,
isTauTy, isTauTyCon, tcIsTyVarTy, tcIsForAllTy,
isPredTy, isTyVarClassPred, isTyVarExposed, isTyVarUnderDatatype,
checkValidClsArgs, hasTyVarHead,
isRigidEqPred, isRigidTy,
---------------------------------
-- Misc type manipulators
deNoteType, occurCheckExpand, OccCheckResult(..),
orphNamesOfType, orphNamesOfCo,
orphNamesOfTypes, orphNamesOfCoCon,
getDFunTyKey,
evVarPred_maybe, evVarPred,
---------------------------------
-- Predicate types
mkMinimalBySCs, transSuperClasses,
pickQuantifiablePreds, pickCapturedPreds,
immSuperClasses,
isImprovementPred,
-- * Finding type instances
tcTyFamInsts,
-- * Finding "exact" (non-dead) type variables
exactTyCoVarsOfType, exactTyCoVarsOfTypes,
splitDepVarsOfType, splitDepVarsOfTypes, TcDepVars(..), tcDepVarSet,
-- * Extracting bound variables
allBoundVariables, allBoundVariabless,
---------------------------------
-- Foreign import and export
isFFIArgumentTy, -- :: DynFlags -> Safety -> Type -> Bool
isFFIImportResultTy, -- :: DynFlags -> Type -> Bool
isFFIExportResultTy, -- :: Type -> Bool
isFFIExternalTy, -- :: Type -> Bool
isFFIDynTy, -- :: Type -> Type -> Bool
isFFIPrimArgumentTy, -- :: DynFlags -> Type -> Bool
isFFIPrimResultTy, -- :: DynFlags -> Type -> Bool
isFFILabelTy, -- :: Type -> Bool
isFFITy, -- :: Type -> Bool
isFunPtrTy, -- :: Type -> Bool
tcSplitIOType_maybe, -- :: Type -> Maybe Type
--------------------------------
-- Rexported from Kind
Kind, typeKind,
liftedTypeKind,
constraintKind,
isLiftedTypeKind, isUnliftedTypeKind, classifiesTypeWithValues,
--------------------------------
-- Rexported from Type
Type, PredType, ThetaType, TyBinder, ArgFlag(..),
mkForAllTy, mkForAllTys, mkInvForAllTys, mkSpecForAllTys, mkInvForAllTy,
mkFunTy, mkFunTys,
mkTyConApp, mkAppTy, mkAppTys,
mkTyConTy, mkTyVarTy,
mkTyVarTys,
isClassPred, isEqPred, isNomEqPred, isIPPred,
mkClassPred,
isDictLikeTy,
tcSplitDFunTy, tcSplitDFunHead, tcSplitMethodTy,
isRuntimeRepVar, isRuntimeRepPolymorphic,
isVisibleBinder, isInvisibleBinder,
-- Type substitutions
TCvSubst(..), -- Representation visible to a few friends
TvSubstEnv, emptyTCvSubst,
zipTvSubst,
mkTvSubstPrs, notElemTCvSubst, unionTCvSubst,
getTvSubstEnv, setTvSubstEnv, getTCvInScope, extendTCvInScope,
extendTCvInScopeList, extendTCvInScopeSet, extendTvSubstAndInScope,
Type.lookupTyVar, Type.extendTCvSubst, Type.substTyVarBndr,
Type.extendTvSubst,
isInScope, mkTCvSubst, mkTvSubst, zipTyEnv, zipCoEnv,
Type.substTy, substTys, substTyWith, substTyWithCoVars,
substTyAddInScope,
substTyUnchecked, substTysUnchecked, substThetaUnchecked,
substTyWithUnchecked,
substCoUnchecked, substCoWithUnchecked,
substTheta,
isUnliftedType, -- Source types are always lifted
isUnboxedTupleType, -- Ditto
isPrimitiveType,
coreView,
tyCoVarsOfType, tyCoVarsOfTypes, closeOverKinds,
tyCoFVsOfType, tyCoFVsOfTypes,
tyCoVarsOfTypeDSet, tyCoVarsOfTypesDSet, closeOverKindsDSet,
tyCoVarsOfTypeList, tyCoVarsOfTypesList,
--------------------------------
-- Transforming Types to TcTypes
toTcType, -- :: Type -> TcType
toTcTypeBag, -- :: Bag EvVar -> Bag EvVar
pprKind, pprParendKind, pprSigmaType,
pprType, pprParendType, pprTypeApp, pprTyThingCategory,
pprTheta, pprThetaArrowTy, pprClassPred,
pprTvBndr, pprTvBndrs,
TypeSize, sizeType, sizeTypes, toposortTyVars
) where
#include "HsVersions.h"
-- friends:
import Kind
import TyCoRep
import Class
import Var
import ForeignCall
import VarSet
import Coercion
import Type
import TyCon
-- others:
import DynFlags
import CoreFVs
import Name -- hiding (varName)
-- We use this to make dictionaries for type literals.
-- Perhaps there's a better way to do this?
import NameSet
import VarEnv
import PrelNames
import TysWiredIn( coercibleClass, unitTyCon, unitTyConKey
, listTyCon, constraintKind )
import BasicTypes
import Util
import Bag
import Maybes
import Pair( pFst )
import Outputable
import FastString
import ErrUtils( Validity(..), MsgDoc, isValid )
import FV
import qualified GHC.LanguageExtensions as LangExt
import Data.IORef
import Control.Monad (liftM, ap)
import Data.Functor.Identity
{-
************************************************************************
* *
Types
* *
************************************************************************
The type checker divides the generic Type world into the
following more structured beasts:
sigma ::= forall tyvars. phi
-- A sigma type is a qualified type
--
-- Note that even if 'tyvars' is empty, theta
-- may not be: e.g. (?x::Int) => Int
-- Note that 'sigma' is in prenex form:
-- all the foralls are at the front.
-- A 'phi' type has no foralls to the right of
-- an arrow
phi :: theta => rho
rho ::= sigma -> rho
| tau
-- A 'tau' type has no quantification anywhere
-- Note that the args of a type constructor must be taus
tau ::= tyvar
| tycon tau_1 .. tau_n
| tau_1 tau_2
| tau_1 -> tau_2
-- In all cases, a (saturated) type synonym application is legal,
-- provided it expands to the required form.
-}
type TcTyVar = TyVar -- Used only during type inference
type TcCoVar = CoVar -- Used only during type inference
type TcType = Type -- A TcType can have mutable type variables
type TcTyCoVar = Var -- Either a TcTyVar or a CoVar
-- Invariant on ForAllTy in TcTypes:
-- forall a. T
-- a cannot occur inside a MutTyVar in T; that is,
-- T is "flattened" before quantifying over a
type TcTyVarBinder = TyVarBinder
type TcTyBinder = TyBinder
type TcTyCon = TyCon -- these can be the TcTyCon constructor
-- These types do not have boxy type variables in them
type TcPredType = PredType
type TcThetaType = ThetaType
type TcSigmaType = TcType
type TcRhoType = TcType -- Note [TcRhoType]
type TcTauType = TcType
type TcKind = Kind
type TcTyVarSet = TyVarSet
type TcTyCoVarSet = TyCoVarSet
type TcDTyVarSet = DTyVarSet
type TcDTyCoVarSet = DTyCoVarSet
{- *********************************************************************
* *
ExpType: an "expected type" in the type checker
* *
********************************************************************* -}
-- | An expected type to check against during type-checking.
-- See Note [ExpType] in TcMType, where you'll also find manipulators.
data ExpType = Check TcType
| Infer Unique -- for debugging only
TcLevel -- See Note [TcLevel of ExpType] in TcMType
Kind
(IORef (Maybe TcType))
type ExpSigmaType = ExpType
type ExpRhoType = ExpType
instance Outputable ExpType where
ppr (Check ty) = ppr ty
ppr (Infer u lvl ki _)
= parens (text "Infer" <> braces (ppr u <> comma <> ppr lvl)
<+> dcolon <+> ppr ki)
-- | Make an 'ExpType' suitable for checking against a known type.
mkCheckExpType :: TcType -> ExpType
mkCheckExpType ty = Check ty
{- *********************************************************************
* *
SyntaxOpType
* *
********************************************************************* -}
-- | What to expect for an argument to a rebindable-syntax operator.
-- Quite like 'Type', but allows for holes to be filled in by tcSyntaxOp.
-- The callback called from tcSyntaxOp gets a list of types; the meaning
-- of these types is determined by a left-to-right depth-first traversal
-- of the 'SyntaxOpType' tree. So if you pass in
--
-- > SynAny `SynFun` (SynList `SynFun` SynType Int) `SynFun` SynAny
--
-- you'll get three types back: one for the first 'SynAny', the /element/
-- type of the list, and one for the last 'SynAny'. You don't get anything
-- for the 'SynType', because you've said positively that it should be an
-- Int, and so it shall be.
--
-- This is defined here to avoid defining it in TcExpr.hs-boot.
data SyntaxOpType
= SynAny -- ^ Any type
| SynRho -- ^ A rho type, deeply skolemised or instantiated as appropriate
| SynList -- ^ A list type. You get back the element type of the list
| SynFun SyntaxOpType SyntaxOpType
-- ^ A function.
| SynType ExpType -- ^ A known type.
infixr 0 `SynFun`
-- | Like 'SynType', but wraps a plain 'TcType' (via 'mkCheckExpType')
-- instead of requiring an 'ExpType'.
synKnownType :: TcType -> SyntaxOpType
synKnownType ty = SynType (mkCheckExpType ty)
-- | Like 'mkFunTys' but for 'SyntaxOpType': chain the argument shapes
-- with 'SynFun', ending in the (known) result type.
mkSynFunTys :: [SyntaxOpType] -> ExpType -> SyntaxOpType
mkSynFunTys arg_tys res_ty = go arg_tys
  where
    go []         = SynType res_ty
    go (at : ats) = at `SynFun` go ats
{-
Note [TcRhoType]
~~~~~~~~~~~~~~~~
A TcRhoType has no foralls or contexts at the top, or to the right of an arrow
YES (forall a. a->a) -> Int
NO forall a. a -> Int
NO Eq a => a -> a
NO Int -> forall a. a -> Int
************************************************************************
* *
TyVarDetails, MetaDetails, MetaInfo
* *
************************************************************************
TyVarDetails gives extra info about type variables, used during type
checking. It's attached to mutable type variables only.
It's knot-tied back to Var.hs. There is no reason in principle
why Var.hs shouldn't actually have the definition, but it "belongs" here.
Note [Signature skolems]
~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
f :: forall a. [a] -> Int
f (x::b : xs) = 3
Here 'b' is a lexically scoped type variable, but it turns out to be
the same as the skolem 'a'. So we have a special kind of skolem
constant, SigTv, which can unify with other SigTvs. They are used
*only* for pattern type signatures.
Similarly consider
data T (a:k1) = MkT (S a)
data S (b:k2) = MkS (T b)
When doing kind inference on {S,T} we don't want *skolems* for k1,k2,
because they end up unifying; we want those SigTvs again.
Note [TyVars and TcTyVars during type checking]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The Var type has constructors TyVar and TcTyVar. They are used
as follows:
* TcTyVar: used /only/ during type checking. Should never appear
afterwards. May contain a mutable field, in the MetaTv case.
* TyVar: is never seen by the constraint solver, except locally
inside a type like (forall a. [a] ->[a]), where 'a' is a TyVar.
We instantiate these with TcTyVars before exposing the type
to the constraint solver.
I have swithered about the latter invariant, excluding TyVars from the
constraint solver. It's not strictly essential, and indeed
(historically but still there) Var.tcTyVarDetails returns
vanillaSkolemTv for a TyVar.
But ultimately I want to separate Type from TcType, and in that case
we would need to enforce the separation.
-}
-- A TyVarDetails is inside a TyVar
-- See Note [TyVars and TcTyVars]
data TcTyVarDetails
= SkolemTv -- A skolem
Bool -- True <=> this skolem type variable can be overlapped
-- when looking up instances
-- See Note [Binding when looking up instances] in InstEnv
| FlatSkol -- A flatten-skolem. It stands for the TcType, and zonking
TcType -- will replace it by that type.
-- See Note [The flattening story] in TcFlatten
| RuntimeUnk -- Stands for an as-yet-unknown type in the GHCi
-- interactive context
| MetaTv { mtv_info :: MetaInfo
, mtv_ref :: IORef MetaDetails
, mtv_tclvl :: TcLevel } -- See Note [TcLevel and untouchable type variables]
vanillaSkolemTv, superSkolemTv :: TcTyVarDetails
-- See Note [Binding when looking up instances] in InstEnv
vanillaSkolemTv = SkolemTv False -- Might be instantiated
superSkolemTv = SkolemTv True -- Treat this as a completely distinct type
-----------------------------
data MetaDetails
= Flexi -- Flexi type variables unify to become Indirects
| Indirect TcType
data MetaInfo
= TauTv -- This MetaTv is an ordinary unification variable
-- A TauTv is always filled in with a tau-type, which
-- never contains any ForAlls.
| SigTv -- A variant of TauTv, except that it should not be
-- unified with a type, only with a type variable
-- SigTvs are only distinguished to improve error messages
-- see Note [Signature skolems]
-- The MetaDetails, if filled in, will
-- always be another SigTv or a SkolemTv
| FlatMetaTv -- A flatten meta-tyvar
-- It is a meta-tyvar, but it is always untouchable, with level 0
-- See Note [The flattening story] in TcFlatten
instance Outputable MetaDetails where
ppr Flexi = text "Flexi"
ppr (Indirect ty) = text "Indirect" <+> ppr ty
pprTcTyVarDetails :: TcTyVarDetails -> SDoc
-- For debugging: a terse tag for each flavour of TcTyVarDetails
pprTcTyVarDetails (SkolemTv True)  = text "ssk"  -- overlappable ("super") skolem
pprTcTyVarDetails (SkolemTv False) = text "sk"   -- ordinary skolem
pprTcTyVarDetails (RuntimeUnk {})  = text "rt"   -- GHCi runtime-unknown type
pprTcTyVarDetails (FlatSkol {})    = text "fsk"  -- flatten-skolem
pprTcTyVarDetails (MetaTv { mtv_info = info, mtv_tclvl = tclvl })
  = pp_info <> colon <> ppr tclvl  -- e.g. "tau:3": meta-info tag, then TcLevel
  where
    pp_info = case info of
                TauTv      -> text "tau"
                SigTv      -> text "sig"
                FlatMetaTv -> text "fuv"
{- *********************************************************************
* *
UserTypeCtxt
* *
********************************************************************* -}
-------------------------------------
-- UserTypeCtxt describes the origin of the polymorphic type
-- in the places where we need to check that an expression has that type
data UserTypeCtxt
= FunSigCtxt -- Function type signature, when checking the type
-- Also used for types in SPECIALISE pragmas
Name -- Name of the function
Bool -- True <=> report redundant constraints
-- This is usually True, but False for
-- * Record selectors (not important here)
-- * Class and instance methods. Here
-- the code may legitimately be more
-- polymorphic than the signature
-- generated from the class
-- declaration
| InfSigCtxt Name -- Inferred type for function
| ExprSigCtxt -- Expression type signature
| TypeAppCtxt -- Visible type application
| ConArgCtxt Name -- Data constructor argument
| TySynCtxt Name -- RHS of a type synonym decl
| PatSynCtxt Name -- Type sig for a pattern synonym
| PatSigCtxt -- Type sig in pattern
-- eg f (x::t) = ...
-- or (x::t, y) = e
| RuleSigCtxt Name -- LHS of a RULE forall
-- RULE "foo" forall (x :: a -> a). f (Just x) = ...
| ResSigCtxt -- Result type sig
-- f x :: t = ....
| ForSigCtxt Name -- Foreign import or export signature
| DefaultDeclCtxt -- Types in a default declaration
| InstDeclCtxt -- An instance declaration
| SpecInstCtxt -- SPECIALISE instance pragma
| ThBrackCtxt -- Template Haskell type brackets [t| ... |]
| GenSigCtxt -- Higher-rank or impredicative situations
-- e.g. (f e) where f has a higher-rank type
-- We might want to elaborate this
| GhciCtxt -- GHCi command :kind <type>
| ClassSCCtxt Name -- Superclasses of a class
| SigmaCtxt -- Theta part of a normal for-all type
-- f :: <S> => a -> a
| DataTyCtxt Name -- The "stupid theta" part of a data decl
-- data <S> => T a = MkT a
{-
-- Notes re TySynCtxt
-- We allow type synonyms that aren't types; e.g. type List = []
--
-- If the RHS mentions tyvars that aren't in scope, we'll
-- quantify over them:
-- e.g. type T = a->a
-- will become type T = forall a. a->a
--
-- With gla-exts that's right, but for H98 we should complain.
-}
-- | Describe a 'UserTypeCtxt' for inclusion in an error message,
-- e.g. "In the type signature for 'f'".
pprUserTypeCtxt :: UserTypeCtxt -> SDoc
pprUserTypeCtxt ctxt = case ctxt of
  FunSigCtxt n _  -> text "the type signature for" <+> quotes (ppr n)
  InfSigCtxt n    -> text "the inferred type for" <+> quotes (ppr n)
  RuleSigCtxt n   -> text "a RULE for" <+> quotes (ppr n)
  ExprSigCtxt     -> text "an expression type signature"
  TypeAppCtxt     -> text "a type argument"
  ConArgCtxt c    -> text "the type of the constructor" <+> quotes (ppr c)
  TySynCtxt c     -> text "the RHS of the type synonym" <+> quotes (ppr c)
  ThBrackCtxt     -> text "a Template Haskell quotation [t|...|]"
  PatSigCtxt      -> text "a pattern type signature"
  ResSigCtxt      -> text "a result type signature"
  ForSigCtxt n    -> text "the foreign declaration for" <+> quotes (ppr n)
  DefaultDeclCtxt -> text "a type in a `default' declaration"
  InstDeclCtxt    -> text "an instance declaration"
  SpecInstCtxt    -> text "a SPECIALISE instance pragma"
  GenSigCtxt      -> text "a type expected by the context"
  GhciCtxt        -> text "a type in a GHCi command"
  ClassSCCtxt c   -> text "the super-classes of class" <+> quotes (ppr c)
  SigmaCtxt       -> text "the context of a polymorphic type"
  DataTyCtxt tc   -> text "the context of the data type declaration for" <+> quotes (ppr tc)
  PatSynCtxt n    -> text "the signature for pattern synonym" <+> quotes (ppr n)
-- | If the context carries the 'Name' of a user-written signature
-- (function signature, constructor argument, foreign declaration,
-- or pattern synonym), return it; otherwise 'Nothing'.
isSigMaybe :: UserTypeCtxt -> Maybe Name
isSigMaybe ctxt = case ctxt of
  FunSigCtxt n _ -> Just n
  ConArgCtxt n   -> Just n
  ForSigCtxt n   -> Just n
  PatSynCtxt n   -> Just n
  _              -> Nothing
{- *********************************************************************
* *
                  Untouchable type variables
* *
********************************************************************* -}
-- | Nesting depth of an implication constraint; used to decide which
-- unification variables are "touchable" (unifiable) at a given point.
newtype TcLevel = TcLevel Int deriving( Eq, Ord )
  -- See Note [TcLevel and untouchable type variables] for what this Int is
  -- See also Note [TcLevel assignment]
{-
Note [TcLevel and untouchable type variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* Each unification variable (MetaTv)
and each Implication
has a level number (of type TcLevel)
* INVARIANTS. In a tree of Implications,
(ImplicInv) The level number of an Implication is
STRICTLY GREATER THAN that of its parent
(MetaTvInv) The level number of a unification variable is
LESS THAN OR EQUAL TO that of its parent
implication
* A unification variable is *touchable* if its level number
is EQUAL TO that of its immediate parent implication.
* INVARIANT
(GivenInv) The free variables of the ic_given of an
implication are all untouchable; ie their level
numbers are LESS THAN the ic_tclvl of the implication
Note [Skolem escape prevention]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We only unify touchable unification variables. Because of
(MetaTvInv), there can be no occurrences of the variable further out,
so the unification can't cause the skolems to escape. Example:
data T = forall a. MkT a (a->Int)
f x (MkT v f) = length [v,x]
We decide (x::alpha), and generate an implication like
[1]forall a. (a ~ alpha[0])
But we must not unify alpha:=a, because the skolem would escape.
For the cases where we DO want to unify, we rely on floating the
equality. Example (with same T)
g x (MkT v f) = x && True
We decide (x::alpha), and generate an implication like
[1]forall a. (Bool ~ alpha[0])
We do NOT unify directly, but rather float out (if the constraint
does not mention 'a') to get
(Bool ~ alpha[0]) /\ [1]forall a.()
and NOW we can unify alpha.
The same idea of only unifying touchables solves another problem.
Suppose we had
(F Int ~ uf[0]) /\ [1](forall a. C a => F Int ~ beta[1])
In this example, beta is touchable inside the implication. The
first solveSimpleWanteds step leaves 'uf' un-unified. Then we move inside
the implication where a new constraint
uf ~ beta
emerges. If we (wrongly) spontaneously solved it to get uf := beta,
the whole implication disappears but when we pop out again we are left with
(F Int ~ uf) which will be unified by our final zonking stage and
uf will get unified *once more* to (F Int).
Note [TcLevel assignment]
~~~~~~~~~~~~~~~~~~~~~~~~~
We arrange the TcLevels like this
1 Top level
2 Flatten-meta-vars of level 3
3 First-level implication constraints
4 Flatten-meta-vars of level 5
5 Second-level implication constraints
...etc...
The even-numbered levels are for the flatten-meta-variables assigned
at the next level in.  Eg for a second-level implication constraint
(level 5), the flatten meta-vars are level 4, which makes them untouchable.
The flatten meta-vars could equally well all have level 0, or just NotALevel
since they do not live across implications.
-}
fmvTcLevel :: TcLevel -> TcLevel
-- The level for flatten-meta-variables: one less than the current
-- implication's level, which makes them untouchable there.
-- See Note [TcLevel assignment]
fmvTcLevel (TcLevel n) = TcLevel (n-1)

topTcLevel :: TcLevel
-- See Note [TcLevel assignment]
topTcLevel = TcLevel 1   -- 1 = outermost level

isTopTcLevel :: TcLevel -> Bool
-- True iff this is the outermost (top) level
isTopTcLevel (TcLevel 1) = True
isTopTcLevel _           = False

pushTcLevel :: TcLevel -> TcLevel
-- Step two levels inward; the skipped even-numbered level is reserved
-- for flatten-meta-variables.  See Note [TcLevel assignment]
pushTcLevel (TcLevel us) = TcLevel (us + 2)

strictlyDeeperThan :: TcLevel -> TcLevel -> Bool
-- (tv_lvl `strictlyDeeperThan` ctxt_lvl): the variable's level is
-- strictly greater than the context's level
strictlyDeeperThan (TcLevel tv_tclvl) (TcLevel ctxt_tclvl)
  = tv_tclvl > ctxt_tclvl

sameDepthAs :: TcLevel -> TcLevel -> Bool
sameDepthAs (TcLevel ctxt_tclvl) (TcLevel tv_tclvl)
  = ctxt_tclvl == tv_tclvl   -- NB: invariant ctxt_tclvl >= tv_tclvl
                             --     So <= would be equivalent

checkTcLevelInvariant :: TcLevel -> TcLevel -> Bool
-- Checks (MetaTvInv) from Note [TcLevel and untouchable type variables]
checkTcLevelInvariant (TcLevel ctxt_tclvl) (TcLevel tv_tclvl)
  = ctxt_tclvl >= tv_tclvl

instance Outputable TcLevel where
  ppr (TcLevel us) = ppr us
{- *********************************************************************
* *
Finding type family instances
* *
************************************************************************
-}
-- | Finds outermost type-family applications occurring in a type,
-- after expanding synonyms.  In the list (F, tys) that is returned
-- we guarantee that tys matches F's arity.  For example, given
--    type family F a :: * -> *    (arity 1)
-- calling tcTyFamInsts on (Maybe (F Int Bool)) will return
--     (F, [Int]), not (F, [Int,Bool])
--
-- This is important for its use in deciding termination of type
-- instances (see Trac #11581).  E.g.
--    type instance G [Int] = ...(F Int <big type>)...
-- we don't need to take <big type> into account when asking if
-- the calls on the RHS are smaller than the LHS
tcTyFamInsts :: Type -> [(TyCon, [Type])]
tcTyFamInsts ty
  | Just exp_ty <- coreView ty  = tcTyFamInsts exp_ty
tcTyFamInsts (TyVarTy _)        = []
tcTyFamInsts (TyConApp tc tys)
    -- Truncate to the arity: a family application may be oversaturated
  | isTypeFamilyTyCon tc        = [(tc, take (tyConArity tc) tys)]
  | otherwise                   = concatMap tcTyFamInsts tys
tcTyFamInsts (LitTy {})         = []
tcTyFamInsts (ForAllTy bndr ty) = tcTyFamInsts (binderKind bndr)
                                  ++ tcTyFamInsts ty
tcTyFamInsts (FunTy ty1 ty2)    = tcTyFamInsts ty1 ++ tcTyFamInsts ty2
tcTyFamInsts (AppTy ty1 ty2)    = tcTyFamInsts ty1 ++ tcTyFamInsts ty2
tcTyFamInsts (CastTy ty _)      = tcTyFamInsts ty
tcTyFamInsts (CoercionTy _)     = []  -- don't count tyfams in coercions,
                                      -- as they never get normalized, anyway
{-
************************************************************************
* *
The "exact" free variables of a type
* *
************************************************************************
Note [Silly type synonym]
~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
type T a = Int
What are the free tyvars of (T x)? Empty, of course!
Here's the example that Ralf Laemmel showed me:
foo :: (forall a. C u a -> C u a) -> u
mappend :: Monoid u => u -> u -> u
bar :: Monoid u => u
bar = foo (\t -> t `mappend` t)
We have to generalise at the arg to f, and we don't
want to capture the constraint (Monad (C u a)) because
it appears to mention a. Pretty silly, but it was useful to him.
exactTyCoVarsOfType is used by the type checker to figure out exactly
which type variables are mentioned in a type. It's also used in the
smart-app checking code --- see TcExpr.tcIdApp
On the other hand, consider a *top-level* definition
f = (\x -> x) :: T a -> T a
If we don't abstract over 'a' it'll get fixed to GHC.Prim.Any, and then
if we have an application like (f "x") we get a confusing error message
involving Any. So the conclusion is this: when generalising
- at top level use tyCoVarsOfType
- in nested bindings use exactTyCoVarsOfType
See Trac #1813 for example.
-}
exactTyCoVarsOfType :: Type -> TyCoVarSet
-- Find the free type variables (of any kind)
-- but *expand* type synonyms.  See Note [Silly type synonym] above.
exactTyCoVarsOfType ty
  = go ty
  where
    go ty | Just ty' <- coreView ty = go ty'  -- This is the key line
    go (TyVarTy tv)         = unitVarSet tv `unionVarSet` go (tyVarKind tv)
    go (TyConApp _ tys)     = exactTyCoVarsOfTypes tys
    go (LitTy {})           = emptyVarSet
    go (AppTy fun arg)      = go fun `unionVarSet` go arg
    go (FunTy arg res)      = go arg `unionVarSet` go res
    -- For a forall: delete the bound variable from the body's free vars,
    -- but keep the free vars of the binder's kind
    go (ForAllTy bndr ty)   = delBinderVar (go ty) bndr `unionVarSet` go (binderKind bndr)
    go (CastTy ty co)       = go ty `unionVarSet` goCo co
    go (CoercionTy co)      = goCo co

    -- Free variables of a coercion, likewise expanding synonyms
    goCo (Refl _ ty)            = go ty
    goCo (TyConAppCo _ _ args)  = goCos args
    goCo (AppCo co arg)         = goCo co `unionVarSet` goCo arg
    goCo (ForAllCo tv k_co co)
      = goCo co `delVarSet` tv `unionVarSet` goCo k_co
    goCo (CoVarCo v)            = unitVarSet v `unionVarSet` go (varType v)
    goCo (AxiomInstCo _ _ args) = goCos args
    goCo (UnivCo p _ t1 t2)     = goProv p `unionVarSet` go t1 `unionVarSet` go t2
    goCo (SymCo co)             = goCo co
    goCo (TransCo co1 co2)      = goCo co1 `unionVarSet` goCo co2
    goCo (NthCo _ co)           = goCo co
    goCo (LRCo _ co)            = goCo co
    goCo (InstCo co arg)        = goCo co `unionVarSet` goCo arg
    goCo (CoherenceCo c1 c2)    = goCo c1 `unionVarSet` goCo c2
    goCo (KindCo co)            = goCo co
    goCo (SubCo co)             = goCo co
    goCo (AxiomRuleCo _ c)      = goCos c

    goCos cos = foldr (unionVarSet . goCo) emptyVarSet cos

    goProv UnsafeCoerceProv     = emptyVarSet
    goProv (PhantomProv kco)    = goCo kco
    goProv (ProofIrrelProv kco) = goCo kco
    goProv (PluginProv _)       = emptyVarSet
    goProv (HoleProv _)         = emptyVarSet

exactTyCoVarsOfTypes :: [Type] -> TyVarSet
-- Union of 'exactTyCoVarsOfType' over a list of types
exactTyCoVarsOfTypes tys = mapUnionVarSet exactTyCoVarsOfType tys
{- *********************************************************************
* *
Bound variables in a type
* *
********************************************************************* -}
-- | Find all variables bound anywhere in a type.
-- See also Note [Scope-check inferred kinds] in TcHsType
allBoundVariables :: Type -> TyVarSet
allBoundVariables ty = fvVarSet $ go ty
  where
    go :: Type -> FV
    -- NB: a TyVarTy occurrence is not itself a binder; we only look
    -- for binders hiding in its kind
    go (TyVarTy tv)     = go (tyVarKind tv)
    go (TyConApp _ tys) = mapUnionFV go tys
    go (AppTy t1 t2)    = go t1 `unionFV` go t2
    go (FunTy t1 t2)    = go t1 `unionFV` go t2
    go (ForAllTy (TvBndr tv _) t2) = FV.unitFV tv `unionFV`
                                     go (tyVarKind tv) `unionFV` go t2
    go (LitTy {})       = emptyFV
    go (CastTy ty _)    = go ty
    go (CoercionTy {})  = emptyFV
      -- any types mentioned in a coercion should also be mentioned in
      -- a type.

allBoundVariabless :: [Type] -> TyVarSet
-- Union of 'allBoundVariables' over a list of types
allBoundVariabless = mapUnionVarSet allBoundVariables
{- *********************************************************************
* *
Type and kind variables in a type
* *
********************************************************************* -}
data TcDepVars  -- See Note [Dependent type variables]
                -- See Note [TcDepVars determinism]
  = DV { dv_kvs :: DTyCoVarSet  -- "kind" variables (dependent)
       , dv_tvs :: DTyVarSet    -- "type" variables (non-dependent)
         -- A variable may appear in both sets
         -- E.g.   T k (x::k)    The first occurrence of k makes it
         --                      show up in dv_tvs, the second in dv_kvs
         -- See Note [Dependent type variables]
       }

tcDepVarSet :: TcDepVars -> TyVarSet
-- Flatten a TcDepVars into a single (non-deterministic) set
-- Actually can contain CoVars, but never mind
tcDepVarSet (DV { dv_kvs = kvs, dv_tvs = tvs })
  = dVarSetToVarSet kvs `unionVarSet` dVarSetToVarSet tvs

-- Pointwise union of the two component sets
instance Monoid TcDepVars where
   mempty = DV { dv_kvs = emptyDVarSet, dv_tvs = emptyDVarSet }
   mappend (DV { dv_kvs = kv1, dv_tvs = tv1 })
           (DV { dv_kvs = kv2, dv_tvs = tv2 })
          = DV { dv_kvs = kv1 `unionDVarSet` kv2
               , dv_tvs = tv1 `unionDVarSet` tv2}

instance Outputable TcDepVars where
  ppr (DV {dv_kvs = kvs, dv_tvs = tvs })
    = text "DV" <+> braces (sep [ text "dv_kvs =" <+> ppr kvs
                                , text "dv_tvs =" <+> ppr tvs ])
{- Note [Dependent type variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In Haskell type inference we quantify over type variables; but we only
quantify over /kind/ variables when -XPolyKinds is on. So when
collecting the free vars of a type, prior to quantifying, we must keep
the type and kind variables separate. But what does that mean in a
system where kind variables /are/ type variables? It's a fairly
arbitrary distinction based on how the variables appear:
- "Kind variables" appear in the kind of some other free variable
PLUS any free coercion variables
- "Type variables" are all free vars that are not kind variables
E.g. In the type T k (a::k)
'k' is a kind variable, because it occurs in the kind of 'a',
even though it also appears at "top level" of the type
'a' is a type variable, because it doesn't
We gather these variables using a TcDepVars record:
DV { dv_kvs: Variables free in the kind of a free type variable
or of a forall-bound type variable
     , dv_tvs: Variables syntactically free in the type }
So: dv_kvs are the kind variables of the type
(dv_tvs - dv_kvs) are the type variable of the type
Note that
* A variable can occur in both.
      T k (x::k)  The first occurrence of k makes it
show up in dv_tvs, the second in dv_kvs
* We include any coercion variables in the "dependent",
"kind-variable" set because we never quantify over them.
* Both sets are un-ordered, of course.
* The "kind variables" might depend on each other; e.g
(k1 :: k2), (k2 :: *)
The "type variables" do not depend on each other; if
one did, it'd be classified as a kind variable!
Note [TcDepVars determinism]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we quantify over type variables we decide the order in which they
appear in the final type. Because the order of type variables in the type
can end up in the interface file and affects some optimizations like
worker-wrapper we want this order to be deterministic.
To achieve that we use deterministic sets of variables that can be converted to
lists in a deterministic order.
For more information about deterministic sets see
Note [Deterministic UniqFM] in UniqDFM.
-}
-- | Like 'splitDepVarsOfType', but over a list of types
splitDepVarsOfTypes :: [Type] -> TcDepVars
splitDepVarsOfTypes = foldMap splitDepVarsOfType

-- | Worker for 'splitDepVarsOfType'. This might output the same var
-- in both sets, if it's used in both a type and a kind.
-- See Note [TcDepVars determinism]
-- See Note [Dependent type variables]
splitDepVarsOfType :: Type -> TcDepVars
splitDepVarsOfType = go
  where
    -- A type variable contributes itself to dv_tvs, and the free
    -- vars of its kind to dv_kvs
    go (TyVarTy tv)     = DV { dv_kvs = tyCoVarsOfTypeDSet $ tyVarKind tv
                             , dv_tvs = unitDVarSet tv }
    go (AppTy t1 t2)    = go t1 `mappend` go t2
    go (TyConApp _ tys) = foldMap go tys
    go (FunTy arg res)  = go arg `mappend` go res
    go (LitTy {})       = mempty
    go (CastTy ty co)   = go ty `mappend` go_co co
    go (CoercionTy co)  = go_co co
    -- Remove the bound variable from both sets; the free vars of its
    -- kind count as "kind variables" (forall-bound binder's kind)
    go (ForAllTy (TvBndr tv _) ty)
      = let DV { dv_kvs = kvs, dv_tvs = tvs } = go ty in
        DV { dv_kvs = (kvs `delDVarSet` tv)
                      `extendDVarSetList` tyCoVarsOfTypeList (tyVarKind tv)
           , dv_tvs = tvs `delDVarSet` tv }

    -- Variables free in coercions are never quantified over, so they
    -- all land in the "dependent" (kind-variable) set
    go_co co = DV { dv_kvs = tyCoVarsOfCoDSet co
                  , dv_tvs = emptyDVarSet }
{-
************************************************************************
* *
Predicates
* *
************************************************************************
-}
isTouchableOrFmv :: TcLevel -> TcTyVar -> Bool
-- True of a meta-variable that is touchable at the given context
-- level, or is a flatten-meta-variable (always unifiable)
isTouchableOrFmv ctxt_tclvl tv
  = ASSERT2( isTcTyVar tv, ppr tv )
    case tcTyVarDetails tv of
      MetaTv { mtv_tclvl = tv_tclvl, mtv_info = info }
        -> ASSERT2( checkTcLevelInvariant ctxt_tclvl tv_tclvl,
                    ppr tv $$ ppr tv_tclvl $$ ppr ctxt_tclvl )
           case info of
             FlatMetaTv -> True
             _          -> tv_tclvl `sameDepthAs` ctxt_tclvl
      _ -> False

isTouchableMetaTyVar :: TcLevel -> TcTyVar -> Bool
-- True of a meta-variable whose level equals the given context level
isTouchableMetaTyVar ctxt_tclvl tv
  = ASSERT2( isTcTyVar tv, ppr tv )
    case tcTyVarDetails tv of
      MetaTv { mtv_tclvl = tv_tclvl }
        -> ASSERT2( checkTcLevelInvariant ctxt_tclvl tv_tclvl,
                    ppr tv $$ ppr tv_tclvl $$ ppr ctxt_tclvl )
           tv_tclvl `sameDepthAs` ctxt_tclvl
      _ -> False

isFloatedTouchableMetaTyVar :: TcLevel -> TcTyVar -> Bool
-- True of a meta-variable whose level is strictly deeper than the
-- context's; such a variable becomes touchable if floated out here
isFloatedTouchableMetaTyVar ctxt_tclvl tv
  = ASSERT2( isTcTyVar tv, ppr tv )
    case tcTyVarDetails tv of
      MetaTv { mtv_tclvl = tv_tclvl } -> tv_tclvl `strictlyDeeperThan` ctxt_tclvl
      _ -> False

isImmutableTyVar :: TyVar -> Bool
-- A tyvar that can never be unified: any skolem TcTyVar,
-- or any non-Tc tyvar
isImmutableTyVar tv
  | isTcTyVar tv = isSkolemTyVar tv
  | otherwise    = True
isTyConableTyVar, isSkolemTyVar, isOverlappableTyVar,
  isMetaTyVar, isAmbiguousTyVar,
  isFmvTyVar, isFskTyVar, isFlattenTyVar :: TcTyVar -> Bool

isTyConableTyVar tv
        -- True of a meta-type variable that can be filled in
        -- with a type constructor application; in particular,
        -- not a SigTv
  = ASSERT2( isTcTyVar tv, ppr tv )
    case tcTyVarDetails tv of
        MetaTv { mtv_info = SigTv } -> False
        _                           -> True

-- True of flatten-meta-variables only
isFmvTyVar tv
  = ASSERT2( isTcTyVar tv, ppr tv )
    case tcTyVarDetails tv of
        MetaTv { mtv_info = FlatMetaTv } -> True
        _                                -> False

-- | True of both given and wanted flatten-skolems (fak and usk)
isFlattenTyVar tv
  = ASSERT2( isTcTyVar tv, ppr tv )
    case tcTyVarDetails tv of
        FlatSkol {}                      -> True
        MetaTv { mtv_info = FlatMetaTv } -> True
        _                                -> False

-- | True of FlatSkol skolems only
isFskTyVar tv
  = ASSERT2( isTcTyVar tv, ppr tv )
    case tcTyVarDetails tv of
        FlatSkol {} -> True
        _           -> False

-- Any TcTyVar that is not a meta-variable counts as a skolem
isSkolemTyVar tv
  = ASSERT2( isTcTyVar tv, ppr tv )
    case tcTyVarDetails tv of
        MetaTv {} -> False
        _other    -> True

isOverlappableTyVar tv
  = ASSERT2( isTcTyVar tv, ppr tv )
    case tcTyVarDetails tv of
        SkolemTv overlappable -> overlappable
        _                     -> False

isMetaTyVar tv
  = ASSERT2( isTcTyVar tv, ppr tv )
    case tcTyVarDetails tv of
        MetaTv {} -> True
        _         -> False

-- isAmbiguousTyVar is used only when reporting type errors
-- It picks out variables that are unbound, namely meta
-- type variables and the RuntimeUnk variables created by
-- RtClosureInspect.zonkRTTIType.  These are "ambiguous" in
-- the sense that they stand for an as-yet-unknown type
isAmbiguousTyVar tv
  = case tcTyVarDetails tv of
        MetaTv {}     -> True
        RuntimeUnk {} -> True
        _             -> False
isMetaTyVarTy :: TcType -> Bool
-- True of a type headed by a meta type variable
isMetaTyVarTy (TyVarTy tv) = isMetaTyVar tv
isMetaTyVarTy _            = False

metaTyVarInfo :: TcTyVar -> MetaInfo
-- Precondition: the argument is a MetaTv; panics otherwise
metaTyVarInfo tv
  = case tcTyVarDetails tv of
      MetaTv { mtv_info = info } -> info
      _ -> pprPanic "metaTyVarInfo" (ppr tv)

metaTyVarTcLevel :: TcTyVar -> TcLevel
-- Precondition: the argument is a MetaTv; panics otherwise
metaTyVarTcLevel tv
  = case tcTyVarDetails tv of
      MetaTv { mtv_tclvl = tclvl } -> tclvl
      _ -> pprPanic "metaTyVarTcLevel" (ppr tv)

metaTyVarTcLevel_maybe :: TcTyVar -> Maybe TcLevel
-- Total version of 'metaTyVarTcLevel'
metaTyVarTcLevel_maybe tv
  = case tcTyVarDetails tv of
      MetaTv { mtv_tclvl = tclvl } -> Just tclvl
      _                            -> Nothing
setMetaTyVarTcLevel :: TcTyVar -> TcLevel -> TcTyVar
-- Set the level of a meta type variable; panics on any other TcTyVar
setMetaTyVarTcLevel tv tclvl
  = case tcTyVarDetails tv of
      details@(MetaTv {}) -> setTcTyVarDetails tv (details { mtv_tclvl = tclvl })
      -- Panic tag previously said "metaTyVarTcLevel" (copy-paste slip),
      -- which pointed debuggers at the wrong function
      _ -> pprPanic "setMetaTyVarTcLevel" (ppr tv)
isSigTyVar :: Var -> Bool
-- True of a meta-variable arising from a partial type signature
isSigTyVar tv
  = case tcTyVarDetails tv of
      MetaTv { mtv_info = SigTv } -> True
      _                           -> False

metaTvRef :: TyVar -> IORef MetaDetails
-- The mutable cell of a meta-variable; panics on any other TcTyVar
metaTvRef tv
  = case tcTyVarDetails tv of
      MetaTv { mtv_ref = ref } -> ref
      _ -> pprPanic "metaTvRef" (ppr tv)

isFlexi, isIndirect :: MetaDetails -> Bool
-- Flexi    = not yet filled in
-- Indirect = already unified with some type
isFlexi Flexi = True
isFlexi _     = False

isIndirect (Indirect _) = True
isIndirect _            = False

isRuntimeUnkSkol :: TyVar -> Bool
-- Called only in TcErrors; see Note [Runtime skolems] there
isRuntimeUnkSkol x
  | isTcTyVar x, RuntimeUnk <- tcTyVarDetails x = True
  | otherwise                                   = False
{-
************************************************************************
* *
\subsection{Tau, sigma and rho}
* *
************************************************************************
-}
mkSigmaTy :: [TyVarBinder] -> [PredType] -> Type -> Type
-- Build (forall bndrs. theta => tau)
mkSigmaTy bndrs theta tau = mkForAllTys bndrs (mkPhiTy theta tau)

-- | Make a sigma ty where all type variables are 'Inferred'. That is,
-- they cannot be used with visible type application.
mkInfSigmaTy :: [TyVar] -> [PredType] -> Type -> Type
mkInfSigmaTy tyvars ty = mkSigmaTy (mkTyVarBinders Inferred tyvars) ty

-- | Make a sigma ty where all type variables are "specified". That is,
-- they can be used with visible type application
mkSpecSigmaTy :: [TyVar] -> [PredType] -> Type -> Type
mkSpecSigmaTy tyvars ty = mkSigmaTy (mkTyVarBinders Specified tyvars) ty

mkPhiTy :: [PredType] -> Type -> Type
-- Build (theta => tau) as a chain of function arrows
mkPhiTy = mkFunTys
-- @isTauTy@ tests if a type is "simple": no foralls, casts, or
-- coercions anywhere inside (after expanding synonyms)
isTauTy :: Type -> Bool
isTauTy ty | Just ty' <- coreView ty = isTauTy ty'
isTauTy (TyVarTy _)       = True
isTauTy (LitTy {})        = True
isTauTy (TyConApp tc tys) = all isTauTy tys && isTauTyCon tc
isTauTy (AppTy a b)       = isTauTy a && isTauTy b
isTauTy (FunTy a b)       = isTauTy a && isTauTy b
isTauTy (ForAllTy {})     = False
isTauTy (CastTy _ _)      = False
isTauTy (CoercionTy _)    = False

isTauTyCon :: TyCon -> Bool
-- Returns False for type synonyms whose expansion is a polytype
isTauTyCon tc
  | Just (_, rhs) <- synTyConDefn_maybe tc = isTauTy rhs
  | otherwise                              = True
---------------
getDFunTyKey :: Type -> OccName -- Get some string from a type, to be used to
                                -- construct a dictionary function name
getDFunTyKey ty | Just ty' <- coreView ty = getDFunTyKey ty'
getDFunTyKey (TyVarTy tv)     = getOccName tv
getDFunTyKey (TyConApp tc _)  = getOccName tc
getDFunTyKey (LitTy x)        = getDFunTyLitKey x
getDFunTyKey (AppTy fun _)    = getDFunTyKey fun
getDFunTyKey (FunTy _ _)      = getOccName funTyCon
getDFunTyKey (ForAllTy _ t)   = getDFunTyKey t
getDFunTyKey (CastTy ty _)    = getDFunTyKey ty
getDFunTyKey t@(CoercionTy _) = pprPanic "getDFunTyKey" (ppr t)
  -- A coercion can never head an instance type

getDFunTyLitKey :: TyLit -> OccName
-- Use the literal's textual form as the key
getDFunTyLitKey (NumTyLit n) = mkOccName Name.varName (show n)
getDFunTyLitKey (StrTyLit n) = mkOccName Name.varName (show n)  -- hm
---------------
mkNakedTyConApp :: TyCon -> [Type] -> Type
-- Builds a TyConApp
--   * without being strict in TyCon,
--   * without satisfying the invariants of TyConApp
-- A subsequent zonking will establish the invariants
-- See Note [Type-checking inside the knot] in TcHsType
mkNakedTyConApp tc tys = TyConApp tc tys

mkNakedAppTys :: Type -> [Type] -> Type
-- Apply a type to several arguments, flattening into an existing
-- TyConApp where possible; no TyConApp invariants enforced.
-- See Note [Type-checking inside the knot] in TcHsType
mkNakedAppTys ty1 []                  = ty1
mkNakedAppTys (TyConApp tc tys1) tys2 = mkNakedTyConApp tc (tys1 ++ tys2)
mkNakedAppTys ty1 tys2                = foldl AppTy ty1 tys2

mkNakedAppTy :: Type -> Type -> Type
-- See Note [Type-checking inside the knot] in TcHsType
mkNakedAppTy ty1 ty2 = mkNakedAppTys ty1 [ty2]

mkNakedCastTy :: Type -> Coercion -> Type
-- Do simple, fast compaction; especially dealing with Refl
-- for which it's plain stupid to create a cast
-- This simple function killed off a huge number of Refl casts
-- in types, at birth.
-- Note that it's fine to do this even for a "mkNaked" function,
-- because we don't look at TyCons. isReflCo checks if the coercion
-- is structurally Refl; it does not check for shape k ~ k.
mkNakedCastTy ty co | isReflCo co = ty
mkNakedCastTy (CastTy ty co1) co2 = CastTy ty (co1 `mkTransCo` co2)
mkNakedCastTy ty co = CastTy ty co
{-
************************************************************************
* *
\subsection{Expanding and splitting}
* *
************************************************************************
These tcSplit functions are like their non-Tc analogues, but
*) they do not look through newtypes
However, they are non-monadic and do not follow through mutable type
variables. It's up to you to make sure this doesn't matter.
-}
-- | Splits a forall type into a list of 'TyBinder's and the inner type.
-- Always succeeds, even if it returns an empty list.
tcSplitPiTys :: Type -> ([TyBinder], Type)
tcSplitPiTys = splitPiTys

tcSplitForAllTy_maybe :: Type -> Maybe (TyVarBinder, Type)
-- Split off the outermost forall, if any, looking through synonyms
tcSplitForAllTy_maybe ty | Just ty' <- coreView ty = tcSplitForAllTy_maybe ty'
tcSplitForAllTy_maybe (ForAllTy tv ty) = Just (tv, ty)
tcSplitForAllTy_maybe _                = Nothing

-- | Like 'tcSplitPiTys', but splits off only named binders, returning
-- just the tycovars.
tcSplitForAllTys :: Type -> ([TyVar], Type)
tcSplitForAllTys = splitForAllTys

-- | Like 'tcSplitForAllTys', but splits off only named binders.
tcSplitForAllTyVarBndrs :: Type -> ([TyVarBinder], Type)
tcSplitForAllTyVarBndrs = splitForAllTyVarBndrs

-- | Is this a ForAllTy with a named binder?
tcIsForAllTy :: Type -> Bool
tcIsForAllTy ty | Just ty' <- coreView ty = tcIsForAllTy ty'
tcIsForAllTy (ForAllTy {}) = True
tcIsForAllTy _             = False
tcSplitPredFunTy_maybe :: Type -> Maybe (PredType, Type)
-- Split off the first predicate argument from a type
tcSplitPredFunTy_maybe ty
  | Just ty' <- coreView ty = tcSplitPredFunTy_maybe ty'
tcSplitPredFunTy_maybe (FunTy arg res)
  | isPredTy arg = Just (arg, res)
tcSplitPredFunTy_maybe _
  = Nothing

tcSplitPhiTy :: Type -> (ThetaType, Type)
-- Split off all leading predicate arguments (the "theta"),
-- returning them in source order along with the remaining type
tcSplitPhiTy ty
  = split ty []
  where
    -- Accumulate the predicates in reverse; restore order at the end
    split ty ts
      = case tcSplitPredFunTy_maybe ty of
          Just (pred, ty) -> split ty (pred:ts)
          Nothing         -> (reverse ts, ty)

-- | Split a sigma type into its parts.
tcSplitSigmaTy :: Type -> ([TyVar], ThetaType, Type)
tcSplitSigmaTy ty = case tcSplitForAllTys ty of
                      (tvs, rho) -> case tcSplitPhiTy rho of
                                      (theta, tau) -> (tvs, theta, tau)
-----------------------
tcDeepSplitSigmaTy_maybe
  :: TcSigmaType -> Maybe ([TcType], [TyVar], ThetaType, TcSigmaType)
-- Looks for a *non-trivial* quantified type, under zero or more function arrows
-- By "non-trivial" we mean either tyvars or constraints are non-empty

tcDeepSplitSigmaTy_maybe ty
  -- Walk under an arrow and keep its argument type
  | Just (arg_ty, res_ty) <- tcSplitFunTy_maybe ty
  , Just (arg_tys, tvs, theta, rho) <- tcDeepSplitSigmaTy_maybe res_ty
  = Just (arg_ty:arg_tys, tvs, theta, rho)

  -- Found a genuine sigma type right here
  | (tvs, theta, rho) <- tcSplitSigmaTy ty
  , not (null tvs && null theta)
  = Just ([], tvs, theta, rho)

  | otherwise = Nothing
-----------------------
tcTyConAppTyCon :: Type -> TyCon
-- Panicking version: the type must be a TyConApp (after synonym expansion)
tcTyConAppTyCon ty = case tcSplitTyConApp_maybe ty of
                       Just (tc, _) -> tc
                       Nothing      -> pprPanic "tcTyConAppTyCon" (pprType ty)

tcTyConAppArgs :: Type -> [Type]
-- Panicking version: the type must be a TyConApp (after synonym expansion)
tcTyConAppArgs ty = case tcSplitTyConApp_maybe ty of
                      Just (_, args) -> args
                      Nothing        -> pprPanic "tcTyConAppArgs" (pprType ty)

tcSplitTyConApp :: Type -> (TyCon, [Type])
-- Panicking version of 'tcSplitTyConApp_maybe'
tcSplitTyConApp ty = case tcSplitTyConApp_maybe ty of
                       Just stuff -> stuff
                       Nothing    -> pprPanic "tcSplitTyConApp" (pprType ty)

tcSplitTyConApp_maybe :: Type -> Maybe (TyCon, [Type])
-- Like 'tcRepSplitTyConApp_maybe', but expands synonyms first
tcSplitTyConApp_maybe ty | Just ty' <- coreView ty = tcSplitTyConApp_maybe ty'
tcSplitTyConApp_maybe ty                           = tcRepSplitTyConApp_maybe ty

tcRepSplitTyConApp_maybe :: Type -> Maybe (TyCon, [Type])
-- Treats (a -> b) as an application of funTyCon; no synonym expansion
tcRepSplitTyConApp_maybe (TyConApp tc tys) = Just (tc, tys)
tcRepSplitTyConApp_maybe (FunTy arg res)   = Just (funTyCon, [arg,res])
tcRepSplitTyConApp_maybe _                 = Nothing
-----------------------
tcSplitFunTys :: Type -> ([Type], Type)
-- Split off all the (non-predicate) argument types,
-- returning them with the final result type
tcSplitFunTys ty = case tcSplitFunTy_maybe ty of
                     Nothing        -> ([], ty)
                     Just (arg,res) -> (arg:args, res')
                        where
                          (args,res') = tcSplitFunTys res

tcSplitFunTy_maybe :: Type -> Maybe (Type, Type)
tcSplitFunTy_maybe ty | Just ty' <- coreView ty         = tcSplitFunTy_maybe ty'
tcSplitFunTy_maybe (FunTy arg res) | not (isPredTy arg) = Just (arg, res)
tcSplitFunTy_maybe _                                    = Nothing
        -- Note the (not (isPredTy arg)) guard
        -- Consider     (?x::Int) => Bool
        -- We don't want to treat this as a function type!
        -- A concrete example is test tc230:
        --      f :: () -> (?p :: ()) => () -> ()
        --
        --      g = f () ()
tcSplitFunTysN :: Arity                     -- N: Number of desired args
               -> TcRhoType
               -> Either Arity              -- Number of missing arrows
                         ([TcSigmaType],    -- Arg types (N or fewer)
                          TcSigmaType)      -- The rest of the type
-- ^ Split off exactly the specified number argument types
-- Returns
--  (Left m) if there are 'm' missing arrows in the type
--  (Right (tys,res)) if the type looks like t1 -> ... -> tn -> res
tcSplitFunTysN n ty
 | n == 0
 = Right ([], ty)
 | Just (arg,res) <- tcSplitFunTy_maybe ty
 = case tcSplitFunTysN (n-1) res of
     Left m            -> Left m
     Right (args,body) -> Right (arg:args, body)
 | otherwise
 = Left n

tcSplitFunTy :: Type -> (Type, Type)
-- Panics (via expectJust) if the type is not a function type
tcSplitFunTy ty = expectJust "tcSplitFunTy" (tcSplitFunTy_maybe ty)

tcFunArgTy :: Type -> Type
-- Argument type of a function type; panics otherwise
tcFunArgTy ty = fst (tcSplitFunTy ty)

tcFunResultTy :: Type -> Type
-- Result type of a function type; panics otherwise
tcFunResultTy ty = snd (tcSplitFunTy ty)
-----------------------
tcSplitAppTy_maybe :: Type -> Maybe (Type, Type)
-- Split off the last type-application argument, expanding synonyms
tcSplitAppTy_maybe ty | Just ty' <- coreView ty = tcSplitAppTy_maybe ty'
tcSplitAppTy_maybe ty = tcRepSplitAppTy_maybe ty

tcSplitAppTy :: Type -> (Type, Type)
-- Panicking version of 'tcSplitAppTy_maybe'
tcSplitAppTy ty = case tcSplitAppTy_maybe ty of
                    Just stuff -> stuff
                    Nothing    -> pprPanic "tcSplitAppTy" (pprType ty)

tcSplitAppTys :: Type -> (Type, [Type])
-- Split (f a b c) into (f, [a,b,c]); arguments come out
-- in left-to-right order
tcSplitAppTys ty
  = go ty []
  where
    go ty args = case tcSplitAppTy_maybe ty of
                   Just (ty', arg) -> go ty' (arg:args)
                   Nothing         -> (ty,args)

-----------------------
tcGetTyVar_maybe :: Type -> Maybe TyVar
-- The type variable at the head, if any (expanding synonyms)
tcGetTyVar_maybe ty | Just ty' <- coreView ty = tcGetTyVar_maybe ty'
tcGetTyVar_maybe (TyVarTy tv) = Just tv
tcGetTyVar_maybe _            = Nothing

tcGetTyVar :: String -> Type -> TyVar
-- Panicking version of 'tcGetTyVar_maybe'; the String tags the panic
tcGetTyVar msg ty = expectJust msg (tcGetTyVar_maybe ty)

tcIsTyVarTy :: Type -> Bool
tcIsTyVarTy ty | Just ty' <- coreView ty = tcIsTyVarTy ty'
tcIsTyVarTy (CastTy ty _) = tcIsTyVarTy ty  -- look through casts, as
                                            -- this is only used for
                                            -- e.g., FlexibleContexts
tcIsTyVarTy (TyVarTy _)   = True
tcIsTyVarTy _             = False
-----------------------
tcSplitDFunTy :: Type -> ([TyVar], [Type], Class, [Type])
-- Split the type of a dictionary function
-- We don't use tcSplitSigmaTy, because a DFun may (with NDP)
-- have non-Pred arguments, such as
--     df :: forall m. (forall b. Eq b => Eq (m b)) -> C m
--
-- Also NB splitFunTys, not tcSplitFunTys;
-- the latter specifically stops at PredTy arguments,
-- and we don't want to do that here
tcSplitDFunTy ty
  = case tcSplitForAllTys ty of { (tvs, rho)   ->
    case splitFunTys rho     of { (theta, tau) ->
    case tcSplitDFunHead tau of { (clas, tys)  ->
    (tvs, theta, clas, tys) }}}

tcSplitDFunHead :: Type -> (Class, [Type])
-- The head of a DFun type is always a class application
tcSplitDFunHead = getClassPredTys

tcSplitMethodTy :: Type -> ([TyVar], PredType, Type)
-- A class method (selector) always has a type like
--   forall as. C as => blah
-- So if the class looks like
--   class C a where
--     op :: forall b. (Eq a, Ix b) => a -> b
-- the class method type looks like
--  op :: forall a. C a => forall b. (Eq a, Ix b) => a -> b
--
-- tcSplitMethodTy just peels off the outer forall and
-- that first predicate
tcSplitMethodTy ty
  | (sel_tyvars,sel_rho) <- tcSplitForAllTys ty
  , Just (first_pred, local_meth_ty) <- tcSplitPredFunTy_maybe sel_rho
  = (sel_tyvars, first_pred, local_meth_ty)
  | otherwise
  = pprPanic "tcSplitMethodTy" (ppr ty)
-----------------------
tcEqKind :: TcKind -> TcKind -> Bool
tcEqKind = tcEqType

tcEqType :: TcType -> TcType -> Bool
-- tcEqType properly implements the same Note [Non-trivial definitional
-- equality] (in TyCoRep) as `eqType`, but Type.eqType believes (* ==
-- Constraint), and that is NOT what we want in the type checker!
tcEqType ty1 ty2
  = isNothing (tc_eq_type coreView ki1 ki2) &&
    isNothing (tc_eq_type coreView ty1 ty2)
  where
    ki1 = typeKind ty1
    ki2 = typeKind ty2

-- | Just like 'tcEqType', but will return True for types of different kinds
-- as long as their non-coercion structure is identical.
tcEqTypeNoKindCheck :: TcType -> TcType -> Bool
tcEqTypeNoKindCheck ty1 ty2
  = isNothing $ tc_eq_type coreView ty1 ty2

-- | Like 'tcEqType', but returns information about whether the difference
-- is visible in the case of a mismatch.
-- @Nothing@    : the types are equal
-- @Just True@  : the types differ, and the point of difference is visible
-- @Just False@ : the types differ, and the point of difference is invisible
tcEqTypeVis :: TcType -> TcType -> Maybe Bool
tcEqTypeVis ty1 ty2
  = tc_eq_type coreView ty1 ty2 <!> invis (tc_eq_type coreView ki1 ki2)
  where
    ki1 = typeKind ty1
    ki2 = typeKind ty2

    -- convert Just True to Just False
    -- (a kind-level difference is never visible)
    invis :: Maybe Bool -> Maybe Bool
    invis = fmap (const False)

(<!>) :: Maybe Bool -> Maybe Bool -> Maybe Bool
-- Combine two difference-reports, preferring a visible difference
Nothing        <!> x         = x
Just True      <!> _         = Just True
Just _vis      <!> Just True = Just True
Just vis       <!> _         = Just vis
infixr 3 <!>
-- | Real worker for 'tcEqType'. No kind check!
-- Returns Nothing when the types are equal; otherwise (Just vis),
-- where vis reports whether the first difference is in a visible position.
tc_eq_type :: (TcType -> Maybe TcType)  -- ^ @coreView@, if you want unwrapping
           -> Type -> Type -> Maybe Bool
tc_eq_type view_fun orig_ty1 orig_ty2 = go True orig_env orig_ty1 orig_ty2
  where
    -- 'vis' tracks whether the current position is visible;
    -- 'env' is a renaming environment matching up bound variables
    go vis env t1 t2 | Just t1' <- view_fun t1 = go vis env t1' t2
    go vis env t1 t2 | Just t2' <- view_fun t2 = go vis env t1 t2'

    go vis env (TyVarTy tv1) (TyVarTy tv2)
      = check vis $ rnOccL env tv1 == rnOccR env tv2

    go vis _   (LitTy lit1) (LitTy lit2)
      = check vis $ lit1 == lit2

    go vis env (ForAllTy (TvBndr tv1 vis1) ty1)
               (ForAllTy (TvBndr tv2 vis2) ty2)
        -- Binder kinds are compared with visibility taken from the binder
      = go (isVisibleArgFlag vis1) env (tyVarKind tv1) (tyVarKind tv2)
          <!> go vis (rnBndr2 env tv1 tv2) ty1 ty2
          <!> check vis (vis1 == vis2)
    go vis env (FunTy arg1 res1) (FunTy arg2 res2)
      = go vis env arg1 arg2 <!> go vis env res1 res2

      -- See Note [Equality on AppTys] in Type
    go vis env (AppTy s1 t1) ty2
      | Just (s2, t2) <- tcRepSplitAppTy_maybe ty2
      = go vis env s1 s2 <!> go vis env t1 t2
    go vis env ty1 (AppTy s2 t2)
      | Just (s1, t1) <- tcRepSplitAppTy_maybe ty1
      = go vis env s1 s2 <!> go vis env t1 t2
    go vis env (TyConApp tc1 ts1) (TyConApp tc2 ts2)
      = check vis (tc1 == tc2) <!> gos (tc_vis vis tc1) env ts1 ts2
    -- Casts are ignored: this is a no-kind-check comparison
    go vis env (CastTy t1 _) t2 = go vis env t1 t2
    go vis env t1 (CastTy t2 _) = go vis env t1 t2
    go _   _   (CoercionTy {}) (CoercionTy {}) = Nothing
    go vis _ _ _ = Just vis   -- structural mismatch

    -- Compare argument lists, pairing each argument with its visibility
    gos _      _   []       []       = Nothing
    gos (v:vs) env (t1:ts1) (t2:ts2) = go v env t1 t2 <!> gos vs env ts1 ts2
    gos (v:_)  _   _        _        = Just v
    gos _      _   _        _        = panic "tc_eq_type"

    -- Visibility of each argument position of a tycon application
    tc_vis :: Bool -> TyCon -> [Bool]
    tc_vis True tc = viss ++ repeat True
       -- the repeat True is necessary because tycons can legitimately
       -- be oversaturated
      where
        bndrs = tyConBinders tc
        viss  = map (isVisibleArgFlag . tyConBinderArgFlag) bndrs
    tc_vis False _ = repeat False  -- if we're not in a visible context, our args
                                   -- aren't either

    check :: Bool -> Bool -> Maybe Bool
    check _   True  = Nothing
    check vis False = Just vis

    orig_env = mkRnEnv2 $ mkInScopeSet $ tyCoVarsOfTypes [orig_ty1, orig_ty2]
-- | Like 'pickyEqTypeVis', but returns a Bool for convenience
pickyEqType :: TcType -> TcType -> Bool
-- Check when two types _look_ the same, _including_ synonyms.
-- So (pickyEqType String [Char]) returns False
-- This ignores kinds and coercions, because this is used only for printing.
pickyEqType ty1 ty2
  = isNothing $
    tc_eq_type (const Nothing) ty1 ty2
    -- (const Nothing) = never expand synonyms
{-
Note [Occurs check expansion]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
(occurCheckExpand tv xi) expands synonyms in xi just enough to get rid
of occurrences of tv outside type function arguments, if that is
possible; otherwise, it returns Nothing.
For example, suppose we have
type F a b = [a]
Then
occurCheckExpand b (F Int b) = Just [Int]
but
occurCheckExpand a (F a Int) = Nothing
We don't promise to do the absolute minimum amount of expanding
necessary, but we try not to do expansions we don't need to. We
prefer doing inner expansions first. For example,
type F a b = (a, Int, a, [a])
type G b = Char
We have
occurCheckExpand b (F (G b)) = F Char
even though we could also expand F to get rid of b.
The two variants of the function are to support TcUnify.checkTauTvUpdate,
which wants to prevent unification with type families. For more on this
point, see Note [Prevent unification with type families] in TcUnify.
See also Note [occurCheckExpand] in TcCanonical
-}
-- | Result of the occurs-check / forall-check performed by
-- 'occurCheckExpand'.  See Note [Occurs check expansion].
data OccCheckResult a
  = OC_OK a      -- ^ Check passed; carries the (possibly expanded) type
  | OC_Forall    -- ^ Check failed: found a forall (impredicativity off)
  | OC_Occurs    -- ^ Check failed: the type variable occurs in the type

instance Functor OccCheckResult where
      fmap = liftM

instance Applicative OccCheckResult where
      pure = OC_OK
      (<*>) = ap

-- The Monad instance propagates the first failure (OC_Forall/OC_Occurs).
instance Monad OccCheckResult where
  OC_OK x       >>= k = k x
  OC_Forall     >>= _ = OC_Forall
  OC_Occurs     >>= _ = OC_Occurs
occurCheckExpand :: DynFlags -> TcTyVar -> Type -> OccCheckResult Type
-- See Note [Occurs check expansion]
-- Check whether
--   a) the given variable occurs in the given type.
--   b) there is a forall in the type (unless we have -XImpredicativeTypes)
--
-- We may have needed to do some type synonym unfolding in order to
-- get rid of the variable (or forall), so we also return the unfolded
-- version of the type, which is guaranteed to be syntactically free
-- of the given type variable.  If the type is already syntactically
-- free of the variable, then the same type is returned.
--
-- NB: in the past we also rejected a SigTv matched with a non-tyvar
--     But it is wrong to reject that for Givens;
--     and SigTv is in any case handled separately by
--        - TcUnify.checkTauTvUpdate (on-the-fly unifier)
--        - TcInteract.canSolveByUnification (main constraint solver)
occurCheckExpand dflags tv ty
  | fast_check ty = return ty       -- Cheap no-expansion pass succeeded
  | otherwise     = go emptyVarEnv ty  -- Slow pass, expanding synonyms as needed
  where
    details = tcTyVarDetails tv

    -- May tv be unified with a polytype?  If so, foralls are acceptable.
    impredicative = canUnifyWithPolyType dflags details

    -- True => fine: tv does not occur, and no offending forall,
    -- without doing any synonym expansion at all
    fast_check (LitTy {})          = True
    fast_check (TyVarTy tv')       = tv /= tv' && fast_check (tyVarKind tv')
    fast_check (TyConApp tc tys)   = all fast_check tys
                                     && (isTauTyCon tc || impredicative)
    fast_check (FunTy a r)         = fast_check a && fast_check r
    fast_check (AppTy fun arg)     = fast_check fun && fast_check arg
    fast_check (ForAllTy (TvBndr tv' _) ty)
                                   = impredicative
                                   && fast_check (tyVarKind tv')
                                   && (tv == tv' || fast_check ty)
                                     -- tv == tv': tv is shadowed underneath
    fast_check (CastTy ty co)      = fast_check ty && fast_check_co co
    fast_check (CoercionTy co)     = fast_check_co co

     -- we really are only doing an occurs check here; no bother about
     -- impredicativity in coercions, as they're inferred
    fast_check_co co = not (tv `elemVarSet` tyCoVarsOfCo co)

    go :: VarEnv TyVar  -- carries mappings necessary because of kind expansion:
                        -- maps an original binder to its kind-rewritten copy
       -> Type -> OccCheckResult Type
    go env (TyVarTy tv')
      | tv == tv'                         = OC_Occurs
      | Just tv'' <- lookupVarEnv env tv' = return (mkTyVarTy tv'')
      | otherwise                         = do { k' <- go env (tyVarKind tv')
                                               ; return (mkTyVarTy $
                                                         setTyVarKind tv' k') }
    go _ ty@(LitTy {}) = return ty
    go env (AppTy ty1 ty2) = do { ty1' <- go env ty1
                                ; ty2' <- go env ty2
                                ; return (mkAppTy ty1' ty2') }
    go env (FunTy ty1 ty2) = do { ty1' <- go env ty1
                                ; ty2' <- go env ty2
                                ; return (mkFunTy ty1' ty2') }
    go env ty@(ForAllTy (TvBndr tv' vis) body_ty)
       | not impredicative = OC_Forall
       | tv == tv'         = return ty   -- tv is shadowed; nothing to do below
       | otherwise         = do { ki' <- go env ki
                                ; let tv'' = setTyVarKind tv' ki'
                                      env' = extendVarEnv env tv' tv''
                                ; body' <- go env' body_ty
                                ; return (ForAllTy (TvBndr tv'' vis) body') }
      where ki = tyVarKind tv'

    -- For a type constructor application, first try expanding away the
    -- offending variable from the arguments.  If that doesn't work, next
    -- see if the type constructor is a type synonym, and if so, expand
    -- it and try again.
    go env ty@(TyConApp tc tys)
      = case do { tys <- mapM (go env) tys
                ; return (mkTyConApp tc tys) } of
          OC_OK ty
            | impredicative || isTauTyCon tc
            -> return ty  -- First try to eliminate the tyvar from the args
            | otherwise
            -> OC_Forall  -- A type synonym with a forall on the RHS
          bad | Just ty' <- coreView ty -> go env ty'
              | otherwise               -> bad
                -- Failing that, try to expand a synonym

    go env (CastTy ty co) =  do { ty' <- go env ty
                                ; co' <- go_co env co
                                ; return (mkCastTy ty' co') }
    go env (CoercionTy co) = do { co' <- go_co env co
                                ; return (mkCoercionTy co') }

    -- go_co mirrors go, structurally rebuilding the coercion.
    go_co env (Refl r ty)               = do { ty' <- go env ty
                                             ; return (mkReflCo r ty') }
      -- Note: Coercions do not contain type synonyms
    go_co env (TyConAppCo r tc args)    = do { args' <- mapM (go_co env) args
                                             ; return (mkTyConAppCo r tc args') }
    go_co env (AppCo co arg)            = do { co' <- go_co env co
                                             ; arg' <- go_co env arg
                                             ; return (mkAppCo co' arg') }
    go_co env co@(ForAllCo tv' kind_co body_co)
      | not impredicative = OC_Forall
      | tv == tv'         = return co   -- tv is shadowed
      | otherwise         = do { kind_co' <- go_co env kind_co
                               ; let tv'' = setTyVarKind tv' $
                                            pFst (coercionKind kind_co')
                                     env' = extendVarEnv env tv' tv''
                               ; body' <- go_co env' body_co
                               ; return (ForAllCo tv'' kind_co' body') }
    go_co env (CoVarCo c)               = do { k' <- go env (varType c)
                                             ; return (mkCoVarCo (setVarType c k')) }
    go_co env (AxiomInstCo ax ind args) = do { args' <- mapM (go_co env) args
                                             ; return (mkAxiomInstCo ax ind args') }
    go_co env (UnivCo p r ty1 ty2)      = do { p' <- go_prov env p
                                             ; ty1' <- go env ty1
                                             ; ty2' <- go env ty2
                                             ; return (mkUnivCo p' r ty1' ty2') }
    go_co env (SymCo co)                = do { co' <- go_co env co
                                             ; return (mkSymCo co') }
    go_co env (TransCo co1 co2)         = do { co1' <- go_co env co1
                                             ; co2' <- go_co env co2
                                             ; return (mkTransCo co1' co2') }
    go_co env (NthCo n co)              = do { co' <- go_co env co
                                             ; return (mkNthCo n co') }
    go_co env (LRCo lr co)              = do { co' <- go_co env co
                                             ; return (mkLRCo lr co') }
    go_co env (InstCo co arg)           = do { co' <- go_co env co
                                             ; arg' <- go_co env arg
                                             ; return (mkInstCo co' arg') }
    go_co env (CoherenceCo co1 co2)     = do { co1' <- go_co env co1
                                             ; co2' <- go_co env co2
                                             ; return (mkCoherenceCo co1' co2') }
    go_co env (KindCo co)               = do { co' <- go_co env co
                                             ; return (mkKindCo co') }
    go_co env (SubCo co)                = do { co' <- go_co env co
                                             ; return (mkSubCo co') }
    go_co env (AxiomRuleCo ax cs)       = do { cs' <- mapM (go_co env) cs
                                             ; return (mkAxiomRuleCo ax cs') }

    -- Provenance of a UnivCo: recurse into any embedded coercion.
    go_prov _   UnsafeCoerceProv    = return UnsafeCoerceProv
    go_prov env (PhantomProv co)    = PhantomProv <$> go_co env co
    go_prov env (ProofIrrelProv co) = ProofIrrelProv <$> go_co env co
    go_prov _   p@(PluginProv _)    = return p
    go_prov _   p@(HoleProv _)      = return p
-- | May a type variable with these details be unified with a polytype?
-- SigTvs never can; TauTvs only under -XImpredicativeTypes; everything
-- else says True.  (We can have non-meta tyvars in given constraints.)
canUnifyWithPolyType :: DynFlags -> TcTyVarDetails -> Bool
canUnifyWithPolyType _      (MetaTv { mtv_info = SigTv }) = False
canUnifyWithPolyType dflags (MetaTv { mtv_info = TauTv })
  = xopt LangExt.ImpredicativeTypes dflags
canUnifyWithPolyType _ _ = True
{- Note [Expanding superclasses]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we expand superclasses, we use the following algorithm:
expand( so_far, pred ) returns the transitive superclasses of pred,
not including pred itself
1. If pred is not a class constraint, return empty set
Otherwise pred = C ts
2. If C is in so_far, return empty set (breaks loops)
3. Find the immediate superclasses constraints of (C ts)
4. For each superclass constraint sc_pred (of form D ss), return (sc_pred : expand( so_far+C, sc_pred ))
Notice that
* With normal Haskell-98 classes, the loop-detector will never bite,
so we'll get all the superclasses.
* Since there is only a finite number of distinct classes, expansion
must terminate.
* The loop breaking is a bit conservative. Notably, a tuple class
could contain many times without threatening termination:
(Eq a, (Ord a, Ix a))
And this is true of any class that we can statically guarantee
as non-recursive (in some sense). For now, we just make a special
case for tuples. Something better would be cool.
See also TcTyDecls.checkClassCycles.
************************************************************************
* *
\subsection{Predicate types}
* *
************************************************************************
Deconstructors and tests on predicate types
Note [Kind polymorphic type classes]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class C f where... -- C :: forall k. k -> Constraint
g :: forall (f::*). C f => f -> f
Here the (C f) in the signature is really (C * f), and we
don't want to complain that the * isn't a type variable!
-}
-- | Is this a class constraint all of whose arguments are type variables?
isTyVarClassPred :: PredType -> Bool
isTyVarClassPred ty =
  maybe False (\(_, args) -> all isTyVarTy args) (getClassPredTys_maybe ty)
-------------------------
-- | Check validity of a class constraint's arguments.  If the Bool is
-- True (flexible contexts) everything is accepted.  Otherwise each
-- visible type (not kind) argument must be headed by a type variable:
-- (Eq a) accepted, (Eq (f a)) accepted, but (Eq Int) rejected.
-- This lives here rather than in TcValidity because it is called from
-- TcSimplify, which itself is imported by TcValidity.
checkValidClsArgs :: Bool -> Class -> [KindOrType] -> Bool
checkValidClsArgs flexible_contexts cls kts
  = flexible_contexts || all hasTyVarHead visible_tys
  where
    visible_tys = filterOutInvisibleTypes (classTyCon cls) kts

-- | True of types of the form (a t1 .. tn), where 'a' is a type
-- variable: the Haskell-98 shape for a class-constraint argument.
hasTyVarHead :: Type -> Bool
hasTyVarHead ty
  | tcIsTyVarTy ty                          = True
  | Just (fun, _) <- tcSplitAppTy_maybe ty  = hasTyVarHead fun
  | otherwise                               = False
-- | The predicate witnessed by an evidence variable, or Nothing if the
-- variable's type is not a predicate type.
evVarPred_maybe :: EvVar -> Maybe PredType
evVarPred_maybe v
  | isPredTy pred_ty = Just pred_ty
  | otherwise        = Nothing
  where
    pred_ty = varType v
evVarPred :: EvVar -> PredType
-- ^ The predicate witnessed by an evidence variable.  In debug builds
-- this checks (via 'evVarPred_maybe') that the variable's type really
-- is a predicate, and panics if not; otherwise it just returns the type.
evVarPred var
  | debugIsOn
  = case evVarPred_maybe var of
      Just pred -> pred
      -- Panic tag fixed: it used to say "tcEvVarPred", a function that
      -- does not exist, which made the panic hard to locate.
      Nothing   -> pprPanic "evVarPred" (ppr var <+> ppr (varType var))
  | otherwise
  = varType var
------------------
-- | When inferring types, should we quantify over a given predicate?
-- Generally true of classes; generally false of equality constraints.
-- Equality constraints that mention quantified type variables and
-- implicit variables complicate the story. See Notes
-- [Inheriting implicit parameters] and [Quantifying over equality constraints]
pickQuantifiablePreds
  :: TyVarSet         -- Quantifying over these
  -> TcThetaType      -- Proposed constraints to quantify
  -> TcThetaType      -- A subset that we can actually quantify
-- This function decides whether a particular constraint should be
-- quantified over, given the type variables that are being quantified
pickQuantifiablePreds qtvs theta
  = let flex_ctxt = True in  -- Quantify over non-tyvar constraints, even without
                             -- -XFlexibleContexts: see Trac #10608, #10351
         -- flex_ctxt <- xoptM Opt_FlexibleContexts
    filter (pick_me flex_ctxt) theta
  where
    -- One predicate at a time; dispatch on its classification.
    pick_me flex_ctxt pred
      = case classifyPredType pred of

          ClassPred cls tys
            | Just {} <- isCallStackPred pred
              -- NEVER infer a CallStack constraint
              -- Otherwise, we let the constraints bubble up to be
              -- solved from the outer context, or be defaulted when we
              -- reach the top-level.
              -- see Note [Overview of implicit CallStacks]
            -> False

            | isIPClass cls -> True -- See note [Inheriting implicit parameters]

            | otherwise
            -> pick_cls_pred flex_ctxt cls tys

          EqPred ReprEq ty1 ty2 -> pick_cls_pred flex_ctxt coercibleClass [ty1, ty2]
            -- representational equality is like a class constraint

          EqPred NomEq ty1 ty2  -> quant_fun ty1 || quant_fun ty2
          IrredPred ty          -> tyCoVarsOfType ty `intersectsVarSet` qtvs

    -- Quantify over a class-like constraint iff it mentions a quantified
    -- tyvar AND its arguments would pass validity checking.
    pick_cls_pred flex_ctxt cls tys
      = tyCoVarsOfTypes tys `intersectsVarSet` qtvs
        && (checkValidClsArgs flex_ctxt cls tys)
           -- Only quantify over predicates that checkValidType
           -- will pass!  See Trac #10351.

    -- See Note [Quantifying over equality constraints]
    -- Is this a type-family application mentioning a quantified tyvar?
    quant_fun ty
      = case tcSplitTyConApp_maybe ty of
          Just (tc, tys) | isTypeFamilyTyCon tc
                         -> tyCoVarsOfTypes tys `intersectsVarSet` qtvs
          _ -> False
pickCapturedPreds
  :: TyVarSet         -- Quantifying over these
  -> TcThetaType      -- Proposed constraints to quantify
  -> TcThetaType      -- A subset that we can actually quantify
-- ^ A simpler version of 'pickQuantifiablePreds': keep implicit-parameter
-- constraints, plus any constraint that mentions one of the quantified
-- tyvars.  Used to winnow the inferred constraints of a group of
-- bindings into those for one particular identifier.
pickCapturedPreds qtvs theta = [ pred | pred <- theta, captures pred ]
  where
    captures pred = isIPPred pred
                 || tyCoVarsOfType pred `intersectsVarSet` qtvs
-- Superclasses

-- | A predicate paired with its transitive superclasses (not including
-- itself); see 'transSuperClasses'.
type PredWithSCs = (PredType, [PredType])

mkMinimalBySCs :: [PredType] -> [PredType]
-- Remove predicates that can be deduced from others by superclasses
-- Result is a subset of the input
mkMinimalBySCs ptys = go preds_with_scs []
  where
    preds_with_scs :: [PredWithSCs]
    preds_with_scs = [ (pred, transSuperClasses pred)
                     | pred <- ptys ]

    go :: [PredWithSCs]   -- Work list
       -> [PredWithSCs]   -- Accumulating result
       -> [PredType]
    go [] min_preds = map fst min_preds
    go (work_item@(p,_) : work_list) min_preds
      -- Drop p if it is a superclass of something still on the work
      -- list, or of something we have already decided to keep
      | p `in_cloud` work_list || p `in_cloud` min_preds
      = go work_list min_preds
      | otherwise
      = go work_list (work_item : min_preds)

    -- Is p among the (transitive) superclasses of any of these preds?
    in_cloud :: PredType -> [PredWithSCs] -> Bool
    in_cloud p ps = or [ p `eqType` p' | (_, scs) <- ps, p' <- scs ]
transSuperClasses :: PredType -> [PredType]
-- (transSuperClasses p) returns (p's superclasses) not including p
-- Stop if you encounter the same class again
-- See Note [Expanding superclasses]
transSuperClasses p
  = go emptyNameSet p
  where
    go :: NameSet -> PredType -> [PredType]
    -- The NameSet holds class names already seen on this path; it is the
    -- loop breaker for recursive superclass contexts.
    go rec_clss p
       | ClassPred cls tys <- classifyPredType p
       , let cls_nm = className cls
       , not (cls_nm `elemNameSet` rec_clss)   -- break the loop
       , let rec_clss' | isCTupleClass cls = rec_clss
                         -- tuple classes are treated specially; see
                         -- Note [Expanding superclasses]
                       | otherwise         = rec_clss `extendNameSet` cls_nm
       = [ p' | sc <- immSuperClasses cls tys
              , p' <- sc : go rec_clss' sc ]

       | otherwise   -- not a class constraint, or loop detected
       = []
-- | The immediate superclass constraints of (cls tys): the class's
-- superclass theta with its tyvars instantiated to tys.
immSuperClasses :: Class -> [Type] -> [PredType]
immSuperClasses cls tys = substTheta (zipTvSubst cls_tvs tys) sc_theta
  where
    (cls_tvs, sc_theta, _, _) = classBigSig cls
isImprovementPred :: PredType -> Bool
-- ^ Either it's an equality (between distinct types), or it has some
-- functional dependency.  Irreducible predicates say True because they
-- might have equalities after reduction.
isImprovementPred ty = go (classifyPredType ty)
  where
    go (EqPred NomEq lhs rhs) = not (lhs `tcEqType` rhs)
    go (EqPred ReprEq _ _)    = False
    go (ClassPred cls _)      = classHasFds cls
    go (IrredPred {})         = True
{-
Note [Inheriting implicit parameters]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this:
f x = (x::Int) + ?y
where f is *not* a top-level binding.
From the RHS of f we'll get the constraint (?y::Int).
There are two types we might infer for f:
f :: Int -> Int
(so we get ?y from the context of f's definition), or
f :: (?y::Int) => Int -> Int
At first you might think the first was better, because then
?y behaves like a free variable of the definition, rather than
having to be passed at each call site. But of course, the WHOLE
IDEA is that ?y should be passed at each call site (that's what
dynamic binding means) so we'd better infer the second.
BOTTOM LINE: when *inferring types* you must quantify over implicit
parameters, *even if* they don't mention the bound type variables.
Reason: because implicit parameters, uniquely, have local instance
declarations. See pickQuantifiablePreds.
Note [Quantifying over equality constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Should we quantify over an equality constraint (s ~ t)? In general, we don't.
Doing so may simply postpone a type error from the function definition site to
its call site. (At worst, imagine (Int ~ Bool)).
However, consider this
forall a. (F [a] ~ Int) => blah
Should we quantify over the (F [a] ~ Int)?  Perhaps yes, because at the call
site we will know 'a', and perhaps we have instance F [Bool] = Int.
So we *do* quantify over a type-family equality where the arguments mention
the quantified variables.
************************************************************************
* *
\subsection{Predicates}
* *
************************************************************************
-}
isSigmaTy :: TcType -> Bool
-- ^ True of any qualified type.  It doesn't *necessarily* have any
-- foralls, e.g.
--        f :: (?x::Int) => Int -> Int
isSigmaTy ty = case coreView ty of
  Just ty' -> isSigmaTy ty'
  Nothing  -> case ty of
    ForAllTy {} -> True
    FunTy a _   -> isPredTy a
    _           -> False

isRhoTy :: TcType -> Bool    -- True of TcRhoTypes; see Note [TcRhoType]
isRhoTy ty = case coreView ty of
  Just ty' -> isRhoTy ty'
  Nothing  -> case ty of
    ForAllTy {} -> False
    FunTy a r   -> not (isPredTy a) && isRhoTy r
    _           -> True

-- | Like 'isRhoTy', but also says 'True' for 'Infer' types
isRhoExpTy :: ExpType -> Bool
isRhoExpTy et = case et of
  Check ty -> isRhoTy ty
  Infer {} -> True

isOverloadedTy :: Type -> Bool
-- ^ Yes for a type of a function that might require evidence-passing.
-- Used only by bindLocalMethods.
isOverloadedTy ty = case coreView ty of
  Just ty' -> isOverloadedTy ty'
  Nothing  -> case ty of
    ForAllTy _ body -> isOverloadedTy body
    FunTy a _       -> isPredTy a
    _               -> False
isFloatTy, isDoubleTy, isIntegerTy, isIntTy, isWordTy, isBoolTy,
    isUnitTy, isCharTy, isAnyTy :: Type -> Bool
-- Each tests for a saturated application of one well-known TyCon;
-- newtypes are opaque to these tests (see 'is_tc').
isFloatTy   ty = is_tc floatTyConKey   ty
isDoubleTy  ty = is_tc doubleTyConKey  ty
isIntegerTy ty = is_tc integerTyConKey ty
isIntTy     ty = is_tc intTyConKey     ty
isWordTy    ty = is_tc wordTyConKey    ty
isBoolTy    ty = is_tc boolTyConKey    ty
isUnitTy    ty = is_tc unitTyConKey    ty
isCharTy    ty = is_tc charTyConKey    ty
isAnyTy     ty = is_tc anyTyConKey     ty

-- | Does a type represent a floating-point number?
isFloatingTy :: Type -> Bool
isFloatingTy ty
  | isFloatTy ty = True
  | otherwise    = isDoubleTy ty

-- | Is a type 'String'?
isStringTy :: Type -> Bool
isStringTy ty
  | Just (tc, [elt_ty]) <- tcSplitTyConApp_maybe ty
  = tc == listTyCon && isCharTy elt_ty
  | otherwise
  = False

-- | Is a type a 'CallStack'?
isCallStackTy :: Type -> Bool
isCallStackTy ty = case tyConAppTyCon_maybe ty of
  Just tc -> tc `hasKey` callStackTyConKey
  Nothing -> False

-- | Is a 'PredType' a 'CallStack' implicit parameter?
--
-- If so, return the name of the parameter.
isCallStackPred :: PredType -> Maybe FastString
isCallStackPred pred = case isIPPred_maybe pred of
  Just (str, ty) | isCallStackTy ty -> Just str
  _                                 -> Nothing

is_tc :: Unique -> Type -> Bool
-- ^ Is this a saturated application of the TyCon with the given unique?
-- Newtypes are opaque to this.
is_tc uniq ty
  | Just (tc, _) <- tcSplitTyConApp_maybe ty = getUnique tc == uniq
  | otherwise                                = False
-- | Does the given tyvar appear in the given type outside of any
-- non-newtypes? Assume we're looking for @a@. Says "yes" for
-- @a@, @N a@, @b a@, @a b@, @b (N a)@. Says "no" for
-- @[a]@, @Maybe a@, @T a@, where @N@ is a newtype and @T@ is a datatype.
isTyVarExposed :: TcTyVar -> TcType -> Bool
isTyVarExposed tv = go
  where
    go (TyVarTy tv')     = tv == tv'
    go (TyConApp tc tys) = isNewTyCon tc && any go tys
                           -- only newtypes are transparent here
    go (LitTy {})        = False
    go (AppTy fun arg)   = go fun || go arg
    go (ForAllTy {})     = False
    go (FunTy {})        = False
    go (CastTy ty _)     = go ty
    go (CoercionTy {})   = False
-- | Does the given tyvar appear under a type generative w.r.t.
-- representational equality? See Note [Occurs check error] in
-- TcCanonical for the motivation for this function.
isTyVarUnderDatatype :: TcTyVar -> TcType -> Bool
isTyVarUnderDatatype tv = go False
  where
    -- under_dt: have we already passed under a representationally
    -- generative constructor (or a function arrow) on this path?
    go under_dt ty | Just ty' <- coreView ty = go under_dt ty'
    go under_dt (TyVarTy tv') = under_dt && (tv == tv')
    go under_dt (TyConApp tc tys) = let under_dt' = under_dt ||
                                                    isGenerativeTyCon tc
                                                      Representational
                                    in any (go under_dt') tys
    go _         (LitTy {}) = False
    go _         (FunTy arg res) = go True arg || go True res
                                   -- (->): both sides count as "under"
    go under_dt  (AppTy fun arg) = go under_dt fun || go under_dt arg
    go under_dt  (ForAllTy (TvBndr tv' _) inner_ty)
      | tv' == tv = False          -- tv is shadowed here
      | otherwise = go under_dt inner_ty
    go under_dt (CastTy ty _)   = go under_dt ty
    go _        (CoercionTy {}) = False
-- | Is this type "rigid": headed by a generative TyCon, an AppTy, or a
-- forall?
isRigidTy :: TcType -> Bool
isRigidTy ty = case tcSplitTyConApp_maybe ty of
  Just (tc, _) -> isGenerativeTyCon tc Nominal
  Nothing      -> case tcSplitAppTy_maybe ty of
    Just {} -> True
    Nothing -> isForAllTy ty
isRigidEqPred :: TcLevel -> PredTree -> Bool
-- ^ True of all Nominal equalities that are solidly insoluble
-- This means all equalities *except*
--    * Meta-tv non-SigTv on LHS
--    * Meta-tv SigTv on LHS, tyvar on right
isRigidEqPred tc_lvl (EqPred NomEq ty1 _)
    -- LHS is a tyvar: rigid unless it is a meta-tyvar that cannot be
    -- touched at this level (NB: the touchable case also counts as rigid
    -- here -- see callers for why; TODO(review) confirm intent)
  | Just tv1 <- tcGetTyVar_maybe ty1
  = ASSERT2( isTcTyVar tv1, ppr tv1 )
    not (isMetaTyVar tv1) || isTouchableMetaTyVar tc_lvl tv1

  | otherwise  -- LHS is not a tyvar
  = True

isRigidEqPred _ _ = False  -- Not an equality
{-
************************************************************************
* *
\subsection{Transformation of Types to TcTypes}
* *
************************************************************************
-}
toTcType :: Type -> TcType
-- ^ Convert a Type into a TcType whose free type variables are TcTyVars.
-- The constraint solver expects EvVars to have TcType.  A bit tiresome;
-- but one day the two types may be entirely separate, in which case
-- we'll definitely need to do this.
toTcType ty = runIdentity (to_tc_type emptyVarSet ty)

toTcTypeBag :: Bag EvVar -> Bag EvVar
-- ^ Rewrite the kind of every variable in the bag with 'toTcType',
-- so all TyVars in those kinds become TcTyVars.
toTcTypeBag evvars = mapBag tc_kind evvars
  where
    tc_kind v = setTyVarKind v (toTcType (tyVarKind v))
to_tc_mapper :: TyCoMapper VarSet Identity
-- Worker for 'toTcType': rewrites every tyvar into a TcTyVar (a vanilla
-- skolem).  The VarSet accumulates binders converted so far, so that
-- occurrences are rewritten consistently with their binding sites.
to_tc_mapper
  = TyCoMapper { tcm_smart    = False -- more efficient not to use smart ctors
               , tcm_tyvar    = tyvar
               , tcm_covar    = covar
               , tcm_hole     = hole
               , tcm_tybinder = tybinder }
  where
    tyvar :: VarSet -> TyVar -> Identity Type
    -- Tyvar occurrence: reuse a converted binder if present; keep an
    -- existing TcTyVar (converting its kind); otherwise make a fresh
    -- vanilla-skolem TcTyVar with the same Name.
    tyvar ftvs tv
      | Just var <- lookupVarSet ftvs tv = return $ TyVarTy var
      | isTcTyVar tv = TyVarTy <$> updateTyVarKindM (to_tc_type ftvs) tv
      | otherwise
      = do { kind' <- to_tc_type ftvs (tyVarKind tv)
           ; return $ TyVarTy $ mkTcTyVar (tyVarName tv) kind' vanillaSkolemTv }

    covar :: VarSet -> CoVar -> Identity Coercion
    -- Coercion variable: convert the type it carries.
    covar ftvs cv
      | Just var <- lookupVarSet ftvs cv = return $ CoVarCo var
      | otherwise = CoVarCo <$> updateVarTypeM (to_tc_type ftvs) cv

    hole :: VarSet -> CoercionHole -> Role -> Type -> Type
         -> Identity Coercion
    -- Coercion hole: convert both end-point types.
    hole ftvs h r t1 t2 = mkHoleCo h r <$> to_tc_type ftvs t1
                                       <*> to_tc_type ftvs t2

    tybinder :: VarSet -> TyVar -> ArgFlag -> Identity (VarSet, TyVar)
    -- Binding site: make a TcTyVar (keeping the binder's Name, so later
    -- occurrences find it in the set) and record it.
    tybinder ftvs tv _vis = do { kind' <- to_tc_type ftvs (tyVarKind tv)
                               ; let tv' = mkTcTyVar (tyVarName tv) kind'
                                                     vanillaSkolemTv
                               ; return (ftvs `extendVarSet` tv', tv') }

to_tc_type :: VarSet -> Type -> Identity TcType
-- Apply 'to_tc_mapper' over a whole type.
to_tc_type = mapType to_tc_mapper
{-
************************************************************************
* *
\subsection{Misc}
* *
************************************************************************
Note [Visible type application]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
GHC implements a generalisation of the algorithm described in the
"Visible Type Application" paper (available from
http://www.cis.upenn.edu/~sweirich/publications.html). A key part
of that algorithm is to distinguish user-specified variables from inferred
variables. For example, the following should typecheck:
f :: forall a b. a -> b -> b
f = const id
g = const id
x = f @Int @Bool 5 False
y = g 5 @Bool False
The idea is that we wish to allow visible type application when we are
instantiating a specified, fixed variable. In practice, specified, fixed
variables are either written in a type signature (or
annotation), OR are imported from another module. (We could do better here,
for example by doing SCC analysis on parts of a module and considering any
type from outside one's SCC to be fully specified, but this is very confusing to
users. The simple rule above is much more straightforward and predictable.)
So, both of f's quantified variables are specified and may be instantiated.
But g has no type signature, so only id's variable is specified (because id
is imported). We write the type of g as forall {a}. a -> forall b. b -> b.
Note that the a is in braces, meaning it cannot be instantiated with
visible type application.
Tracking specified vs. inferred variables is done conveniently by a field
in TyBinder.
-}
deNoteType :: Type -> Type
-- ^ Remove all *outermost* type synonyms and other coreView expansions.
deNoteType ty = maybe ty deNoteType (coreView ty)
{-
Find the free tycons and classes of a type. This is used in the front
end of the compiler.
-}
{-
************************************************************************
* *
\subsection[TysWiredIn-ext-type]{External types}
* *
************************************************************************
The compiler's foreign function interface supports the passing of a
restricted set of types as arguments and results (the restricting factor
being the set of types that can be marshalled across the FFI boundary).
-}
tcSplitIOType_maybe :: Type -> Maybe (TyCon, Type)
-- ^ (tcSplitIOType_maybe t) returns Just (IO, t') if t has the form
-- (IO t'); returns Nothing otherwise.
-- (The old comment mentioned a coercion; none is returned.)
tcSplitIOType_maybe ty
  | Just (io_tycon, [io_res_ty]) <- tcSplitTyConApp_maybe ty
  , io_tycon `hasKey` ioTyConKey
  = Just (io_tycon, io_res_ty)
  | otherwise
  = Nothing
isFFITy :: Type -> Bool
-- ^ True for any type that can possibly be an arg or result of an FFI call.
isFFITy = isValid . checkRepTyCon legalFFITyCon

isFFIArgumentTy :: DynFlags -> Safety -> Type -> Validity
-- ^ Checks for valid argument type for a 'foreign import'.
isFFIArgumentTy dflags safety = checkRepTyCon (legalOutgoingTyCon dflags safety)

isFFIExternalTy :: Type -> Validity
-- ^ Types that are allowed as arguments of a 'foreign export'.
isFFIExternalTy = checkRepTyCon legalFEArgTyCon

isFFIImportResultTy :: DynFlags -> Type -> Validity
-- ^ Checks for valid result type of a 'foreign import'.
isFFIImportResultTy dflags = checkRepTyCon (legalFIResultTyCon dflags)

isFFIExportResultTy :: Type -> Validity
-- ^ Checks for valid result type of a 'foreign export'.
isFFIExportResultTy = checkRepTyCon legalFEResultTyCon
isFFIDynTy :: Type -> Type -> Validity
-- The type in a foreign import dynamic must be Ptr, FunPtr, or a newtype of
-- either, and the wrapped function type must be equal to the given type.
-- We assume that all types have been run through normaliseFfiType, so we don't
-- need to worry about expanding newtypes here.
isFFIDynTy expected ty
    -- Note [Foreign import dynamic]
    -- In the example below, expected would be 'CInt -> IO ()', while ty would
    -- be 'FunPtr (CDouble -> IO ())'.
  | Just (tc, [ty']) <- splitTyConApp_maybe ty
  , tyConUnique tc `elem` [ptrTyConKey, funPtrTyConKey]
  , eqType ty' expected   -- the wrapped type must match exactly
  = IsValid
  | otherwise
  = NotValid (vcat [ text "Expected: Ptr/FunPtr" <+> pprParendType expected <> comma
                   , text " Actual:" <+> ppr ty ])
isFFILabelTy :: Type -> Validity
-- ^ The type of a foreign label must be Ptr, FunPtr, or a newtype of either.
isFFILabelTy ty = checkRepTyCon is_ptr_like ty
  where
    is_ptr_like tc
      | tc `hasKey` funPtrTyConKey = IsValid
      | tc `hasKey` ptrTyConKey    = IsValid
      | otherwise
      = NotValid (text "A foreign-imported address (via &foo) must have type (Ptr a) or (FunPtr a)")
isFFIPrimArgumentTy :: DynFlags -> Type -> Validity
-- ^ Checks for valid argument type for a 'foreign import prim'.
-- Currently the arguments must all be simple unlifted types, or the
-- well-known type Any, which can be used to pass the address of a
-- Haskell object on the heap to the foreign function.
isFFIPrimArgumentTy dflags ty =
  if isAnyTy ty then IsValid else checkRepTyCon (legalFIPrimArgTyCon dflags) ty

isFFIPrimResultTy :: DynFlags -> Type -> Validity
-- ^ Checks for valid result type for a 'foreign import prim'.
-- Currently it must be an unlifted type, including unboxed tuples,
-- or the well-known type Any.
isFFIPrimResultTy dflags ty =
  if isAnyTy ty then IsValid else checkRepTyCon (legalFIPrimResultTyCon dflags) ty

isFunPtrTy :: Type -> Bool
-- ^ Is this a saturated application of FunPtr?
isFunPtrTy ty = case splitTyConApp_maybe ty of
  Just (tc, [_]) -> tc `hasKey` funPtrTyConKey
  _              -> False
-- normaliseFfiType gets run before checkRepTyCon, so we don't
-- need to worry about looking through newtypes or type functions
-- here; that's already been taken care of.
checkRepTyCon :: (TyCon -> Validity) -> Type -> Validity
-- ^ Split the type into a TyCon application and apply the given check
-- to the TyCon.  Non-TyCon-application types and (out-of-scope-
-- constructor) newtypes are rejected with an explanatory message.
checkRepTyCon check_tc ty = case splitTyConApp_maybe ty of
    Nothing -> NotValid (quotes (ppr ty) <+> text "is not a data type")
    Just (tc, tys)
      | isNewTyCon tc
      -> NotValid (hang msg 2 (mk_nt_reason tc tys $$ nt_fix))
      | otherwise
      -> case check_tc tc of
           IsValid        -> IsValid
           NotValid extra -> NotValid (msg $$ extra)
  where
    msg = quotes (ppr ty) <+> text "cannot be marshalled in a foreign call"

    mk_nt_reason tc tys
      | null tys  = text "because its data constructor is not in scope"
      | otherwise = text "because the data constructor for"
                    <+> quotes (ppr tc) <+> text "is not in scope"

    nt_fix = text "Possible fix: import the data constructor to bring it into scope"
{-
Note [Foreign import dynamic]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A dynamic stub must be of the form 'FunPtr ft -> ft' where ft is any foreign
type. Similarly, a wrapper stub must be of the form 'ft -> IO (FunPtr ft)'.
We use isFFIDynTy to check whether a signature is well-formed. For example,
given a (illegal) declaration like:
foreign import ccall "dynamic"
foo :: FunPtr (CDouble -> IO ()) -> CInt -> IO ()
isFFIDynTy will compare the 'FunPtr' type 'CDouble -> IO ()' with the curried
result type 'CInt -> IO ()', and return False, as they are not equal.
----------------------------------------------
These chaps do the work; they are not exported
----------------------------------------------
-}
legalFEArgTyCon :: TyCon -> Validity
-- ^ Argument TyCons allowed in a 'foreign export'.  It's illegal to make
-- foreign exports that take unboxed arguments; the RTS API currently
-- can't invoke such things. --SDM 7/2000
legalFEArgTyCon = boxedMarshalableTyCon

legalFIResultTyCon :: DynFlags -> TyCon -> Validity
-- ^ Result TyCons allowed in a 'foreign import': () or any marshalable TyCon.
legalFIResultTyCon dflags tc
  | tc == unitTyCon = IsValid
  | otherwise       = marshalableTyCon dflags tc

legalFEResultTyCon :: TyCon -> Validity
-- ^ Result TyCons allowed in a 'foreign export': () or any boxed
-- marshalable TyCon.
legalFEResultTyCon tc
  | tc == unitTyCon = IsValid
  | otherwise       = boxedMarshalableTyCon tc

legalOutgoingTyCon :: DynFlags -> Safety -> TyCon -> Validity
-- ^ Checks validity of types going from Haskell -> external world.
-- (The Safety argument is currently unused.)
legalOutgoingTyCon dflags _ tc = marshalableTyCon dflags tc

legalFFITyCon :: TyCon -> Validity
-- ^ True for any TyCon that can possibly be an arg or result of an FFI call.
legalFFITyCon tc
  | isUnliftedTyCon tc = IsValid
  | tc == unitTyCon    = IsValid
  | otherwise          = boxedMarshalableTyCon tc
marshalableTyCon :: DynFlags -> TyCon -> Validity
-- ^ A TyCon marshalable across the FFI: either a simple unlifted TyCon
-- (requires -XUnliftedFFITypes) or one of the boxed marshalable ones.
marshalableTyCon dflags tc
  | unlifted_ok = validIfUnliftedFFITypes dflags
  | otherwise   = boxedMarshalableTyCon tc
  where
    unlifted_ok = isUnliftedTyCon tc
               && not (isUnboxedTupleTyCon tc)
               && non_void
    non_void = case tyConPrimRep tc of  -- Note [Marshalling VoidRep]
                 VoidRep -> False
                 _       -> True
boxedMarshalableTyCon :: TyCon -> Validity
-- ^ Is this one of the fixed set of boxed TyCons that the FFI can marshal?
boxedMarshalableTyCon tc
  | getUnique tc `elem` marshalable_keys = IsValid
  | otherwise                            = NotValid empty
  where
    marshalable_keys
      = [ intTyConKey, int8TyConKey, int16TyConKey
        , int32TyConKey, int64TyConKey
        , wordTyConKey, word8TyConKey, word16TyConKey
        , word32TyConKey, word64TyConKey
        , floatTyConKey, doubleTyConKey
        , ptrTyConKey, funPtrTyConKey
        , charTyConKey
        , stablePtrTyConKey
        , boolTyConKey
        ]
legalFIPrimArgTyCon :: DynFlags -> TyCon -> Validity
-- ^ Check args of 'foreign import prim': only simple unlifted types
-- are allowed.  Strictly speaking it is unnecessary to ban unboxed
-- tuples here since currently they're of the wrong kind to use in
-- function args anyway.
legalFIPrimArgTyCon dflags tc
  | isUnliftedTyCon tc && not (isUnboxedTupleTyCon tc)
  = validIfUnliftedFFITypes dflags
  | otherwise
  = NotValid unlifted_only

legalFIPrimResultTyCon :: DynFlags -> TyCon -> Validity
-- ^ Check result type of 'foreign import prim': simple unlifted types
-- and also unboxed tuple result types '... -> (# , , #)'.
legalFIPrimResultTyCon dflags tc
  | isUnliftedTyCon tc && rep_ok
  = validIfUnliftedFFITypes dflags
  | otherwise
  = NotValid unlifted_only
  where
    rep_ok = isUnboxedTupleTyCon tc
          || case tyConPrimRep tc of  -- Note [Marshalling VoidRep]
               VoidRep -> False
               _       -> True
unlifted_only :: MsgDoc
-- Shared error message for the 'foreign import prim' checks above.
unlifted_only = text "foreign import prim only accepts simple unlifted types"

validIfUnliftedFFITypes :: DynFlags -> Validity
-- Valid exactly when -XUnliftedFFITypes is enabled.
validIfUnliftedFFITypes dflags =
  if xopt LangExt.UnliftedFFITypes dflags
    then IsValid
    else NotValid (text "To marshal unlifted types, use UnliftedFFITypes")
{-
Note [Marshalling VoidRep]
~~~~~~~~~~~~~~~~~~~~~~~~~~
We don't treat State# (whose PrimRep is VoidRep) as marshalable.
In turn that means you can't write
foreign import foo :: Int -> State# RealWorld
Reason: the back end falls over with panic "primRepHint:VoidRep";
and there is no compelling reason to permit it
-}
{-
************************************************************************
* *
The "Paterson size" of a type
* *
************************************************************************
-}
{-
Note [Paterson conditions on PredTypes]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We are considering whether *class* constraints terminate
(see Note [Paterson conditions]). Precisely, the Paterson conditions
would have us check that "the constraint has fewer constructors and variables
(taken together and counting repetitions) than the head.".
However, we can be a bit more refined by looking at which kind of constraint
this actually is. There are two main tricks:
1. It seems like it should be OK not to count the tuple type constructor
for a PredType like (Show a, Eq a) :: Constraint, since we don't
count the "implicit" tuple in the ThetaType itself.
In fact, the Paterson test just checks *each component* of the top level
ThetaType against the size bound, one at a time. By analogy, it should be
OK to return the size of the *largest* tuple component as the size of the
whole tuple.
2. Once we get into an implicit parameter or equality we
can't get back to a class constraint, so it's safe
to say "size 0". See Trac #4200.
NB: we don't want to detect PredTypes in sizeType (and then call
sizePred on them), or we might get an infinite loop if that PredType
is irreducible. See Trac #5581.
-}
type TypeSize = IntWithInf
sizeType :: Type -> TypeSize
-- Size of a type: the number of variables and constructors
-- Ignore kinds altogether
-- Returns 'infinity' for type-family applications, since those can expand
-- without bound (see Note [Paterson conditions on PredTypes] above).
sizeType = go
  where
    go ty | Just exp_ty <- coreView ty = go exp_ty  -- look through synonyms first
    go (TyVarTy {}) = 1
    go (TyConApp tc tys)
      | isTypeFamilyTyCon tc = infinity -- Type-family applications can
                                        -- expand to any arbitrary size
      | otherwise = sizeTypes (filterOutInvisibleTypes tc tys) + 1
    go (LitTy {}) = 1
    go (FunTy arg res) = go arg + go res + 1
    go (AppTy fun arg) = go fun + go arg
    go (ForAllTy (TvBndr tv vis) ty)
      | isVisibleArgFlag vis = go (tyVarKind tv) + go ty + 1  -- visible binder: its kind counts too
      | otherwise = go ty + 1  -- invisible binder: count the binder, ignore its kind
    go (CastTy ty _) = go ty   -- casts are size-transparent
    go (CoercionTy {}) = 0
-- | Total 'sizeType' over a list of types.
sizeTypes :: [Type] -> TypeSize
sizeTypes = sum . map sizeType
| {
"content_hash": "1816e3d48d5e5d6bc95716b8f70b87fd",
"timestamp": "",
"source": "github",
"line_count": 2534,
"max_line_length": 107,
"avg_line_length": 39.53235990528808,
"alnum_prop": 0.595827302221113,
"repo_name": "vTurbine/ghc",
"id": "aa8ca712f59d79ce0f03fff05f353e0482d2d9af",
"size": "100175",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "compiler/typecheck/TcType.hs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "8740"
},
{
"name": "Batchfile",
"bytes": "394"
},
{
"name": "C",
"bytes": "2616660"
},
{
"name": "C++",
"bytes": "79740"
},
{
"name": "CSS",
"bytes": "984"
},
{
"name": "DTrace",
"bytes": "3887"
},
{
"name": "Emacs Lisp",
"bytes": "734"
},
{
"name": "Gnuplot",
"bytes": "103851"
},
{
"name": "Groff",
"bytes": "3840"
},
{
"name": "HTML",
"bytes": "6144"
},
{
"name": "Haskell",
"bytes": "20228546"
},
{
"name": "Haxe",
"bytes": "218"
},
{
"name": "Logos",
"bytes": "128668"
},
{
"name": "M4",
"bytes": "52384"
},
{
"name": "Makefile",
"bytes": "547782"
},
{
"name": "Objective-C",
"bytes": "22631"
},
{
"name": "Objective-C++",
"bytes": "535"
},
{
"name": "Pascal",
"bytes": "114241"
},
{
"name": "Perl",
"bytes": "23150"
},
{
"name": "Perl6",
"bytes": "42973"
},
{
"name": "PostScript",
"bytes": "63"
},
{
"name": "Python",
"bytes": "120983"
},
{
"name": "Shell",
"bytes": "77815"
},
{
"name": "TeX",
"bytes": "667"
},
{
"name": "Terra",
"bytes": "419413"
},
{
"name": "Yacc",
"bytes": "62735"
}
],
"symlink_target": ""
} |
import styled from 'styled-components';
// Multi-line text input used on the writing page: suppresses the default
// browser focus outline and draws a subtle dotted border instead.
const TextArea = styled.textarea`
  outline: none;
  border: 1px dotted #999;
`;
export default TextArea;
| {
"content_hash": "d20af62655def858a934cd706d075760",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 39,
"avg_line_length": 18.5,
"alnum_prop": 0.7364864864864865,
"repo_name": "dwightgunning/dailywriting",
"id": "c464d78eede4faab1e214880c6f87cfe250a7ede",
"size": "148",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/containers/WritingPage/Textarea.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "1787"
},
{
"name": "HTML",
"bytes": "9662"
},
{
"name": "JavaScript",
"bytes": "81242"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Arbor</title>
<!-- Twitter Cards -->
<meta name="twitter:title" content="Arbor">
<meta name="twitter:site" content="@lukejharmon">
<meta name="twitter:creator" content="@lukejharmon">
<meta name="twitter:card" content="summary_large_image">
<meta name="twitter:image" content="/images/aspen-1600x800.jpg">
<!-- Open Graph -->
<meta property="og:locale" content="en_US">
<meta property="og:type" content="article">
<meta property="og:title" content="Arbor">
<meta property="og:url" content="/">
<meta property="og:site_name" content="Arbor">
<link rel="author" href="https://plus.google.com/+LukeHarmon1"/>
<link rel="canonical" href="/">
<link href="/atom.xml" type="application/atom+xml" rel="alternate" title="Arbor Atom Feed">
<meta name="HandheldFriendly" content="True">
<meta name="MobileOptimized" content="320">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="cleartype" content="on">
<link rel="stylesheet" href="/css/main.css">
<!-- HTML5 Shiv and Media Query Support for IE -->
<!--[if lt IE 9]>
<script src="/js/vendor/html5shiv.min.js"></script>
<script src="/js/vendor/respond.min.js"></script>
<![endif]-->
</head>
<body>
<header id="masthead">
<div class="inner-wrap">
<a href="/" class="site-title">Arbor</a>
<nav role="navigation" class="menu top-menu">
<ul class="menu-item">
<li class="home"><a href="/">Arbor</a></li>
<li><a href="/help/" >Help</a></li>
<li><a href="/usearbor/" >Use Arbor</a></li>
<li><a href="/documentation/" >Docs & tutorials</a></li>
<li><a href="/build/" >Developers</a></li>
<li><a href="/team/" >Contact</a></li>
</ul>
</nav>
</div><!-- /.inner-wrap -->
</header><!-- /.masthead -->
<nav role="navigation" id="js-menu" class="sliding-menu-content">
<h5>Arbor <span>Table of Contents</span></h5>
<ul class="menu-item">
<li>
<a href="/help/">
<img src="/images/400x250.gif" alt="teaser" class="teaser">
<div class="title">Help</div>
<p class="excerpt">Get help</p>
</a>
</li><li>
<a href="/usearbor/">
<img src="/images/400x250.gif" alt="teaser" class="teaser">
<div class="title">Use Arbor</div>
<p class="excerpt">Run Arbor over the web or install it yourself</p>
</a>
</li><li>
<a href="/documentation/">
<img src="/images/400x250.gif" alt="teaser" class="teaser">
<div class="title">Docs & tutorials</div>
<p class="excerpt">Full documentation for functions and workflows</p>
</a>
</li><li>
<a href="/build/">
<img src="/images/400x250.gif" alt="teaser" class="teaser">
<div class="title">Developers</div>
<p class="excerpt">Add to the Arbor code base</p>
</a>
</li><li>
<a href="/team/">
<img src="/images/400x250.gif" alt="teaser" class="teaser">
<div class="title">Contact</div>
<p class="excerpt">Contact us</p>
</a>
</li>
</ul>
</nav>
<button type="button" id="js-menu-trigger" class="sliding-menu-button lines-button x2" role="button" aria-label="Toggle Navigation">
<span class="nav-lines"></span>
</button>
<div id="js-menu-screen" class="menu-screen"></div>
<div class="page-lead" style="background-image:url(/images/aspen-1600x800.jpg)">
<div class="wrap page-lead-content">
<h1>Arbor workflows</h1>
<h2>Visual workflows for phylogenetic comparative methods.</h2>
<span title="Use Arbor to analyze your data">
<a href="/usearbor/" class="btn-inverse-gray"><b>Analyze your data</b></a></span><br>
</div><!-- /.page-lead-content -->
</div><!-- /.page-lead -->
<div id="page-wrapper">
<!--[if lt IE 9]><div class="upgrade notice-warning"><strong>Your browser is quite old!</strong> Why not <a href="http://whatbrowser.org/">upgrade to a newer one</a> to better enjoy this site?</div><![endif]-->
<div id="main" role="main">
<div class="wrap">
<div class="page-title">
<h1></h1>
</div>
<div class="archive-wrap">
<div class="page-content">
<p style="float: left;margin:0 10px 10px 0">
<img src="/images/arbor_logo/arbor_128px.png" width="150px" border="1px" alt="Arbor logo" /></p>
<p>
<br /><br />Arbor is web-based software for carrying out phylogenetic comparative analyses of the tree of life.
<div class="tiles">
<div class="tile">
<h2 class="post-excerpt">Develop tools</h2>
<p class="post-excerpt">Create your own functions and workflows</p>
<a href="/build/" class="btn-inverse-gray">Developers</a>
</div><!-- /.tile -->
<div class="tile">
<h2 class="post-excerpt">Teach</h2>
<p class="post-excerpt">Use Arbor in the classroom</p>
<a href="/arbor-ed/" class="btn-inverse-gray">ArborEd</a>
</div><!-- /.tile -->
<div class="tile">
<h2 class="post-excerpt">Get help</h2>
<p class="post-excerpt">How to use Arbor</p>
<a href="/help/" class="btn-inverse-gray">Documentation</a>
</div><!-- /.tile -->
</div><!-- /.tiles -->
</p>
</div><!-- /.page-content -->
</div><!-- /.archive-wrap -->
</div><!-- /.wrap -->
</div><!-- /#main -->
<footer role="contentinfo" id="site-footer">
<nav role="navigation" class="menu bottom-menu">
<ul class="menu-item">
<li><a href="/" >Home</a></li>
<li><a href="/getting-started/" >Getting Started</a></li>
<li><a href="/about/" >About</a></li>
<li><a href="/team/" >Team</a></li>
</ul>
</nav><!-- /.bottom-menu -->
<p class="copyright">© 2018 <a href="/">Arbor</a> powered by <a href="http://jekyllrb.com">Jekyll</a> + <a href="http://mmistakes.github.io/skinny-bones-jekyll/">Skinny Bones</a>.</p>
</footer>
</div>
<script src="/js/vendor/jquery-1.9.1.min.js"></script>
<script src="/js/main.js"></script>
</body>
</html>
| {
"content_hash": "dae52dcd564c2f435c5195ac50c1fe4d",
"timestamp": "",
"source": "github",
"line_count": 206,
"max_line_length": 216,
"avg_line_length": 30.101941747572816,
"alnum_prop": 0.5782938235768424,
"repo_name": "arborworkflows/arborworkflows.github.com",
"id": "6f46fba8b0b7c723b1b0191e8d8c833c33f154cb",
"size": "6201",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_site/index.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "119006"
},
{
"name": "HTML",
"bytes": "944396"
},
{
"name": "JavaScript",
"bytes": "16633"
},
{
"name": "R",
"bytes": "26"
},
{
"name": "Ruby",
"bytes": "2978"
},
{
"name": "Shell",
"bytes": "216"
}
],
"symlink_target": ""
} |
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "542bef1d31f528e1163c983972ec2273",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 10.23076923076923,
"alnum_prop": 0.6917293233082706,
"repo_name": "mdoering/backbone",
"id": "f67d8e6c4fc72a5ce657c10dde872ef8bed31cb1",
"size": "202",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Solanales/Solanaceae/Solanum/Solanum sciadostylis/ Syn. Cyphomandra sciadostylis hirsuta/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
// Copyright 2021 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import {FilePath} from 'chrome://resources/mojo/mojo/public/mojom/base/file_path.mojom-webui.js';
import {Url} from 'chrome://resources/mojo/url/mojom/url.mojom-webui.js';
import {CurrentWallpaper, GooglePhotosAlbum, GooglePhotosEnablementState, GooglePhotosPhoto, WallpaperCollection, WallpaperImage} from '../personalization_app.mojom-webui.js';
import {DefaultImageSymbol, DisplayableImage, kDefaultImageSymbol} from './constants.js';
/**
* Stores collections and images from backdrop server.
* |images| is a mapping of collection id to the list of images.
*/
export interface BackdropState {
collections: WallpaperCollection[]|null;
images: Record<WallpaperCollection['id'], WallpaperImage[]|null>;
}
/**
 * Stores Google Photos state.
 * |enabled| is whether the user is allowed to access Google Photos. It is
 * undefined only until it has been initialized.
 * |albums| is the list of Google Photos albums. It is undefined only until it
 * has been initialized, then either null (in error state) or a valid Array.
 * |photos| is the list of Google Photos photos. It is undefined only until it
 * has been initialized, then either null (in error state) or a valid Array.
 * |photosByAlbumId| is the list of Google Photos photos keyed by album id. The
 * list of photos for a given album id is undefined only until is has been
 * initialized, then either null (in error state) or a valid Array.
 * |resumeTokens| holds pagination resume tokens for fetching more albums,
 * photos, and per-album photos; null when there is no token — TODO(review):
 * confirm null-token semantics against the fetch callers.
 */
export interface GooglePhotosState {
  enabled: GooglePhotosEnablementState|undefined;
  albums: GooglePhotosAlbum[]|null|undefined;
  photos: GooglePhotosPhoto[]|null|undefined;
  photosByAlbumId: Record<string, GooglePhotosPhoto[]|null|undefined>;
  resumeTokens: {
    albums: string|null,
    photos: string|null,
    photosByAlbumId: Record<string, string|null>,
  };
}
/**
 * Stores loading state of various components of the app.
 * |images| is a mapping of collection id to loading state.
 * |local| stores data just for local images on disk.
 * |local.data| stores a mapping of FilePath.path string to loading state.
 *
 * |selected| is a boolean representing the loading state of current wallpaper
 * information. This gets complicated when a user rapidly selects multiple
 * wallpaper images, or picks a new daily refresh wallpaper. This becomes
 * false when a new CurrentWallpaper object is received and the |setImage|
 * counter is at 0.
 *
 * |setImage| is a number representing the number of concurrent requests to set
 * current wallpaper information. This can be more than 1 in case a user rapidly
 * selects multiple wallpaper options.
 *
 * |googlePhotos| stores loading state of Google Photos data.
 */
export interface LoadingState {
  collections: boolean;
  images: Record<WallpaperCollection['id'], boolean>;
  local: {
    images: boolean,
    data: Record<FilePath['path']|DefaultImageSymbol, boolean>,
  };
  // Presumably true while a daily-refresh wallpaper request is in flight —
  // TODO(review): confirm against the daily refresh flow.
  refreshWallpaper: boolean;
  selected: boolean;
  setImage: number;
  googlePhotos: {
    enabled: boolean,
    albums: boolean,
    photos: boolean,
    photosByAlbumId: Record<string, boolean>,
  };
}
/**
 * |images| stores the list of images on local disk. The image in index 0 may be
 * a special case for default image thumbnail.
 * |data| stores a mapping of image.path to a thumbnail data url. There is also
 * a special key to represent the default image thumbnail.
 */
export interface LocalState {
  // Initially null (see |emptyState|); may include kDefaultImageSymbol.
  images: Array<FilePath|DefaultImageSymbol>|null;
  // Keyed by FilePath.path or kDefaultImageSymbol; values are thumbnail urls.
  data: Record<FilePath['path']|DefaultImageSymbol, Url>;
}
/** Distinguishes which backing service a daily-refresh id refers to. */
export enum DailyRefreshType {
  GOOGLE_PHOTOS = 'daily_refresh_google_photos',
  BACKDROP = 'daily_refresh_backdrop',
}
/**
 * |id| stores either a Backdrop collection id or a Google Photos album id.
 * |type| stores which type of daily refresh and type of id this is.
 */
export interface DailyRefreshState {
  // Interpreted according to |type| (collection id vs. album id).
  id: string;
  type: DailyRefreshType;
}
/**
 * Top-level state for the wallpaper subpage: backdrop (server) collections,
 * local on-disk images, Google Photos data, the currently applied and pending
 * wallpaper selections, daily-refresh configuration, fullscreen flag, and the
 * loading flags for each of these.
 */
export interface WallpaperState {
  backdrop: BackdropState;
  loading: LoadingState;
  local: LocalState;
  currentSelected: CurrentWallpaper|null;
  pendingSelected: DisplayableImage|null;
  dailyRefresh: DailyRefreshState|null;
  fullscreen: boolean;
  googlePhotos: GooglePhotosState;
}
export function emptyState(): WallpaperState {
return {
backdrop: {collections: null, images: {}},
loading: {
collections: true,
images: {},
local: {images: false, data: {[kDefaultImageSymbol]: false}},
refreshWallpaper: false,
selected: false,
setImage: 0,
googlePhotos: {
enabled: false,
albums: false,
photos: false,
photosByAlbumId: {},
},
},
local: {images: null, data: {[kDefaultImageSymbol]: {url: ''}}},
currentSelected: null,
pendingSelected: null,
dailyRefresh: null,
fullscreen: false,
googlePhotos: {
enabled: undefined,
albums: undefined,
photos: undefined,
photosByAlbumId: {},
resumeTokens: {albums: null, photos: null, photosByAlbumId: {}},
},
};
}
| {
"content_hash": "aa8f8d2245c1dd308653dbc32fbe6aa7",
"timestamp": "",
"source": "github",
"line_count": 146,
"max_line_length": 175,
"avg_line_length": 34.80821917808219,
"alnum_prop": 0.7237308146399055,
"repo_name": "chromium/chromium",
"id": "b64205cf807571b233580af950a0dfda0bbe8954",
"size": "5082",
"binary": false,
"copies": "5",
"ref": "refs/heads/main",
"path": "ash/webui/personalization_app/resources/js/wallpaper/wallpaper_state.ts",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
package org.orecruncher.dsurround.client.handlers.fog;
import javax.annotation.Nonnull;
import org.orecruncher.dsurround.client.handlers.EnvironStateHandler.EnvironState;
import org.orecruncher.dsurround.client.weather.Weather;
import org.orecruncher.dsurround.registry.biome.BiomeInfo;
import org.orecruncher.dsurround.registry.biome.BiomeUtil;
import org.orecruncher.lib.chunk.ClientChunkCache;
import org.orecruncher.lib.chunk.IBlockAccessEx;
import org.orecruncher.lib.math.MathStuff;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.util.math.BlockPos;
import net.minecraftforge.client.event.EntityViewRenderEvent;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
/**
* Scans the biome area around the player to determine the fog parameters.
*/
@SideOnly(Side.CLIENT)
/**
 * Fog range calculator that samples the biomes in a square region centered on
 * the player and blends per-biome fog density (and dust during rain) into the
 * vanilla far-plane distance.
 */
public class BiomeFogRangeCalculator extends VanillaFogRangeCalculator {
	// Half-width, in blocks, of the square biome sample area (41x41 columns).
	protected static final int DISTANCE = 20;
	// Fraction by which full-strength rain shortens fog distance in dust biomes.
	protected static final float DUST_FOG_IMPACT = 0.9F;
	// Caches the last computed result together with the inputs that produced it,
	// so the scan can be skipped when nothing relevant has changed.
	private static class Context {
		public int posX;
		public int posZ;
		public float rain;
		public float lastFarPlane;
		// Set when a chunk was unavailable during the last scan; forces a rescan.
		public boolean doScan = true;
		public final FogResult cached = new FogResult();
		// True when the cached result is still valid for this position, rain
		// strength, and render event.
		public boolean returnCached(final int pX, final int pZ, final float r,
				@Nonnull final EntityViewRenderEvent.RenderFogEvent event) {
			return !this.doScan && pX == this.posX && pZ == this.posZ && r == this.rain
					&& this.lastFarPlane == event.getFarPlaneDistance() && this.cached.isValid(event);
		}
	}
	// Index 0 caches fog mode -1; index 1 caches all other fog modes.
	protected final Context[] context = { new Context(), new Context() };
	public BiomeFogRangeCalculator() {
	}
	@Override
	@Nonnull
	public String getName() {
		return "BiomeFogRangeCalculator";
	}
	/**
	 * Computes fog distance/scale by averaging fog contributions from every
	 * biome column in the sample area, weighted against the vanilla far plane.
	 * Returns the per-mode cached result when inputs are unchanged.
	 */
	@Override
	@Nonnull
	public FogResult calculate(@Nonnull final EntityViewRenderEvent.RenderFogEvent event) {
		final EntityLivingBase player = EnvironState.getPlayer();
		final IBlockAccessEx provider = ClientChunkCache.instance();
		final int playerX = MathStuff.floor(player.posX);
		final int playerZ = MathStuff.floor(player.posZ);
		final float rainStr = Weather.getIntensityLevel();
		final Context ctx = this.context[event.getFogMode() == -1 ? 0 : 1];
		if (ctx.returnCached(playerX, playerZ, rainStr, event))
			return ctx.cached;
		final BlockPos.MutableBlockPos pos = new BlockPos.MutableBlockPos(0, 0, 0);
		float fpDistanceBiomeFog = 0F;
		float weightBiomeFog = 0;
		final boolean isRaining = Weather.isRaining();
		ctx.rain = rainStr;
		ctx.doScan = false;
		for (int x = -DISTANCE; x <= DISTANCE; ++x) {
			for (int z = -DISTANCE; z <= DISTANCE; ++z) {
				pos.setPos(playerX + x, 0, playerZ + z);
				// If the chunk is not available doScan will be set true. This will force
				// another scan on the next tick.
				ctx.doScan = ctx.doScan | !provider.isAvailable(pos);
				final BiomeInfo biome = BiomeUtil.getBiomeData(provider.getBiome(pos));
				float distancePart = 1F;
				final float weightPart = 1;
				// Rainy dust biomes shorten fog the most; fog biomes use their own density.
				if (isRaining && biome.getHasDust()) {
					distancePart = 1F - DUST_FOG_IMPACT * rainStr;
				} else if (biome.getHasFog()) {
					distancePart = biome.getFogDensity();
				}
				fpDistanceBiomeFog += distancePart;
				weightBiomeFog += weightPart;
			}
		}
		// Blend the biome-derived distance with the vanilla far plane, weighting
		// by how many sampled columns contributed biome fog.
		final float weightMixed = (DISTANCE * 2 + 1) * (DISTANCE * 2 + 1);
		final float weightDefault = weightMixed - weightBiomeFog;
		final float fpDistanceBiomeFogAvg = (weightBiomeFog == 0) ? 0 : fpDistanceBiomeFog / weightBiomeFog;
		final float rangeConst = Math.max(240, event.getFarPlaneDistance() - 16);
		float farPlaneDistance = (fpDistanceBiomeFog * rangeConst + event.getFarPlaneDistance() * weightDefault)
				/ weightMixed;
		final float farPlaneDistanceScaleBiome = (0.1f * (1 - fpDistanceBiomeFogAvg) + 0.75f * fpDistanceBiomeFogAvg);
		final float farPlaneDistanceScale = (farPlaneDistanceScaleBiome * weightBiomeFog + 0.75f * weightDefault)
				/ weightMixed;
		// Record the inputs for cache validation on the next frame.
		ctx.posX = playerX;
		ctx.posZ = playerZ;
		ctx.lastFarPlane = event.getFarPlaneDistance();
		farPlaneDistance = Math.min(farPlaneDistance, event.getFarPlaneDistance());
		ctx.cached.set(event.getFogMode(), farPlaneDistance, farPlaneDistanceScale);
		return ctx.cached;
	}
}
| {
"content_hash": "688879bd83c2319753a422b546d02d4f",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 112,
"avg_line_length": 33.814516129032256,
"alnum_prop": 0.7421893632244216,
"repo_name": "OreCruncher/DynamicSurroundings",
"id": "7c316893b6414c659df71bcdc4f8a969ecffbd82",
"size": "5390",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/org/orecruncher/dsurround/client/handlers/fog/BiomeFogRangeCalculator.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "756"
},
{
"name": "CSS",
"bytes": "259"
},
{
"name": "GLSL",
"bytes": "5205"
},
{
"name": "Java",
"bytes": "1318082"
},
{
"name": "Makefile",
"bytes": "584"
},
{
"name": "Python",
"bytes": "5517"
}
],
"symlink_target": ""
} |
(function () {
'use strict';
function adminService(dataService) {
var ADMIN_URL = 'api/Admin';
function getAdminData(tweetId) {
return dataService.get(ADMIN_URL + '/GetAdminData');
}
return {
getAdminData: getAdminData
};
}
angular.module('myApp.services')
.factory('adminService', ['dataService', adminService]);
}()); | {
"content_hash": "4449580632abf91ef2287400013d432e",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 64,
"avg_line_length": 21.736842105263158,
"alnum_prop": 0.5617433414043583,
"repo_name": "luboganchev/TwitterBackup",
"id": "5690a6360df56a95e1ef8658a41e89ee74def8f4",
"size": "415",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "TwitterBackup/TwitterBackup.Client/js/admin/adminService.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "111"
},
{
"name": "C#",
"bytes": "162800"
},
{
"name": "CSS",
"bytes": "18206"
},
{
"name": "HTML",
"bytes": "18436"
},
{
"name": "JavaScript",
"bytes": "30051"
}
],
"symlink_target": ""
} |
from .._explainer import Explainer
import numpy as np
from sklearn.model_selection import train_test_split
class Maple(Explainer):
    """ Simply wraps MAPLE into the common SHAP interface.
    Parameters
    ----------
    model : function
        User supplied function that takes a matrix of samples (# samples x # features) and
        computes a the output of the model for those samples. The output can be a vector
        (# samples) or a matrix (# samples x # model outputs).
    data : numpy.array
        The background dataset.
    """
    def __init__(self, model, data):
        # Callable model; evaluated once on the full background set below.
        self.model = model
        # Accept a pandas DataFrame by falling back to its raw ndarray.
        if str(type(data)).endswith("pandas.core.frame.DataFrame'>"):
            data = data.values
        self.data = data
        # Per-feature mean, used to mean-center inputs in attributions().
        self.data_mean = self.data.mean(0)
        out = self.model(data)
        # Record whether the model output is a flat vector or a 2D matrix.
        if len(out.shape) == 1:
            self.out_dim = 1
            self.flat_out = True
        else:
            self.out_dim = out.shape[1]
            self.flat_out = False
        # MAPLE needs a train/validation split of the background data to pick
        # how many features its local linear models retain.
        X_train, X_valid, y_train, y_valid = train_test_split(data, out, test_size=0.2, random_state=0)
        self.explainer = MAPLE(X_train, y_train, X_valid, y_valid)
    def attributions(self, X, multiply_by_input=False):
        """ Compute the MAPLE coef attributions.
        Returns the per-feature local linear coefficients from MAPLE for each
        row of X (the intercept term is dropped).
        Parameters
        ----------
        multiply_by_input : bool
            If true, this multiplies the learned coeffients by the mean-centered input. This makes these
            values roughly comparable to SHAP values.
        """
        if str(type(X)).endswith("pandas.core.frame.DataFrame'>"):
            X = X.values
        # NOTE(review): only out[0] is ever populated below, even when
        # self.out_dim > 1 -- multi-output models look unsupported; confirm.
        out = [np.zeros(X.shape) for j in range(self.out_dim)]
        for i in range(X.shape[0]):
            exp = self.explainer.explain(X[i])["coefs"]
            out[0][i,:] = exp[1:]
            if multiply_by_input:
                out[0][i,:] = out[0][i,:] * (X[i] - self.data_mean)
        return out[0] if self.flat_out else out
class TreeMaple(Explainer):
    """ Simply wraps tree MAPLE into the common SHAP interface.

    Parameters
    ----------
    model : function
        User supplied function that takes a matrix of samples (# samples x # features) and
        computes a the output of the model for those samples. The output can be a vector
        (# samples) or a matrix (# samples x # model outputs).

    data : numpy.array
        The background dataset.
    """

    def __init__(self, model, data):
        self.model = model

        # Map the sklearn model class onto the fe_type string MAPLE understands.
        if str(type(model)).endswith("sklearn.ensemble.gradient_boosting.GradientBoostingRegressor'>"):
            # BUGFIX: MAPLE's root-split feature-importance scan only
            # recognizes "rf" and "gbrt". The previous value "gbdt" matched
            # neither branch, so feature_scores stayed all zeros and the
            # feature ranking/selection was effectively arbitrary.
            fe_type = "gbrt"
        # elif str(type(model)).endswith("sklearn.tree.tree.DecisionTreeClassifier'>"):
        #     pass
        elif str(type(model)).endswith("sklearn.ensemble.forest.RandomForestRegressor'>"):
            fe_type = "rf"
        # elif str(type(model)).endswith("sklearn.ensemble.forest.RandomForestClassifier'>"):
        #     pass
        # elif str(type(model)).endswith("xgboost.sklearn.XGBRegressor'>"):
        #     pass
        # elif str(type(model)).endswith("xgboost.sklearn.XGBClassifier'>"):
        #     pass
        else:
            raise NotImplementedError("The passed model is not yet supported by TreeMapleExplainer: " + str(type(model)))

        # Accept a pandas DataFrame by falling back to its raw ndarray.
        if str(type(data)).endswith("pandas.core.frame.DataFrame'>"):
            data = data.values
        self.data = data
        # Per-feature mean, used to mean-center inputs in attributions().
        self.data_mean = self.data.mean(0)

        # Record whether the model output is a flat vector or a 2D matrix.
        out = self.model.predict(data[0:1])
        if len(out.shape) == 1:
            self.out_dim = 1
            self.flat_out = True
        else:
            self.out_dim = self.model.predict(data[0:1]).shape[1]
            self.flat_out = False

        #_, X_valid, _, y_valid = train_test_split(data, self.model.predict(data), test_size=0.2, random_state=0)
        # Reuse the pre-fit tree ensemble; MAPLE takes n_estimators from it.
        preds = self.model.predict(data)
        self.explainer = MAPLE(data, preds, data, preds, fe=self.model, fe_type=fe_type)

    def attributions(self, X, multiply_by_input=False):
        """ Compute the MAPLE coef attributions.

        Returns the per-feature local linear coefficients from MAPLE for each
        row of X (the intercept term is dropped).

        Parameters
        ----------
        multiply_by_input : bool
            If true, this multiplies the learned coeffients by the mean-centered input. This makes these
            values roughly comparable to SHAP values.
        """
        if str(type(X)).endswith("pandas.core.frame.DataFrame'>"):
            X = X.values

        # NOTE(review): only out[0] is ever populated below, even when
        # self.out_dim > 1 -- multi-output models look unsupported; confirm.
        out = [np.zeros(X.shape) for j in range(self.out_dim)]
        for i in range(X.shape[0]):
            exp = self.explainer.explain(X[i])["coefs"]
            out[0][i,:] = exp[1:]
            if multiply_by_input:
                out[0][i,:] = out[0][i,:] * (X[i] - self.data_mean)

        return out[0] if self.flat_out else out
#################################################
# The code below was authored by Gregory Plumb and is
# from: https://github.com/GDPlumb/MAPLE/blob/master/Code/MAPLE.py
# It has by copied here to allow for benchmark comparisions. Please see
# the original repo for the latest version, supporting material, and citations.
#################################################
# Notes:
# - Assumes any required data normalization has already been done
# - Can pass Y (desired response) instead of MR (model fit to Y) to make fitting MAPLE to datasets easy
from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor
from sklearn.linear_model import Ridge
from sklearn.metrics import mean_squared_error
import numpy as np
class MAPLE:
    """Model-Agnostic suPervised Local Explanations (Gregory Plumb's
    reference implementation, vendored for benchmarking).

    Fits (or adopts) a forest ensemble over the model response, weights
    training points by how often they share a leaf with a query point, and
    fits a weighted ridge regression over the top-scoring features to produce
    a local linear explanation.
    """
    def __init__(self, X_train, MR_train, X_val, MR_val, fe_type = "rf", fe=None, n_estimators = 200, max_features = 0.5, min_samples_leaf = 10, regularization = 0.001):
        """Fit the ensemble and choose, by validation RMSE, how many of the
        highest-scoring features the local linear models should retain.

        fe_type must be "rf" or "gbrt"; it also selects how root-split
        feature scores are computed below. When `fe` is a pre-fit ensemble,
        n_estimators is taken from it and no new ensemble is trained.
        """
        # Features and the target model response
        self.X_train = X_train
        self.MR_train = MR_train
        self.X_val = X_val
        self.MR_val = MR_val
        # Forest Ensemble Parameters
        self.n_estimators = n_estimators
        self.max_features = max_features
        self.min_samples_leaf = min_samples_leaf
        # Local Linear Model Parameters
        self.regularization = regularization
        # Data parameters
        num_features = X_train.shape[1]
        self.num_features = num_features
        num_train = X_train.shape[0]
        self.num_train = num_train
        num_val = X_val.shape[0]
        # Fit a Forest Ensemble to the model response
        if fe is None:
            if fe_type == "rf":
                fe = RandomForestRegressor(n_estimators = n_estimators, min_samples_leaf = min_samples_leaf, max_features = max_features)
            elif fe_type == "gbrt":
                fe = GradientBoostingRegressor(n_estimators = n_estimators, min_samples_leaf = min_samples_leaf, max_features = max_features, max_depth = None)
            else:
                print("Unknown FE type ", fe)
                import sys
                sys.exit(0)
            fe.fit(X_train, MR_train)
        else:
            self.n_estimators = n_estimators = len(fe.estimators_)
        self.fe = fe
        # Leaf ids per (sample, tree); used to find "leaf neighbors" later.
        train_leaf_ids = fe.apply(X_train)
        self.train_leaf_ids = train_leaf_ids
        val_leaf_ids_list = fe.apply(X_val)
        # Compute the feature importances: Non-normalized @ Root
        # NOTE(review): any other fe_type string (e.g. "gbdt") skips both
        # branches below, leaving all scores zero and making the feature
        # ranking arbitrary -- callers must pass exactly "rf" or "gbrt".
        scores = np.zeros(num_features)
        if fe_type == "rf":
            for i in range(n_estimators):
                splits = fe[i].tree_.feature #-2 indicates leaf, index 0 is root
                if splits[0] != -2:
                    scores[splits[0]] += fe[i].tree_.impurity[0] #impurity reduction not normalized per tree
        elif fe_type == "gbrt":
            for i in range(n_estimators):
                splits = fe[i, 0].tree_.feature #-2 indicates leaf, index 0 is root
                if splits[0] != -2:
                    scores[splits[0]] += fe[i, 0].tree_.impurity[0] #impurity reduction not normalized per tree
        self.feature_scores = scores
        mostImpFeats = np.argsort(-scores)
        # Find the number of features to use for MAPLE
        # (greedy sweep: keep the retain-count with best validation RMSE)
        retain_best = 0
        rmse_best = np.inf
        for retain in range(1, num_features + 1):
            # Drop less important features for local regression
            X_train_p = np.delete(X_train, mostImpFeats[retain:], axis = 1)
            X_val_p = np.delete(X_val, mostImpFeats[retain:], axis = 1)
            lr_predictions = np.empty([num_val], dtype=float)
            for i in range(num_val):
                weights = self.training_point_weights(val_leaf_ids_list[i])
                # Local linear model
                lr_model = Ridge(alpha=regularization)
                lr_model.fit(X_train_p, MR_train, weights)
                lr_predictions[i] = lr_model.predict(X_val_p[i].reshape(1, -1))
            rmse_curr = np.sqrt(mean_squared_error(lr_predictions, MR_val))
            if rmse_curr < rmse_best:
                rmse_best = rmse_curr
                retain_best = retain
        self.retain = retain_best
        # Training features restricted to the selected columns.
        self.X = np.delete(X_train, mostImpFeats[retain_best:], axis = 1)
    def training_point_weights(self, instance_leaf_ids):
        """Weight each training point by how often it shares a leaf with the
        query, averaged over trees (each co-leaf group sums to 1 per tree)."""
        weights = np.zeros(self.num_train)
        for i in range(self.n_estimators):
            # Get the PNNs for each tree (ones with the same leaf_id)
            PNNs_Leaf_Node = np.where(self.train_leaf_ids[:, i] == instance_leaf_ids[i])[0]
            if len(PNNs_Leaf_Node) > 0: # SML: added this to fix degenerate cases
                weights[PNNs_Leaf_Node] += 1.0 / len(PNNs_Leaf_Node)
        return weights
    def explain(self, x):
        """Return a local explanation for a single point x: training-point
        weights, full-length coefficient vector (index 0 is the intercept,
        unselected features get 0), and the local model's prediction."""
        x = x.reshape(1, -1)
        mostImpFeats = np.argsort(-self.feature_scores)
        x_p = np.delete(x, mostImpFeats[self.retain:], axis = 1)
        curr_leaf_ids = self.fe.apply(x)[0]
        weights = self.training_point_weights(curr_leaf_ids)
        # Local linear model
        lr_model = Ridge(alpha = self.regularization)
        lr_model.fit(self.X, self.MR_train, weights)
        # Get the model coeficients
        coefs = np.zeros(self.num_features + 1)
        coefs[0] = lr_model.intercept_
        coefs[np.sort(mostImpFeats[0:self.retain]) + 1] = lr_model.coef_
        # Get the prediction at this point
        prediction = lr_model.predict(x_p.reshape(1, -1))
        out = {}
        out["weights"] = weights
        out["coefs"] = coefs
        out["pred"] = prediction
        return out
    def predict(self, X):
        """Predict each row of X with its own local linear model (slow: one
        explain() call per row)."""
        n = X.shape[0]
        pred = np.zeros(n)
        for i in range(n):
            exp = self.explain(X[i, :])
            pred[i] = exp["pred"][0]
        return pred
    # Make the predictions based on the forest ensemble (either random forest or gradient boosted regression tree) instead of MAPLE
    def predict_fe(self, X):
        return self.fe.predict(X)
    # Make the predictions based on SILO (no feature selection) instead of MAPLE
    def predict_silo(self, X):
        n = X.shape[0]
        pred = np.zeros(n)
        for i in range(n): #The contents of this inner loop are similar to explain(): doesn't use the features selected by MAPLE or return as much information
            x = X[i, :].reshape(1, -1)
            curr_leaf_ids = self.fe.apply(x)[0]
            weights = self.training_point_weights(curr_leaf_ids)
            # Local linear model
            lr_model = Ridge(alpha = self.regularization)
            lr_model.fit(self.X_train, self.MR_train, weights)
            pred[i] = lr_model.predict(x)[0]
        return pred
| {
"content_hash": "4bfaec124e23f96d6e7b02a43e73e447",
"timestamp": "",
"source": "github",
"line_count": 304,
"max_line_length": 169,
"avg_line_length": 37.54276315789474,
"alnum_prop": 0.5846841321300271,
"repo_name": "slundberg/shap",
"id": "e792b158ac8254fd39c533466c82b2bd7c67ea67",
"size": "11413",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "shap/explainers/other/_maple.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "C++",
"bytes": "154976"
},
{
"name": "Cuda",
"bytes": "14595"
},
{
"name": "HTML",
"bytes": "2393"
},
{
"name": "JavaScript",
"bytes": "55236"
},
{
"name": "Jupyter Notebook",
"bytes": "138364033"
},
{
"name": "PowerShell",
"bytes": "7195"
},
{
"name": "Python",
"bytes": "1295917"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE fontscolors PUBLIC "-//NetBeans//DTD Editor Fonts and Colors settings 1.1//EN" "http://www.netbeans.org/dtds/EditorFontsColors-1_1.dtd">
<fontscolors>
<fontcolor default="keyword" foreColor="ff009900" name="tagparam"/>
<fontcolor default="separator" name="flag"/>
<fontcolor default="error" name="error"/>
<fontcolor default="keyword" name="tag"/>
<fontcolor default="string" name="string"/>
<fontcolor default="whitespace" name="whitespace"/>
<fontcolor default="number" name="number"/>
<fontcolor default="keyword" name="directive">
<font style="bold"/>
</fontcolor>
<fontcolor default="comment" name="comment"/>
<fontcolor default="identifier" foreColor="ff9933cc" name="variable"/>
<fontcolor default="keyword" name="directiveparam"/>
</fontscolors>
| {
"content_hash": "588df735e04dd56a871ea4f0a90d301c",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 146,
"avg_line_length": 50.529411764705884,
"alnum_prop": 0.6915017462165308,
"repo_name": "cameronoxley/dark-crayon-netbeans-theme",
"id": "f02acebd87a83a43647f0bcc200150690a569c9e",
"size": "859",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "theme/config/Editors/text/x-apache-conf/FontsColors/Dark Crayon/org-netbeans-modules-editor-settings-CustomFontsColors-tokenColorings.xml",
"mode": "33261",
"license": "mit",
"language": [],
"symlink_target": ""
} |
sap.ui.require(["sap/ui/core/routing/History"], function(History) {
	"use strict";
	// Create and dispatch a bubbling "historyReady" event on the parent
	// frame's document once the History module has loaded in this iframe.
	var oEvent = document.createEvent('Event');
	oEvent.initEvent("historyReady", true, true); // bubbles, cancelable
	// Carry this frame's pushState capability flag so the parent can read it.
	oEvent._bUsePushStateInFrame = History._bUsePushState;
	// inform the parent frame
	window.parent.document.dispatchEvent(oEvent);
});
| {
"content_hash": "fee8ef5491c4a4cb25fa4ab8037004de",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 67,
"avg_line_length": 31.2,
"alnum_prop": 0.7435897435897436,
"repo_name": "SAP/openui5",
"id": "a80eb3d209aa98a993f10901f3df4920d2819b32",
"size": "331",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/sap.ui.core/test/sap/ui/core/qunit/testdata/routing/HistoryIFrame.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "294216"
},
{
"name": "Gherkin",
"bytes": "17201"
},
{
"name": "HTML",
"bytes": "6443688"
},
{
"name": "Java",
"bytes": "83398"
},
{
"name": "JavaScript",
"bytes": "109546491"
},
{
"name": "Less",
"bytes": "8741757"
},
{
"name": "TypeScript",
"bytes": "20918"
}
],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.Linq;
using Amadiere.Blog;
using Amadiere.Blog.Repositories;
using Amadiere.Website.ViewModels.Blog;
using Microsoft.AspNetCore.Mvc;
namespace Amadiere.Website.Controllers
{
public class BlogController : Microsoft.AspNetCore.Mvc.Controller
{
private IArticleReader Articles { get; set; }
public BlogController(IArticleReader articleReader)
{
Articles = articleReader;
}
public IActionResult Index()
{
var viewModel = new IndexViewModel();
viewModel.Articles = Articles.GetMostRecent().Select(x => new BlogViewItem(x));
return View(viewModel);
}
public IActionResult Article(int? year, int? month, string slug)
{
if (!year.HasValue || !month.HasValue || string.IsNullOrEmpty(slug))
return NotFound();
var article = Articles.Get(year.Value, month.Value, slug);
if (article == null)
return NotFound();
var viewModel = new ArticleViewModel();
viewModel.Article = new BlogViewItem(article);
return View(viewModel);
}
public IActionResult All()
{
var viewModel = new AllViewModel();
viewModel.Articles = Articles.GetAll().Select(x => new BlogViewItem(x));
return View(viewModel);
}
}
} | {
"content_hash": "0571f5ce7301cbfa79e69a8c57d55780",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 91,
"avg_line_length": 28.50980392156863,
"alnum_prop": 0.609353507565337,
"repo_name": "Amadiere/AmadiereDotCom",
"id": "62ad94cf271a970664b6b01d1dcb881702e5c3bd",
"size": "1454",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Website/Controllers/BlogController.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "30434"
},
{
"name": "CSS",
"bytes": "18733"
},
{
"name": "HTML",
"bytes": "132573"
},
{
"name": "JavaScript",
"bytes": "34"
},
{
"name": "SCSS",
"bytes": "19336"
}
],
"symlink_target": ""
} |
package org.newdawn.slick.opengl.pbuffer;
import org.lwjgl.LWJGLException;
import org.lwjgl.opengl.Display;
import org.lwjgl.opengl.GL11;
import org.lwjgl.opengl.Pbuffer;
import org.lwjgl.opengl.PixelFormat;
import org.newdawn.slick.Graphics;
import org.newdawn.slick.Image;
import org.newdawn.slick.SlickException;
import org.newdawn.slick.opengl.SlickCallable;
import org.newdawn.slick.opengl.Texture;
import org.newdawn.slick.opengl.TextureImpl;
import org.newdawn.slick.opengl.InternalTextureLoader;
import org.newdawn.slick.util.Log;
/**
* A graphics implementation that renders to a PBuffer using a unique context, i.e.
* without render to texture
*
* @author kevin
*/
public class PBufferUniqueGraphics extends Graphics {
	/** The pbuffer we're going to render to */
	private Pbuffer pbuffer;
	/** The image we're sort of rendering to */
	private Image image;

	/**
	 * Create a new graphics context around a pbuffer
	 *
	 * @param image The image we're rendering to
	 * @throws SlickException Indicates a failure to use pbuffers
	 */
	public PBufferUniqueGraphics(Image image) throws SlickException {
		// Size the Graphics to the backing texture (power-of-two), not the image.
		super(image.getTexture().getTextureWidth(), image.getTexture().getTextureHeight());
		this.image = image;
		Log.debug("Creating pbuffer(unique) "+image.getWidth()+"x"+image.getHeight());
		// Fail fast if the driver can't do pbuffers at all.
		if ((Pbuffer.getCapabilities() & Pbuffer.PBUFFER_SUPPORTED) == 0) {
			throw new SlickException("Your OpenGL card does not support PBuffers and hence can't handle the dynamic images required for this application.");
		}
		init();
	}

	/**
	 * Initialise the PBuffer that will be used to render to.
	 * Creates the pbuffer with its own GL context, draws the current image
	 * contents into it, copies that into a fresh texture and swaps the image
	 * over to the new texture.
	 *
	 * @throws SlickException
	 */
	private void init() throws SlickException {
		try {
			Texture tex = InternalTextureLoader.get().createTexture(image.getWidth(), image.getHeight());
			// null share-context: this pbuffer owns a unique GL context.
			pbuffer = new Pbuffer(screenWidth, screenHeight, new PixelFormat(8, 0, 0), null, null);
			// Initialise state of the pbuffer context.
			pbuffer.makeCurrent();
			initGL();
			// Seed the pbuffer with the image's current contents, then capture
			// the framebuffer into the new texture.
			image.draw(0,0);
			GL11.glBindTexture(GL11.GL_TEXTURE_2D, tex.getTextureID());
			GL11.glCopyTexImage2D(GL11.GL_TEXTURE_2D, 0, GL11.GL_RGBA, 0, 0,
					      tex.getTextureWidth(),
					      tex.getTextureHeight(), 0);
			image.setTexture(tex);
			// Hand control back to the main display context.
			Display.makeCurrent();
		} catch (Exception e) {
			Log.error(e);
			throw new SlickException("Failed to create PBuffer for dynamic image. OpenGL driver failure?");
		}
	}

	/**
	 * @see org.newdawn.slick.Graphics#disable()
	 */
	protected void disable() {
		// Bind the texture after rendering: snapshot the pbuffer framebuffer
		// back into the image's texture before leaving this context.
		GL11.glBindTexture(GL11.GL_TEXTURE_2D, image.getTexture().getTextureID());
		GL11.glCopyTexImage2D(GL11.GL_TEXTURE_2D, 0, GL11.GL_RGBA, 0, 0,
				      image.getTexture().getTextureWidth(),
				      image.getTexture().getTextureHeight(), 0);
		try {
			Display.makeCurrent();
		} catch (LWJGLException e) {
			Log.error(e);
		}
		SlickCallable.leaveSafeBlock();
	}

	/**
	 * @see org.newdawn.slick.Graphics#enable()
	 */
	protected void enable() {
		SlickCallable.enterSafeBlock();
		try {
			// Pbuffer contents can be discarded by the driver (e.g. on display
			// mode change); recreate from the image texture if that happened.
			if (pbuffer.isBufferLost()) {
				pbuffer.destroy();
				init();
			}
			pbuffer.makeCurrent();
		} catch (Exception e) {
			Log.error("Failed to recreate the PBuffer");
			Log.error(e);
			throw new RuntimeException(e);
		}
		// Put the renderer contents to the texture
		TextureImpl.unbind();
		initGL();
	}

	/**
	 * Initialise the GL context with the fixed-function 2D state Slick expects
	 * (alpha blending on, depth/lighting off, orthographic projection).
	 */
	protected void initGL() {
		GL11.glEnable(GL11.GL_TEXTURE_2D);
		GL11.glShadeModel(GL11.GL_SMOOTH);
		GL11.glDisable(GL11.GL_DEPTH_TEST);
		GL11.glDisable(GL11.GL_LIGHTING);
		GL11.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
		GL11.glClearDepth(1);
		GL11.glEnable(GL11.GL_BLEND);
		GL11.glBlendFunc(GL11.GL_SRC_ALPHA, GL11.GL_ONE_MINUS_SRC_ALPHA);
		GL11.glViewport(0,0,screenWidth,screenHeight);
		GL11.glMatrixMode(GL11.GL_MODELVIEW);
		GL11.glLoadIdentity();
		enterOrtho();
	}

	/**
	 * Enter the orthographic mode
	 */
	protected void enterOrtho() {
		GL11.glMatrixMode(GL11.GL_PROJECTION);
		GL11.glLoadIdentity();
		// Bottom-up ortho (0 at the bottom) — matches the copy-to-texture path.
		GL11.glOrtho(0, screenWidth, 0, screenHeight, 1, -1);
		GL11.glMatrixMode(GL11.GL_MODELVIEW);
	}

	/**
	 * @see org.newdawn.slick.Graphics#destroy()
	 */
	public void destroy() {
		super.destroy();
		// Release the native pbuffer and its GL context.
		pbuffer.destroy();
	}

	/**
	 * @see org.newdawn.slick.Graphics#flush()
	 */
	public void flush() {
		super.flush();
		// Invalidate any cached pixel data the image holds.
		image.flushPixelData();
	}
}
| {
"content_hash": "0e64b9a5fbfa543f2fc05e793a452016",
"timestamp": "",
"source": "github",
"line_count": 167,
"max_line_length": 147,
"avg_line_length": 28.017964071856287,
"alnum_prop": 0.6520624064971148,
"repo_name": "SenshiSentou/SourceFight",
"id": "c74e0f9d946ec5222311648bc595df908bb69fa1",
"size": "4679",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "slick_dev/tags/Slick0.3/src/org/newdawn/slick/opengl/pbuffer/PBufferUniqueGraphics.java",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "2377"
},
{
"name": "C++",
"bytes": "1"
},
{
"name": "CSS",
"bytes": "2782"
},
{
"name": "GAP",
"bytes": "24471"
},
{
"name": "Java",
"bytes": "11819037"
},
{
"name": "JavaScript",
"bytes": "22036"
},
{
"name": "Scala",
"bytes": "76"
}
],
"symlink_target": ""
} |
package com.github.Kraken3.AFKPGC;
/**
* Lightweight warning message concerning kick nearness.
*/
class Warning {
	// Time value this warning refers to; units are defined by the caller
	// (presumably seconds until the kick — confirm against the scheduler).
	public int time;
	// Human-readable warning text.
	public String message;

	public Warning(int time, String message) {
		this.message = message;
		this.time = time;
	}

	/** Renders the warning as {@code [time]: message}. */
	@Override
	public String toString() {
		StringBuilder rendered = new StringBuilder();
		rendered.append('[').append(time).append("]: ").append(message);
		return rendered.toString();
	}
}
} | {
"content_hash": "c1af72fe526844d837f5434afa0b5375",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 58,
"avg_line_length": 19.833333333333332,
"alnum_prop": 0.6386554621848739,
"repo_name": "Maxopoly/AFK-Player-GC",
"id": "db304e67959d15ce61c607dbfcc124b1701596ba",
"size": "357",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/com/github/Kraken3/AFKPGC/Warning.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Java",
"bytes": "79725"
}
],
"symlink_target": ""
} |
package com.wii.vmail.pojo;
import java.io.Serializable;
/**
 * Immutable pairing of a mail address string with its {@link AddressType}.
 */
public class MailAddress implements Serializable {

    private static final long serialVersionUID = 1L;

    // Role of this address (semantics defined by AddressType).
    private final AddressType type;
    // Raw e-mail address string.
    private final String address;

    public MailAddress(AddressType pMailType, String pMailAddr) {
        this.type = pMailType;
        this.address = pMailAddr;
    }

    public AddressType getMailType() {
        return type;
    }

    public String getMailAddress() {
        return address;
    }
}
| {
"content_hash": "4c488e19d7fe993d076242011ea62c1c",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 65,
"avg_line_length": 25.727272727272727,
"alnum_prop": 0.6484098939929329,
"repo_name": "moreus/vMail",
"id": "d2cf778eb137df8e9759c6308a26d6f97d3ee552",
"size": "566",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/com/wii/vmail/pojo/MailAddress.java",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "819183"
},
{
"name": "PHP",
"bytes": "44483"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<menu xmlns:android="http://schemas.android.com/apk/res/android" >
<item
android:id="@+id/logoutFacebook"
android:icon="@drawable/settings"
android:title="@string/logout_facebook"/>
</menu> | {
"content_hash": "eb2a56f345d65dcf6288a18e8658d0b2",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 66,
"avg_line_length": 28.666666666666668,
"alnum_prop": 0.6434108527131783,
"repo_name": "privly/privly-android",
"id": "eeb7b96aca27c8a672efef76b236f1c9e97b712d",
"size": "258",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/res/menu/menu_layout_slistusers.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "173483"
}
],
"symlink_target": ""
} |
<?xml version="1.0"?>
<document>
<properties>
<title>Modello StAX Plugin</title>
    <author email="hboutemy_AT_apache_DOT_org">Hervé Boutemy</author>
</properties>
<body>
<section name="Modello StAX Plugin">
<p>Modello StAX Plugin generates XML readers and writers based on
<a href="http://docs.oracle.com/javase/6/docs/api/javax/xml/stream/package-summary.html">StAX API</a>,
plus reader delegates to be able to read multiple model versions.</p>
<p>Notice: DOM content type can be represented either as
<a href="http://plexus.codehaus.org/plexus-utils/apidocs/org/codehaus/plexus/util/xml/Xpp3Dom.html">plexus-utils' Xpp3Dom</a>
or, since Modello 1.6, standard
<a href="http://docs.oracle.com/javase/1.4.2/docs/api/org/w3c/dom/Element.html">org.w3c.dom.Element</a> objects</p>
<subsection name="stax-reader">
<p><code>stax-reader</code> generator creates
<code><i>my.model.package</i><b>.io.stax.</b><i>ModelName</i><b>StaxReader</b></code> class with following
public methods:
</p>
<ul>
        <li><code>public <i>RootClass</i> read( Reader reader, boolean strict )<br/>
    throws IOException, XMLStreamException</code></li>
<li><code>public <i>RootClass</i> read( Reader reader )<br/>
    throws IOException, XMLStreamException</code></li>
<li><code>public <i>RootClass</i> read( String filePath, boolean strict )<br/>
    throws IOException, XMLStreamException</code></li>
<li><code>public <i>RootClass</i> read( String filePath )<br/>
    throws IOException, XMLStreamException</code></li>
</ul>
<p>In addition, if multiple model reader versions are generated (each in its own package), it creates a delegate
<code><i>my.model.package</i><b>.io.xpp3.</b><i>ModelName</i><b>StaxReaderDelegate</b></code> class with
following public methods:
</p>
<ul>
<li><code>public Object read( File f, boolean strict )<br/>
    throws IOException, XMLStreamException</code></li>
<li><code>public Object read( File f )<br/>
    throws IOException, XMLStreamException</code></li>
</ul>
<p>Depending on the model version found in the XML content, the returned <code>Object</code> will be of the right
version package.
</p>
</subsection>
<subsection name="stax-writer">
<p><code>stax-writer</code> generator creates
<code><i>my.model.package</i><b>.io.stax.</b><i>ModelName</i><b>StaxWriter</b></code> class with following
public methods:
</p>
<ul>
<li><code>public void write( Writer writer, <i>RootClass</i> root )<br/>
    throws IOException, XMLStreamException</code></li>
</ul>
</subsection>
</section>
</body>
</document>
| {
"content_hash": "1d34ec17387ac91f416fa87bb66730af",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 131,
"avg_line_length": 40.986486486486484,
"alnum_prop": 0.6254533465215958,
"repo_name": "codehaus-plexus/modello",
"id": "c538f82c572f00cefe03d35bc1553d3ddff5b50e",
"size": "3034",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modello-plugins/modello-plugin-stax/src/site/xdoc/index.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "82"
},
{
"name": "Java",
"bytes": "1610244"
}
],
"symlink_target": ""
} |
export const replaceMaterialLinks = (markdown: string) => {
  // Rewrite legacy top-level doc sections (/guides/..., /customization/...,
  // etc.) to their /material-ui/-prefixed locations.
  const legacySection = /\(\/(guides|customization|getting-started|discover-more)\/([^)]*)\)/gm;
  return markdown.replace(legacySection, '(/material-ui/$1/$2)');
};
export const replaceComponentLinks = (markdown: string) => {
  // The rewrites must run strictly in this order: the broad /components/
  // moves come first, then the de-pluralising passes, then the fix-ups that
  // restore names legitimately ending in "s" (trap-focus, progress).
  let result = markdown;
  result = result.replace(/\(\/components\/data-grid([^)]*)\)/gm, '(/x/react-data-grid$1)');
  result = result.replace(
    /\(\/components\/((icons|material-icons|transitions|pickers|about-the-lab)\/?[^)]*)\)/gm,
    '(/material-ui/$1)',
  );
  result = result.replace(/\(\/components\/(?!tabs|breadcrumbs)([^)]*)\)/gm, '(/material-ui/react-$1)');
  result = result.replace(
    /\(\/material-ui\/(react-[-a-z]+)(x|ch)es(\/|#)([^)]*)\)/gm,
    '(/material-ui/$1$2$3$4)',
  );
  result = result.replace(/\(\/material-ui\/(react-[-a-z]+)(x|ch)es"/gm, '(/material-ui/$1$2)');
  result = result.replace(
    /\(\/material-ui\/(?!react-tabs|react-breadcrumbs)(react-[-a-z]+)s(\/|#)([^)]*)\)/gm,
    '(/material-ui/$1$2$3)',
  );
  result = result.replace(
    /\(\/material-ui\/(?!react-tabs|react-breadcrumbs)(react-[-a-z]+)s"/gm,
    '(/material-ui/$1)',
  );
  // Undo collateral damage from the "drop trailing s" passes above.
  result = result.replace(/react-trap-focu/gm, 'react-trap-focus');
  result = result.replace(/react-trap-focuss/gm, 'react-trap-focus');
  result = result.replace(/react-progres/gm, 'react-progress');
  result = result.replace(/react-progresss/gm, 'react-progress');
  result = result.replace(/\(\/components\/(tabs|breadcrumbs)([^)]*)\)/gm, '(/material-ui/react-$1$2)');
  return result;
};
export const replaceAPILinks = (markdown: string) => {
  // Data-grid APIs moved under /x/, the unstyled + utility components under
  // /base/, the lab/pickers/timeline set and everything else under
  // /material-ui/. The catch-all must stay last.
  let result = markdown.replace(/\(\/api\/data-grid([^)]*)\)/gm, '(/x/api/data-grid$1)');
  result = result.replace(/\(\/api\/([^"/]+-unstyled)([^)]*)\)/gm, '(/base/api/$1$2)');
  result = result.replace(
    /\(\/api\/(trap-focus|click-away-listener|no-ssr|portal|textarea-autosize)([^)]*)\)/gm,
    '(/base/api/$1$2)',
  );
  result = result.replace(
    /\(\/api\/(loading-button|tab-list|tab-panel|date-picker|date-time-picker|time-picker|calendar-picker|calendar-picker-skeleton|desktop-picker|mobile-date-picker|month-picker|pickers-day|static-date-picker|year-picker|masonry|timeline|timeline-connector|timeline-content|timeline-dot|timeline-item|timeline-opposite-content|timeline-separator|unstable-trap-focus|tree-item|tree-view)([^)]*)\)/gm,
    '(/material-ui/api/$1$2)',
  );
  return result.replace(/\(\/api\/([^)]*)\)/gm, '(/material-ui/api/$1)');
};
const replaceStylesLinks = (markdown: string) => {
  // The standalone /styles/* docs were folded into the system section.
  const stylesLink = /\(\/styles\/([^)]*)\)/gm;
  return markdown.replace(stylesLink, '(/system/styles/$1)');
};
export default function replaceMarkdownLinks(markdown: string) {
  // Same application order as the original nested composition:
  // components, then API, then sections, then styles.
  const pipeline = [replaceComponentLinks, replaceAPILinks, replaceMaterialLinks, replaceStylesLinks];
  return pipeline.reduce((text, transform) => transform(text), markdown);
}
| {
"content_hash": "5229c8ec462358c5a193c39b371b2b9d",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 401,
"avg_line_length": 46.888888888888886,
"alnum_prop": 0.6109794628751974,
"repo_name": "rscnt/material-ui",
"id": "899cd782647cca7af7edb8f3bf26809a9ec3ab37",
"size": "2532",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/src/modules/utils/replaceMarkdownLinks.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2126"
},
{
"name": "JavaScript",
"bytes": "3967457"
},
{
"name": "TypeScript",
"bytes": "2468380"
}
],
"symlink_target": ""
} |
layout: post
date: 2016-09-29
title: "Ellis Bridals Style 19056 Sleeveless Chapel Train Sheath/Column"
category: Ellis
tags: [Ellis ,Ellis,Sheath/Column,Illusion,Chapel Train,Sleeveless]
---
### Ellis Bridals Style 19056
Just **$299.99**
### Sleeveless Chapel Train Sheath/Column
<table><tr><td>BRANDS</td><td>Ellis</td></tr><tr><td>Silhouette</td><td>Sheath/Column</td></tr><tr><td>Neckline</td><td>Illusion</td></tr><tr><td>Hemline/Train</td><td>Chapel Train</td></tr><tr><td>Sleeve</td><td>Sleeveless</td></tr></table>
<a href="https://www.readybrides.com/en/ellis/4720-ellis-bridals-style-19056.html"><img src="//img.readybrides.com/9797/ellis-bridals-style-19056.jpg" alt="Ellis Bridals Style 19056" style="width:100%;" /></a>
<!-- break --><a href="https://www.readybrides.com/en/ellis/4720-ellis-bridals-style-19056.html"><img src="//img.readybrides.com/9798/ellis-bridals-style-19056.jpg" alt="Ellis Bridals Style 19056" style="width:100%;" /></a>
<a href="https://www.readybrides.com/en/ellis/4720-ellis-bridals-style-19056.html"><img src="//img.readybrides.com/9796/ellis-bridals-style-19056.jpg" alt="Ellis Bridals Style 19056" style="width:100%;" /></a>
Buy it: [https://www.readybrides.com/en/ellis/4720-ellis-bridals-style-19056.html](https://www.readybrides.com/en/ellis/4720-ellis-bridals-style-19056.html)
| {
"content_hash": "bbcbc91ce26fb2f947ccf92a988a3db1",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 241,
"avg_line_length": 88.53333333333333,
"alnum_prop": 0.7266566265060241,
"repo_name": "HOLEIN/HOLEIN.github.io",
"id": "b6b83c78092cc727806f4c3a959030e0b882727b",
"size": "1332",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_posts/2016-09-29-Ellis-Bridals-Style-19056-Sleeveless-Chapel-Train-SheathColumn.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "83876"
},
{
"name": "HTML",
"bytes": "14547"
},
{
"name": "Ruby",
"bytes": "897"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8" ?>
<rss version="2.0" xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:wikidot="http://www.wikidot.com/rss-namespace">
<channel>
<title>Comments for page "Withstand the Eyepatch of the Soul!"</title>
<link>http://bvs.wikidot.com/forum/t-46499/withstand-the-eyepatch-of-the-soul</link>
<description>Posts in the discussion thread "Withstand the Eyepatch of the Soul!"</description>
<copyright></copyright>
<lastBuildDate>Sun, 10 Jul 2022 03:40:04 +0000</lastBuildDate>
<item>
<guid>http://bvs.wikidot.com/forum/t-46499#post-1238906</guid>
<title>(no title)</title>
<link>http://bvs.wikidot.com/forum/t-46499/withstand-the-eyepatch-of-the-soul#post-1238906</link>
<description></description>
<pubDate>Wed, 24 Aug 2011 11:46:44 +0000</pubDate>
<wikidot:authorName>natsu</wikidot:authorName> <content:encoded>
<![CDATA[
<p>the chance is so so so low omg … this 11dbhk is going to make us crazy</p>
]]>
</content:encoded> </item>
</channel>
</rss> | {
"content_hash": "6799ca9b5a1a4f6a196a7d1de380b94b",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 129,
"avg_line_length": 48.04347826086956,
"alnum_prop": 0.6796380090497738,
"repo_name": "tn5421/tn5421.github.io",
"id": "ad3d5e60313d8af7b8a45874b952d75a76e02daf",
"size": "1105",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "bvs.wikidot.com/feed/forum/t-46499.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "400301089"
}
],
"symlink_target": ""
} |
package com.vmware.vim25;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "")
@XmlRootElement(name = "EnableAlarmActionsResponse")
public class EnableAlarmActionsResponse {
    // Intentionally empty: the EnableAlarmActions operation returns no data,
    // so this JAXB-mapped response element carries no fields.
}
| {
"content_hash": "f854d80d5b90df9d088488ff7a27900b",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 95,
"avg_line_length": 24.4375,
"alnum_prop": 0.6956521739130435,
"repo_name": "jdgwartney/vsphere-ws",
"id": "5057e33a94b98e687c40cdcc416c7ec5e5c18677",
"size": "782",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "java/JAXWS/samples/com/vmware/vim25/EnableAlarmActionsResponse.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1349"
},
{
"name": "C#",
"bytes": "775222"
},
{
"name": "C++",
"bytes": "14040"
},
{
"name": "CSS",
"bytes": "48826"
},
{
"name": "Java",
"bytes": "13417097"
},
{
"name": "JavaScript",
"bytes": "24681"
},
{
"name": "Shell",
"bytes": "9982"
},
{
"name": "Smalltalk",
"bytes": "14906"
}
],
"symlink_target": ""
} |
// Root context registered under "root_id_1". Used by canary-check tests:
// configuration succeeds only when a non-empty payload is supplied.
class CanaryCheckRootContext1 : public RootContext {
public:
  explicit CanaryCheckRootContext1(uint32_t id, std::string_view root_id)
      : RootContext(id, root_id) {}
  // Accepts any configuration whose size is non-zero.
  bool onConfigure(size_t s) override {
    LOG_TRACE("onConfigure: root_id_1");
    return s != 0;
  }
};

// Per-stream context shared by both root contexts; adds no behavior of its own.
class CanaryCheckContext : public Context {
public:
  explicit CanaryCheckContext(uint32_t id, RootContext *root) : Context(id, root) {}
};

// Root context registered under "root_id_2"; mirrors CanaryCheckRootContext1
// but logs under its own root id so tests can tell the two apart.
class CanaryCheckRootContext2 : public RootContext {
public:
  explicit CanaryCheckRootContext2(uint32_t id, std::string_view root_id)
      : RootContext(id, root_id) {}
  bool onConfigure(size_t s) override {
    LOG_TRACE("onConfigure: root_id_2");
    return s != 0;
  }
};

// Register each root context under its root id; the plain stream-context
// factory is shared between the two registrations.
static RegisterContextFactory register_CanaryCheckContext1(CONTEXT_FACTORY(CanaryCheckContext),
                                                           ROOT_FACTORY(CanaryCheckRootContext1),
                                                           "root_id_1");
static RegisterContextFactory register_CanaryCheckContext2(CONTEXT_FACTORY(CanaryCheckContext),
                                                           ROOT_FACTORY(CanaryCheckRootContext2),
                                                           "root_id_2");
| {
"content_hash": "ee98e0ae8ca97ec2c383a58bfca7fc10",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 97,
"avg_line_length": 38.5625,
"alnum_prop": 0.6037277147487844,
"repo_name": "proxy-wasm/proxy-wasm-cpp-host",
"id": "38cf8b4108a2c7024bd0a80b50afae20b4266a75",
"size": "1926",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_data/canary_check.cc",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1538"
},
{
"name": "C++",
"bytes": "465524"
},
{
"name": "Rust",
"bytes": "8765"
},
{
"name": "Starlark",
"bytes": "91432"
}
],
"symlink_target": ""
} |
// Builds the chrome://usb-internals WebUI: registers its data source, serves
// the generated Mojo JS bindings, and tightens the page's CSP.
UsbInternalsUI::UsbInternalsUI(content::WebUI* web_ui)
    : ui::MojoWebUIController(web_ui) {
  // Set up the chrome://usb-internals source.
  content::WebUIDataSource* source =
      content::WebUIDataSource::Create(chrome::kChromeUIUsbInternalsHost);
  // Generated mojom-webui JS modules served alongside the page resources.
  static constexpr webui::ResourcePath kPaths[] = {
      {"usb_enumeration_options.mojom-webui.js",
       IDR_USB_ENUMERATION_OPTIONS_MOJOM_WEBUI_JS},
      {"usb_manager_client.mojom-webui.js",
       IDR_USB_DEVICE_MANAGER_CLIENT_MOJOM_WEBUI_JS},
  };
  source->AddResourcePaths(kPaths);
  webui::SetupWebUIDataSource(
      source,
      base::make_span(kUsbInternalsResources, kUsbInternalsResourcesSize),
      IDR_USB_INTERNALS_USB_INTERNALS_HTML);
  // Require Trusted Types for script sinks and allowlist the two policies
  // the page registers ("static-types" and the test-only "usb-test-static").
  source->OverrideContentSecurityPolicy(
      network::mojom::CSPDirectiveName::RequireTrustedTypesFor,
      "require-trusted-types-for 'script';");
  source->OverrideContentSecurityPolicy(
      network::mojom::CSPDirectiveName::TrustedTypes,
      "trusted-types static-types usb-test-static;");
  content::WebUIDataSource::Add(Profile::FromWebUI(web_ui), source);
}
WEB_UI_CONTROLLER_TYPE_IMPL(UsbInternalsUI)
UsbInternalsUI::~UsbInternalsUI() {}
// Creates the page handler when the renderer binds the UsbInternalsPageHandler
// Mojo interface; a re-bind replaces any previously created handler.
void UsbInternalsUI::BindInterface(
    mojo::PendingReceiver<mojom::UsbInternalsPageHandler> receiver) {
  page_handler_ =
      std::make_unique<UsbInternalsPageHandler>(std::move(receiver));
}
| {
"content_hash": "2179f8b174dcef42534f236535cf9e7e",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 74,
"avg_line_length": 36.891891891891895,
"alnum_prop": 0.7333333333333333,
"repo_name": "nwjs/chromium.src",
"id": "d5965db89923d0a39dc02953b8b747b81e71ca97",
"size": "2087",
"binary": false,
"copies": "1",
"ref": "refs/heads/nw70",
"path": "chrome/browser/ui/webui/usb_internals/usb_internals_ui.cc",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
smserver.smutils package
========================
Subpackages
-----------
.. toctree::
smserver.smutils.smconnections
smserver.smutils.smpacket
Submodules
----------
smserver.smutils.smattack module
--------------------------------
.. automodule:: smserver.smutils.smattack
:members:
:undoc-members:
:show-inheritance:
smserver.smutils.smconn module
------------------------------
.. automodule:: smserver.smutils.smconn
:members:
:undoc-members:
:show-inheritance:
smserver.smutils.smthread module
--------------------------------
.. automodule:: smserver.smutils.smthread
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: smserver.smutils
:members:
:undoc-members:
:show-inheritance:
| {
"content_hash": "5488cd8060c8abbcca55155b207cfbd5",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 41,
"avg_line_length": 17.347826086956523,
"alnum_prop": 0.5739348370927319,
"repo_name": "ningirsu/stepmania-server",
"id": "469e3e4b6932ac905f74b14668c136f8a834d3e6",
"size": "798",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/smserver.smutils.rst",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "378506"
},
{
"name": "Shell",
"bytes": "2768"
}
],
"symlink_target": ""
} |
module API
module Entities
class UserSafe < Grape::Entity
expose :name, :username
end
class UserBasic < UserSafe
expose :id, :state, :avatar_url
expose :web_url do |user, options|
Gitlab::Application.routes.url_helpers.user_url(user)
end
end
class User < UserBasic
expose :created_at
expose :is_admin?, as: :is_admin
expose :bio, :skype, :linkedin, :twitter, :website_url
end
class Identity < Grape::Entity
expose :provider, :extern_uid
end
class UserFull < User
expose :email
expose :theme_id, :color_scheme_id, :projects_limit, :current_sign_in_at
expose :identities, using: Entities::Identity
expose :can_create_group?, as: :can_create_group
expose :can_create_project?, as: :can_create_project
expose :two_factor_enabled
end
class UserLogin < UserFull
expose :private_token
end
class Email < Grape::Entity
expose :id, :email
end
class Hook < Grape::Entity
expose :id, :url, :created_at
end
class ProjectHook < Hook
expose :project_id, :push_events
expose :issues_events, :merge_requests_events, :tag_push_events, :note_events, :enable_ssl_verification
end
class ForkedFromProject < Grape::Entity
expose :id
expose :name, :name_with_namespace
expose :path, :path_with_namespace
end
class Project < Grape::Entity
expose :id, :description, :default_branch, :tag_list
expose :public?, as: :public
expose :archived?, as: :archived
expose :visibility_level, :ssh_url_to_repo, :http_url_to_repo, :web_url
expose :owner, using: Entities::UserBasic, unless: ->(project, options) { project.group }
expose :name, :name_with_namespace
expose :path, :path_with_namespace
expose :issues_enabled, :merge_requests_enabled, :wiki_enabled, :builds_enabled, :snippets_enabled, :created_at, :last_activity_at
expose :creator_id
expose :namespace
expose :forked_from_project, using: Entities::ForkedFromProject, if: lambda{ | project, options | project.forked? }
expose :avatar_url
expose :star_count, :forks_count
end
class ProjectMember < UserBasic
expose :access_level do |user, options|
options[:project].project_members.find_by(user_id: user.id).access_level
end
end
class Group < Grape::Entity
expose :id, :name, :path, :description
expose :avatar_url
expose :web_url do |group, options|
Gitlab::Application.routes.url_helpers.group_url(group)
end
end
class GroupDetail < Group
expose :projects, using: Entities::Project
end
class GroupMember < UserBasic
expose :access_level do |user, options|
options[:group].group_members.find_by(user_id: user.id).access_level
end
end
class RepoObject < Grape::Entity
expose :name
expose :commit do |repo_obj, options|
if repo_obj.respond_to?(:commit)
repo_obj.commit
elsif options[:project]
options[:project].repository.commit(repo_obj.target)
end
end
expose :protected do |repo, options|
if options[:project]
options[:project].protected_branch? repo.name
end
end
end
class RepoTreeObject < Grape::Entity
expose :id, :name, :type
expose :mode do |obj, options|
filemode = obj.mode.to_s(8)
filemode = "0" + filemode if filemode.length < 6
filemode
end
end
class RepoCommit < Grape::Entity
expose :id, :short_id, :title, :author_name, :author_email, :created_at
expose :safe_message, as: :message
end
class RepoCommitDetail < RepoCommit
expose :parent_ids, :committed_date, :authored_date
expose :status
end
class ProjectSnippet < Grape::Entity
expose :id, :title, :file_name
expose :author, using: Entities::UserBasic
expose :expires_at, :updated_at, :created_at
end
class ProjectEntity < Grape::Entity
expose :id, :iid
expose(:project_id) { |entity| entity.project.id }
expose :title, :description
expose :state, :created_at, :updated_at
end
class RepoDiff < Grape::Entity
expose :old_path, :new_path, :a_mode, :b_mode, :diff
expose :new_file, :renamed_file, :deleted_file
end
class Milestone < ProjectEntity
expose :due_date
end
class Issue < ProjectEntity
expose :label_names, as: :labels
expose :milestone, using: Entities::Milestone
expose :assignee, :author, using: Entities::UserBasic
end
class MergeRequest < ProjectEntity
expose :target_branch, :source_branch, :upvotes, :downvotes
expose :author, :assignee, using: Entities::UserBasic
expose :source_project_id, :target_project_id
expose :label_names, as: :labels
expose :description
expose :work_in_progress?, as: :work_in_progress
expose :milestone, using: Entities::Milestone
end
class MergeRequestChanges < MergeRequest
expose :diffs, as: :changes, using: Entities::RepoDiff do |compare, _|
compare.diffs
end
end
class SSHKey < Grape::Entity
expose :id, :title, :key, :created_at
end
class SSHKeyWithUser < SSHKey
expose :user, using: Entities::UserFull
end
class Note < Grape::Entity
expose :id
expose :note, as: :body
expose :attachment_identifier, as: :attachment
expose :author, using: Entities::UserBasic
expose :created_at
expose :system?, as: :system
expose :upvote?, as: :upvote
expose :downvote?, as: :downvote
end
class MRNote < Grape::Entity
expose :note
expose :author, using: Entities::UserBasic
end
class CommitNote < Grape::Entity
expose :note
expose(:path) { |note| note.diff_file_name }
expose(:line) { |note| note.diff_new_line }
expose(:line_type) { |note| note.diff_line_type }
expose :author, using: Entities::UserBasic
expose :created_at
end
class CommitStatus < Grape::Entity
expose :id, :sha, :ref, :status, :name, :target_url, :description,
:created_at, :started_at, :finished_at, :allow_failure
expose :author, using: Entities::UserBasic
end
class Event < Grape::Entity
expose :title, :project_id, :action_name
expose :target_id, :target_type, :author_id
expose :data, :target_title
expose :created_at
expose :author_username do |event, options|
if event.author
event.author.username
end
end
end
class Namespace < Grape::Entity
expose :id, :path, :kind
end
class ProjectAccess < Grape::Entity
expose :access_level
expose :notification_level
end
class GroupAccess < Grape::Entity
expose :access_level
expose :notification_level
end
class ProjectService < Grape::Entity
expose :id, :title, :created_at, :updated_at, :active
expose :push_events, :issues_events, :merge_requests_events, :tag_push_events, :note_events
# Expose serialized properties
expose :properties do |service, options|
field_names = service.fields.
select { |field| options[:include_passwords] || field[:type] != 'password' }.
map { |field| field[:name] }
service.properties.slice(*field_names)
end
end
class ProjectWithAccess < Project
expose :permissions do
expose :project_access, using: Entities::ProjectAccess do |project, options|
project.project_members.find_by(user_id: options[:user].id)
end
expose :group_access, using: Entities::GroupAccess do |project, options|
if project.group
project.group.group_members.find_by(user_id: options[:user].id)
end
end
end
end
class Label < Grape::Entity
expose :name, :color
end
class Compare < Grape::Entity
expose :commit, using: Entities::RepoCommit do |compare, options|
Commit.decorate(compare.commits, nil).last
end
expose :commits, using: Entities::RepoCommit do |compare, options|
Commit.decorate(compare.commits, nil)
end
expose :diffs, using: Entities::RepoDiff do |compare, options|
compare.diffs
end
expose :compare_timeout do |compare, options|
compare.timeout
end
expose :same, as: :compare_same_ref
end
class Contributor < Grape::Entity
expose :name, :email, :commits, :additions, :deletions
end
class BroadcastMessage < Grape::Entity
expose :message, :starts_at, :ends_at, :color, :font
end
class ApplicationSetting < Grape::Entity
expose :id
expose :default_projects_limit
expose :signup_enabled
expose :signin_enabled
expose :gravatar_enabled
expose :sign_in_text
expose :created_at
expose :updated_at
expose :home_page_url
expose :default_branch_protection
expose :twitter_sharing_enabled
expose :restricted_visibility_levels
expose :max_attachment_size
expose :session_expire_delay
expose :default_project_visibility
expose :default_snippet_visibility
expose :restricted_signup_domains
expose :user_oauth_applications
expose :after_sign_out_path
end
class Release < Grape::Entity
expose :tag, :description
end
class RepoTag < Grape::Entity
expose :name
expose :message do |repo_obj, _options|
if repo_obj.respond_to?(:message)
repo_obj.message
else
nil
end
end
expose :commit do |repo_obj, options|
if repo_obj.respond_to?(:commit)
repo_obj.commit
elsif options[:project]
options[:project].repository.commit(repo_obj.target)
end
end
expose :release, using: Entities::Release do |repo_obj, options|
if options[:project]
options[:project].releases.find_by(tag: repo_obj.name)
end
end
end
end
end
| {
"content_hash": "730f02918e9d9c7740cb198f7e180708",
"timestamp": "",
"source": "github",
"line_count": 355,
"max_line_length": 136,
"avg_line_length": 28.7943661971831,
"alnum_prop": 0.6318724320093915,
"repo_name": "ferdinandrosario/gitlabhq",
"id": "d6aec03d7f557fab6d3539388ca9db17b61529c6",
"size": "10222",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/api/entities.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "152009"
},
{
"name": "CoffeeScript",
"bytes": "152204"
},
{
"name": "Cucumber",
"bytes": "129242"
},
{
"name": "HTML",
"bytes": "558208"
},
{
"name": "JavaScript",
"bytes": "29805"
},
{
"name": "Ruby",
"bytes": "3058542"
},
{
"name": "Shell",
"bytes": "19580"
}
],
"symlink_target": ""
} |
<?php
namespace cyneek\yii2\uploadBehavior;
use cyneek\yii2\uploadBehavior\models\ImageFileModel;
use Yii;
use yii\base\InvalidConfigException;
use yii\helpers\ArrayHelper;
use yii\web\UploadedFile;
/**
* UploadImageBehavior automatically uploads image, creates thumbnails and adds a new row in the uploaded files table.
*
* To use UploadImageBehavior, insert the following code to your ActiveRecord class:
*
* ```php
* use cyneek\yii2\upload\UploadBehavior;
*
* function behaviors()
* {
* return [
* [
* 'class' => UploadImageBehavior::className(),
* 'attribute' => 'file',
* 'scenarios' => ['insert', 'update'],
* 'thumbPath' => '@webroot/upload/{id}/images/thumb',
* 'thumbUrl' => '@web/upload/{id}/images/thumb',
* 'thumbs' => [
* 'thumb' => ['width' => 400, 'quality' => 90],
* 'preview' => ['width' => 200, 'height' => 200],
* ],
* ],
* ];
* }
* ```
*
* @author Joseba Juániz <joseba.juaniz@gmail.com>
*/
class UploadImageBehavior extends UploadBehavior
{
    /**
     * Thumbnail profiles generated for every uploaded image.
     * Maps a profile name to a list of image actions, each with keys:
     * - `action`
     * - `width`
     * - `height`
     * - `quality`
     * [
     *    'thumb' => [
     *          ['action' => 'crop', 'width' => 200, 'height' => 200, 'quality' => 90],
     *     ]
     * ]
     *
     * @var array the thumbnail profiles
     */
    public $thumbs = [];

    /**
     * Upload path for thumbnails; falls back to the main upload path when null.
     *
     * @var string|null
     */
    public $thumbPath;

    /**
     * Describes the actions that will be made to the original image
     * [
     *    ['action' => 'crop', 'width' => 200, 'height' => 200, 'quality' => 90]
     * ]
     *
     * @var String[]
     */
    public $imageActions = [];

    // Short name of the file-model class resolved through the uploadBehavior module.
    protected $_fileModel = 'ImageFileModel';

    /**
     * @inheritdoc
     */
    public function init()
    {
        parent::init();
        // Validate and canonicalize $thumbs / $imageActions before first use.
        $this->_normalizeThumbData();
    }

    // TODO (translated from Spanish): imageActions handling still needs to be
    // applied in saveFile and related code.

    /**
     * Checks that the thumbnail configuration is valid and normalizes it:
     * - falls back to the main upload path when no thumb path is set,
     * - wraps single action arrays so every entry holds a list of actions,
     * - rejects thumb actions where neither width nor height is positive.
     *
     * @throws InvalidConfigException
     */
    protected function _normalizeThumbData()
    {
        if ($this->thumbPath === NULL)
        {
            $this->thumbPath = $this->path;
        }

        // Allow a single action array as shorthand for a list of actions.
        if (!empty($this->imageActions) && !is_array(reset($this->imageActions)))
        {
            $this->imageActions = [$this->imageActions];
        }

        foreach ($this->thumbs as $key => $actions)
        {
            if (!is_array($actions))
            {
                $this->thumbs[$key] = [$this->thumbs[$key]];
            }
        }

        // Every thumb action must define at least one positive dimension.
        foreach ($this->thumbs as $key => $actions)
        {
            foreach ($actions as $config)
            {
                $width = ArrayHelper::getValue($config, 'width', 0);
                $height = ArrayHelper::getValue($config, 'height', 0);

                if ($height < 1 && $width < 1)
                {
                    throw new InvalidConfigException(sprintf(
                        'Length of either side of thumb cannot be 0 or negative, current size ' .
                        'is %sx%s', $width, $height
                    ));
                }
            }
        }
    }

    /**
     * @inheritdoc
     */
    protected function afterUpload($image)
    {
        parent::afterUpload($image);
        // Generate every configured thumbnail profile for the stored image.
        $this->createThumbs($image);
    }

    /**
     * Saves the uploaded file.
     * @param UploadedFile $file the uploaded file instance
     * @param ImageFileModel $fileModel existing model to reuse (e.g. a thumbnail
     * child model); a new one is built from the module config when null
     * @return ImageFileModel the persisted file model
     */
    protected function save($file, $fileModel = NULL)
    {
        if (!is_null($fileModel))
        {
            // Reuse the provided model, filling in only what is missing.
            $fileModel->file = $file;
            $fileModel->fileManager = $this->_getFileManager();

            if (is_null($fileModel->imageActions))
            {
                $fileModel->imageActions = $this->imageActions;
            }
        }
        else
        {
            // Build a fresh file model through the module's DI configuration.
            $modelData = Yii::$app->getModule('uploadBehavior')->model($this->_fileModel, [
                'file' => $file,
                'model' => $this->owner,
                'uploadPath' => $this->path,
                'entityAttribute' => $this->attribute,
                'imageActions' => $this->imageActions,
                'fileManager' => $this->_getFileManager()
            ]);

            /** @var ImageFileModel $fileModel */
            $fileModel = Yii::createObject($modelData);
        }

        $fileModel->save();

        return $fileModel;
    }

    /**
     * Creates one child file model per thumb profile and uploaded file,
     * linked to the original image through parentModel/childName.
     *
     * @param ImageFileModel $image the original (parent) image model
     * @throws \yii\base\Exception
     */
    protected function createThumbs($image)
    {
        foreach ($this->thumbs as $profile => $actions)
        {
            foreach ($this->_files as $_file)
            {
                $modelData = Yii::$app->getModule('uploadBehavior')->model($this->_fileModel, [
                    'file' => $_file,
                    'model' => $this->owner,
                    'uploadPath' => $this->thumbPath,
                    'entityAttribute' => $this->attribute,
                    'fileManager' => $this->_getFileManager(),
                    'parentModel' => $image,
                    'imageActions' => $actions,
                    'childName' => $profile,
                ]);

                /** @var ImageFileModel $fileModel */
                $fileModel = Yii::createObject($modelData);

                $this->save($_file, $fileModel);
            }
        }
    }

    /**
     * Returns all the files linked to the model
     * in the attribute set in the behavior
     *
     * @param $attribute
     * @return FileModel[]
     */
    public function linkedImages($attribute)
    {
        return $this->linkedFiles($attribute);
    }

    /**
     * Returns the first or only file linked to the model
     * in the attribute set in the behavior
     *
     * @param $attribute
     * @return FileModel
     */
    public function linkedImage($attribute)
    {
        return $this->linkedFile($attribute);
    }
}
| {
"content_hash": "8d6cc4edadde90746453c2901e669f2d",
"timestamp": "",
"source": "github",
"line_count": 241,
"max_line_length": 118,
"avg_line_length": 25.38589211618257,
"alnum_prop": 0.4919908466819222,
"repo_name": "suver/yii2-example",
"id": "836623dcf0211aadba99d90698b1bef16ebb2ea8",
"size": "6119",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vendor/cyneek/yii2-upload-behavior/UploadImageBehavior.php",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1541"
},
{
"name": "CSS",
"bytes": "470670"
},
{
"name": "HTML",
"bytes": "9020"
},
{
"name": "JavaScript",
"bytes": "55065"
},
{
"name": "PHP",
"bytes": "365490"
},
{
"name": "Shell",
"bytes": "3257"
}
],
"symlink_target": ""
} |
package com.github.amlcurran.showcaseview;
import android.annotation.TargetApi;
import android.os.Build;
import android.view.View;
/**
 * Small helpers for gating behavior on the device's Android API level.
 */
public class ApiUtils {

    /** Returns true when the device runs the given SDK version or newer. */
    public boolean isCompatWith(int versionCode) {
        return Build.VERSION.SDK_INT >= versionCode;
    }

    /** Returns true on Honeycomb (API 11) and newer devices. */
    public boolean isCompatWithHoneycomb() {
        return isCompatWith(Build.VERSION_CODES.HONEYCOMB);
    }

    /**
     * Marks the view to fit system windows on ICS (API 14) and newer;
     * a no-op on older releases where the setter is unavailable.
     */
    @TargetApi(14)
    public void setFitsSystemWindowsCompat(View view) {
        if (isCompatWith(Build.VERSION_CODES.ICE_CREAM_SANDWICH)) {
            view.setFitsSystemWindows(true);
        }
    }
}
| {
"content_hash": "2b9327782619c81946f8a47d4b703ef6",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 67,
"avg_line_length": 25.52173913043478,
"alnum_prop": 0.7001703577512777,
"repo_name": "vincent-t/KinoCast",
"id": "bc73c568f8b34e7f13c514f2baa6b0c468d1a207",
"size": "587",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "libs/ShowcaseView/src/main/java/com/github/amlcurran/showcaseview/ApiUtils.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "757771"
}
],
"symlink_target": ""
} |
package polymorphism.shape;
public class Circle extends Shape{
public void draw() {
System.out.println("Circle.draw()");
}
public void erase() {
System.out.println("Circle.erase()");
}
}
/**Output:
*/
| {
"content_hash": "ecbdeb9203e47cf12bda1e0563ea5c14",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 39,
"avg_line_length": 15.142857142857142,
"alnum_prop": 0.6650943396226415,
"repo_name": "weekend27/Thinking-in-Java",
"id": "2c3599daa9f0a181014b39291ac8594cc771785f",
"size": "212",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "code/polymorphism/shape/Circle.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "64491"
}
],
"symlink_target": ""
} |
namespace content {
class WebContents;
}
namespace ui {
class Accelerator;
}
namespace views {
class WebDialogView;
class Widget;
} // namespace views
namespace ash {
class CaptivePortalDialogDelegate;
class LayoutWidgetDelegateView;
class LoginDisplayHostMojo;
class OobeWebDialogView;
// This class manages the behavior of the Oobe UI dialog.
// Its lifecycle is managed by the widget created in Show(); closing that
// widget deletes this object (see OnDialogClosed()).
// WebDialogView<----delegate_----OobeUIDialogDelegate
//           |
//           |
//           V
//   clientView---->Widget's view hierarchy
class OobeUIDialogDelegate : public ui::WebDialogDelegate,
                             public ChromeKeyboardControllerClient::Observer,
                             public CaptivePortalWindowProxy::Observer,
                             public OobeUI::Observer,
                             public views::ViewObserver,
                             public SystemTrayObserver {
 public:
  explicit OobeUIDialogDelegate(base::WeakPtr<LoginDisplayHostMojo> controller);

  OobeUIDialogDelegate(const OobeUIDialogDelegate&) = delete;
  OobeUIDialogDelegate& operator=(const OobeUIDialogDelegate&) = delete;

  ~OobeUIDialogDelegate() override;

  // Show the dialog widget.
  void Show();

  // Show the dialog widget stretched to full screen.
  void ShowFullScreen();

  // Close the widget, and it will delete this object.
  void Close();

  // Hide the dialog widget, but do not shut it down.
  void Hide();

  // Returns whether the dialog is currently visible.
  bool IsVisible();

  // Update the oobe state of the dialog.
  void SetState(OobeDialogState state);

  // Tell the dialog whether to call FixCaptivePortal next time it is shown.
  void SetShouldDisplayCaptivePortal(bool should_display);

  content::WebContents* GetWebContents();

  OobeUI* GetOobeUI() const;
  gfx::NativeWindow GetNativeWindow() const;
  views::Widget* GetWebDialogWidget() const;
  views::View* GetWebDialogView();

  CaptivePortalDialogDelegate* captive_portal_delegate_for_test() {
    return captive_portal_delegate_.get();
  }

 private:
  // ui::WebDialogDelegate:
  ui::ModalType GetDialogModalType() const override;
  std::u16string GetDialogTitle() const override;
  GURL GetDialogContentURL() const override;
  void GetWebUIMessageHandlers(
      std::vector<content::WebUIMessageHandler*>* handlers) const override;
  void GetDialogSize(gfx::Size* size) const override;
  std::string GetDialogArgs() const override;
  // NOTE: This function starts cleanup sequence that would call FinishCleanup
  // and delete this object in the end.
  void OnDialogClosed(const std::string& json_retval) override;
  void OnCloseContents(content::WebContents* source,
                       bool* out_close_dialog) override;
  bool ShouldShowDialogTitle() const override;
  bool HandleContextMenu(content::RenderFrameHost& render_frame_host,
                         const content::ContextMenuParams& params) override;
  std::vector<ui::Accelerator> GetAccelerators() override;
  bool AcceleratorPressed(const ui::Accelerator& accelerator) override;

  // views::ViewObserver:
  void OnViewBoundsChanged(views::View* observed_view) override;
  void OnViewIsDeleting(views::View* observed_view) override;

  // ChromeKeyboardControllerClient::Observer:
  void OnKeyboardVisibilityChanged(bool visible) override;

  // CaptivePortalWindowProxy::Observer:
  void OnBeforeCaptivePortalShown() override;
  void OnAfterCaptivePortalHidden() override;

  // OobeUI::Observer:
  void OnCurrentScreenChanged(OobeScreenId current_screen,
                              OobeScreenId new_screen) override;
  void OnDestroyingOobeUI() override;

  // SystemTrayObserver:
  void OnFocusLeavingSystemTray(bool reverse) override;

  base::WeakPtr<LoginDisplayHostMojo> controller_;
  base::WeakPtr<CaptivePortalDialogDelegate> captive_portal_delegate_;

  // Root widget. It is assumed that widget is placed as a full-screen inside
  // LockContainer.
  views::Widget* widget_ = nullptr;
  // Reference to view owned by widget_.
  LayoutWidgetDelegateView* layout_view_ = nullptr;
  // Reference to dialog view stored in widget_.
  OobeWebDialogView* dialog_view_ = nullptr;

  // Scoped observations below unregister automatically on destruction.
  base::ScopedObservation<views::View, views::ViewObserver> view_observer_{
      this};
  base::ScopedObservation<ChromeKeyboardControllerClient,
                          ChromeKeyboardControllerClient::Observer>
      keyboard_observer_{this};
  base::ScopedObservation<CaptivePortalWindowProxy,
                          CaptivePortalWindowProxy::Observer>
      captive_portal_observer_{this};
  base::ScopedObservation<OobeUI, OobeUI::Observer> oobe_ui_observer_{this};
  std::unique_ptr<
      base::ScopedObservation<LoginScreenClientImpl,
                              SystemTrayObserver,
                              &LoginScreenClientImpl::AddSystemTrayObserver,
                              &LoginScreenClientImpl::RemoveSystemTrayObserver>>
      scoped_system_tray_observer_;

  // Keyboard shortcuts the dialog handles, mapped to login actions
  // (served through GetAccelerators()/AcceleratorPressed()).
  std::map<ui::Accelerator, LoginAcceleratorAction> accel_map_;
  // Last OOBE dialog state reported via SetState().
  OobeDialogState state_ = OobeDialogState::HIDDEN;

  // Whether the captive portal screen should be shown the next time the Gaia
  // dialog is opened.
  bool should_display_captive_portal_ = false;
};
} // namespace ash
#endif // CHROME_BROWSER_ASH_LOGIN_UI_OOBE_UI_DIALOG_DELEGATE_H_
| {
"content_hash": "796ca4b37f4a2ff769bb188d5c581e59",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 80,
"avg_line_length": 34.94117647058823,
"alnum_prop": 0.7128694350916573,
"repo_name": "nwjs/chromium.src",
"id": "bd3a1e81fd0e72c49e43e52dcc288c00fcadf405",
"size": "6349",
"binary": false,
"copies": "1",
"ref": "refs/heads/nw70",
"path": "chrome/browser/ash/login/ui/oobe_ui_dialog_delegate.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
It is a wrapper for creating an Agent-type supertest object that adds x-headers to the outgoing request object.
## Usage
var eSupertest = require('easy-xapi-supertest');
var app = require('../app');
var getAgent = eSupertest.getAgentFactory(app);
var agent = getAgent({
user: 'Jack',
role: 'user'
});
You can also pass a transform function to the `getAgentFactory` function
var eSupertest = require('easy-xapi-supertest');
var app = require('../app');
var getAgent = eSupertest.getAgentFactory(app, function (user, role) {
return {
user: user,
role: role || 'user'
}
});
var agent = getAgent('Jack');
## License
The MIT License (MIT)
Copyright (c) 2015 Karl Düüna
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
| {
"content_hash": "b9b0841a0aa8d1b8a96907996286b120",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 112,
"avg_line_length": 33.19230769230769,
"alnum_prop": 0.7549246813441484,
"repo_name": "DeadAlready/easy-xapi-supertest",
"id": "9ebbca68bd0822a2243bc32d368e57afc52ac69f",
"size": "1751",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "1728"
},
{
"name": "TypeScript",
"bytes": "1951"
}
],
"symlink_target": ""
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<title>View Sources</title>
<link href="/struts2-showcase-2.0.6/styles/main.css" rel="stylesheet" type="text/css" media="all"/>
<link href="/struts2-showcase-2.0.6/struts/niftycorners/niftyCorners.css" rel="stylesheet" type="text/css"/>
<link href="/struts2-showcase-2.0.6/struts/niftycorners/niftyPrint.css" rel="stylesheet" type="text/css" media="print"/>
<script language="JavaScript" type="text/javascript" src="/struts2-showcase-2.0.6/struts/niftycorners/nifty.js"></script>
<script language="JavaScript" type="text/javascript">
window.onload=function(){
if(!NiftyCheck())
return;
Rounded("blockquote","tr bl","#ECF1F9","#CDFFAA","smooth border #88D84F");
Rounded("div#outer-header", "all", "white", "#818EBD", "smooth border #434F7C");
Rounded("div#footer", "all", "white", "#818EBD", "smooth border #434F7C");
}
</script>
<!--// START SNIPPET: common-include-->
<link rel="stylesheet" href="/struts2-showcase-2.0.6/struts/xhtml/styles.css" type="text/css"/>
<script language="JavaScript" type="text/javascript">
// Dojo configuration
djConfig = {
baseRelativePath: "/struts2-showcase-2.0.6/struts/dojo",
isDebug: false,
bindEncoding: "UTF-8",
debugAtAllCosts: true // not needed, but allows the Venkman debugger to work with the includes
};
</script>
<script language="JavaScript" type="text/javascript"
src="/struts2-showcase-2.0.6/struts/dojo/dojo.js"></script>
<script language="JavaScript" type="text/javascript"
src="/struts2-showcase-2.0.6/struts/simple/dojoRequire.js"></script>
<script language="JavaScript" type="text/javascript"
src="/struts2-showcase-2.0.6/struts/ajax/dojoRequire.js"></script>
<script language="JavaScript" type="text/javascript"
src="/struts2-showcase-2.0.6/struts/CommonFunctions.js"></script>
<!--// END SNIPPET: common-include-->
<link rel="stylesheet" type="text/css" href="/struts2-showcase-2.0.6/struts/tabs.css?page=%2Fajax%2Fremotediv%2Findex.jsp">
<link rel="stylesheet" type="text/css" href="/struts2-showcase-2.0.6/struts/niftycorners/niftyCorners.css?page=%2Fajax%2Fremotediv%2Findex.jsp">
<link rel="stylesheet" type="text/css" href="/struts2-showcase-2.0.6/struts/niftycorners/niftyPrint.css?page=%2Fajax%2Fremotediv%2Findex.jsp" media="print">
<script type="text/javascript" src="/struts2-showcase-2.0.6/struts/niftycorners/nifty.js?page=%2Fajax%2Fremotediv%2Findex.jsp"></script>
<script type="text/javascript">
window.onload = function() {
if (!NiftyCheck())
return;
Rounded("li.tab_selected", "top", "white", "transparent", "border #ffffffS");
Rounded("li.tab_unselected", "top", "white", "transparent", "border #ffffffS");
// Rounded("div#tab_header_main li","top","white","transparent","border #ffffffS");
// "white" needs to be replaced with the background color
}
</script>
</head>
<body id="page-home">
<div id="page">
<div id="outer-header">
<div id="header" class="clearfix">
<div id="branding">
<h1 class="title">Struts Showcase</h1>
2007/04/06 03:16:49
</div><!-- end branding -->
<div id="search">
<img src="/struts2-showcase-2.0.6/images/struts-power.gif" alt="Powered by Struts"/>
</div><!-- end search -->
<hr/>
</div>
</div><!-- end header -->
<div id="content" class="clearfix">
<h1>View Sources</h1>
<script type="text/javascript">
dojo.require("dojo.widget.TabContainer");
dojo.require("dojo.widget.LinkPane");
dojo.require("dojo.widget.ContentPane");
</script>
<div dojoType="TabContainer"
id="test" doLayout="false">
<div dojoType="struts:BindDiv"
id="one" label="Page" showError="true">
<h3>/ajax/remotediv/index.jsp</h3>
<pre>
1: <%@ taglib prefix="s" uri="/struts-tags" %>
2: <html>
3: <head>
4: <title>AJAX-based remote DIV tag</title>
5: <%@ include file="/ajax/commonInclude.jsp" %>
6: </head>
7:
8: <body>
9:
10: <h2>Examples</h2>
11:
12: <p>
13: <ol>
14: <li>
15: <a href="example1.jsp">A simple DIV that refreshes only once</a>
16: </li>
17:
18: <li>
19: <a href="example10.jsp">A simple DIV that uses a custom handler</a>
20: </li>
21:
22: <li>
23: <a href="example2.jsp?url=/AjaxTest.action">A simple DIV that updates every 2 seconds, with indicator</a>
24: </li>
25:
26: <li>
27: <a href="example4.jsp">A simple DIV that updates every 5 seconds with loading text and reloading text and delay</a>
28: </li>
29:
30: <li>
31: <a href="example5.jsp">A simple DIV's that cannot contact the server, with fixed error message</a>
32: </li>
33:
34:
35: <li>
36: <a href="example7.jsp">A div that calls the server, and JS in the resulting page is executed</a>
37: </li>
38:
39: <li>
40: <a href="example8.jsp">A div that will listen to events to refresh and start/stop autoupdate</a>
41: </li>
42:
43: <li>
44: <a href="example9.jsp">A div that will listen to events to refresh and start/stop autoupdate, publish notifyTopics</a>
45: </li>
46:
47: </ol>
48:
49:
50: </p>
51:
52: <s:include value="../footer.jsp"/>
53:
54: </body>
55: </html>
</pre>
</div>
<div dojoType="struts:BindDiv"
id="two" label="Configuration" showError="true">
<h3>Unknown configuration</h3>
<pre>
</pre>
</div>
<div dojoType="struts:BindDiv"
id="three" label="Java Action" showError="true">
<h3>Unknown or unavailable Action class</h3>
<pre>
</pre>
</div>
</div>
<div id="nav">
<div class="wrapper">
<h2 class="accessibility">Navigation</h2>
<ul class="clearfix">
<li><a href="/struts2-showcase-2.0.6/showcase.jsp?page=%2Fajax%2Fremotediv%2Findex.jsp">Home</a></li>
<li><a href="/struts2-showcase-2.0.6/ajax/index.jsp?page=%2Fajax%2Fremotediv%2Findex.jsp">Ajax</a></li>
<li><a href="/struts2-showcase-2.0.6/chat/index.jsp?page=%2Fajax%2Fremotediv%2Findex.jsp">Ajax Chat</a></li>
<li><a href="/struts2-showcase-2.0.6/actionchaining/actionChain1!input.action">Action Chaining</a></li>
<li><a href="/struts2-showcase-2.0.6/config-browser/index.action">Config Browser</a></li>
<li><a href="/struts2-showcase-2.0.6/conversion/index.jsp?page=%2Fajax%2Fremotediv%2Findex.jsp">Conversion</a></li>
<li><a href="/struts2-showcase-2.0.6/empmanager/index.jsp?page=%2Fajax%2Fremotediv%2Findex.jsp">CRUD</a></li>
<li><a href="/struts2-showcase-2.0.6/wait/index.jsp?page=%2Fajax%2Fremotediv%2Findex.jsp">Execute & Wait</a></li>
<li><a href="/struts2-showcase-2.0.6/filedownload/index.jsp?page=%2Fajax%2Fremotediv%2Findex.jsp">File Download</a></li>
<li><a href="/struts2-showcase-2.0.6/fileupload?page=%2Fajax%2Fremotediv%2Findex.jsp">File Upload</a></li>
<li><a href="/struts2-showcase-2.0.6/freemarker/index.jsp?page=%2Fajax%2Fremotediv%2Findex.jsp">Freemarker</a>
<li><a href="/struts2-showcase-2.0.6/hangman/hangmanMenu.action?page=%2Fajax%2Fremotediv%2Findex.jsp">Hangman</a></li>
<li><a href="/struts2-showcase-2.0.6/jsf/index.jsp?page=%2Fajax%2Fremotediv%2Findex.jsp">JavaServer Faces</a></li>
<li><a href="/struts2-showcase-2.0.6/person/index.jsp?page=%2Fajax%2Fremotediv%2Findex.jsp">Person Manager</a></li>
<li><a href="/struts2-showcase-2.0.6/tags/index.jsp?page=%2Fajax%2Fremotediv%2Findex.jsp">Tags</a></li>
<li><a href="/struts2-showcase-2.0.6/tiles/index.action?page=%2Fajax%2Fremotediv%2Findex.jsp">Tiles</a></li>
<li><a href="/struts2-showcase-2.0.6/token/index.jsp?page=%2Fajax%2Fremotediv%2Findex.jsp">Token</a></li>
<li><a href="/struts2-showcase-2.0.6/validation/index.jsp?page=%2Fajax%2Fremotediv%2Findex.jsp">Validation</a></li>
<li class="last"><a href="/struts2-showcase-2.0.6/help.jsp?page=%2Fajax%2Fremotediv%2Findex.jsp">Help</a></li>
</ul>
</div>
<hr/>
</div><!-- end nav -->
</div><!-- end content -->
<div>
<p>
<a href="/struts2-showcase-2.0.6/viewSource.action?config=file:/C:/mol/tools/jakarta-tomcat-5.0.30/webapps/struts2-showcase-2.0.6/WEB-INF/classes/struts.xml:60&className=org.apache.struts2.showcase.source.ViewSourceAction&page=//viewSource.jsp">View Sources</a>
</p>
</div>
<div id="footer" class="clearfix">
<p>Copyright © 2003-2007 The Apache Software Foundation.</p>
</div><!-- end footer -->
<p/>
</div><!-- end page -->
</body>
</html>
| {
"content_hash": "88be83c4b997c511462b27c0bb5cf66d",
"timestamp": "",
"source": "github",
"line_count": 235,
"max_line_length": 269,
"avg_line_length": 41.81276595744681,
"alnum_prop": 0.6099124771015673,
"repo_name": "freeVM/freeVM",
"id": "5138a0375b0f338560bd3a308c60d58317b568e0",
"size": "9826",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "enhanced/buildtest/tests/struts_test/resources/showcase/file-583689035.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "116828"
},
{
"name": "C",
"bytes": "17860389"
},
{
"name": "C++",
"bytes": "19007206"
},
{
"name": "CSS",
"bytes": "217777"
},
{
"name": "Java",
"bytes": "152108632"
},
{
"name": "Objective-C",
"bytes": "106412"
},
{
"name": "Objective-J",
"bytes": "11029421"
},
{
"name": "Perl",
"bytes": "305690"
},
{
"name": "Scilab",
"bytes": "34"
},
{
"name": "Shell",
"bytes": "153821"
},
{
"name": "XSLT",
"bytes": "152859"
}
],
"symlink_target": ""
} |
// Browser tests for the MultiTabLoading page-load histograms; the tests
// below show they are recorded only when another tab is still loading.
class MultiTabLoadingPageLoadMetricsBrowserTest : public InProcessBrowserTest {
 public:
  MultiTabLoadingPageLoadMetricsBrowserTest() {}
  ~MultiTabLoadingPageLoadMetricsBrowserTest() override {}

 protected:
  // URL of a trivially small page on the embedded test server.
  GURL GetTestURL() { return embedded_test_server()->GetURL("/simple.html"); }

  // Starts a navigation in the current tab without waiting for it to load.
  void NavigateToURLWithoutWaiting(GURL url) {
    ui_test_utils::NavigateToURLWithDisposition(
        browser(), url, WindowOpenDisposition::CURRENT_TAB,
        ui_test_utils::BROWSER_TEST_NONE);
  }

  void SetUpOnMainThread() override {
    ASSERT_TRUE(embedded_test_server()->Start());
  }

  // Joins the MultiTabLoading histogram prefix with the given suffix.
  std::string HistogramNameWithSuffix(const char* suffix) {
    return std::string(internal::kHistogramPrefixMultiTabLoading)
        .append(suffix);
  }
};
// With no other tab loading, no MultiTabLoading histograms are recorded.
IN_PROC_BROWSER_TEST_F(MultiTabLoadingPageLoadMetricsBrowserTest, SingleTab) {
  base::HistogramTester histogram_tester;
  ASSERT_TRUE(ui_test_utils::NavigateToURL(browser(), GetTestURL()));

  // Navigate away to force the histogram recording.
  ASSERT_TRUE(
      ui_test_utils::NavigateToURL(browser(), GURL(url::kAboutBlankURL)));

  histogram_tester.ExpectTotalCount(
      HistogramNameWithSuffix(internal::kHistogramLoadEventFiredSuffix), 0);
  histogram_tester.ExpectTotalCount(
      HistogramNameWithSuffix(
          internal::kHistogramLoadEventFiredBackgroundSuffix),
      0);
}
// TODO(crbug.com/1310328): Test is flaky on Linux, lacros, Chrome OS, Mac.
#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS) || BUILDFLAG(IS_MAC)
#define MAYBE_MultiTabForeground DISABLED_MultiTabForeground
#else
#define MAYBE_MultiTabForeground MultiTabForeground
#endif
// Loading a page in a new foreground tab while the original tab hangs on
// /hung records the (foreground) LoadEventFired histogram exactly once.
IN_PROC_BROWSER_TEST_F(MultiTabLoadingPageLoadMetricsBrowserTest,
                       MAYBE_MultiTabForeground) {
  base::HistogramTester histogram_tester;
  // Keep the first tab perpetually loading.
  NavigateToURLWithoutWaiting(embedded_test_server()->GetURL("/hung"));

  // Open a new foreground tab.
  ui_test_utils::NavigateToURLWithDispositionBlockUntilNavigationsComplete(
      browser(), GetTestURL(), 1, WindowOpenDisposition::NEW_FOREGROUND_TAB,
      ui_test_utils::BROWSER_TEST_WAIT_FOR_LOAD_STOP);

  // Navigate away to force the histogram recording.
  ASSERT_TRUE(
      ui_test_utils::NavigateToURL(browser(), GURL(url::kAboutBlankURL)));

  histogram_tester.ExpectTotalCount(
      HistogramNameWithSuffix(internal::kHistogramLoadEventFiredSuffix), 1);
  histogram_tester.ExpectTotalCount(
      HistogramNameWithSuffix(
          internal::kHistogramLoadEventFiredBackgroundSuffix),
      0);
}
// A page loaded in a background tab (while another tab hangs) records the
// background variant of the LoadEventFired histogram once.
IN_PROC_BROWSER_TEST_F(MultiTabLoadingPageLoadMetricsBrowserTest,
                       MultiTabBackground) {
  base::HistogramTester histogram_tester;
  // Keep the first tab perpetually loading.
  NavigateToURLWithoutWaiting(embedded_test_server()->GetURL("/hung"));

  // Open a tab in the background.
  ui_test_utils::NavigateToURLWithDispositionBlockUntilNavigationsComplete(
      browser(), GetTestURL(), 1, WindowOpenDisposition::NEW_BACKGROUND_TAB,
      ui_test_utils::BROWSER_TEST_WAIT_FOR_LOAD_STOP);

  // Close the foreground tab.
  TabStripModel* tab_strip_model = browser()->tab_strip_model();
  content::WebContentsDestroyedWatcher destroyed_watcher(
      tab_strip_model->GetWebContentsAt(0));
  EXPECT_TRUE(tab_strip_model->CloseWebContentsAt(0, 0));
  destroyed_watcher.Wait();
  // Now the background tab should have moved to the foreground.

  // Navigate away to force the histogram recording.
  ASSERT_TRUE(
      ui_test_utils::NavigateToURL(browser(), GURL(url::kAboutBlankURL)));

  histogram_tester.ExpectTotalCount(
      HistogramNameWithSuffix(internal::kHistogramLoadEventFiredSuffix), 0);
  histogram_tester.ExpectTotalCount(
      HistogramNameWithSuffix(
          internal::kHistogramLoadEventFiredBackgroundSuffix),
      1);
}
| {
"content_hash": "4b29817f6f938759769f6abeed827cf9",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 79,
"avg_line_length": 37.13,
"alnum_prop": 0.7468354430379747,
"repo_name": "chromium/chromium",
"id": "b54bc52a9dd25f226137e93d6f8e80a566f2e637",
"size": "4513",
"binary": false,
"copies": "5",
"ref": "refs/heads/main",
"path": "chrome/browser/page_load_metrics/observers/multi_tab_loading_page_load_metrics_observer_browsertest.cc",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
# Dokku deployment: host and the two app names (production / development).
DOKKU_HOST=ung.utt.fr
DOKKU_PROD=flux.uttnetgroup.fr
DOKKU_DEV=flux-dev.uttnetgroup.fr

# Only deploy when CI supplies the private deploy key; presumably it is
# absent on untrusted builds (e.g. pull requests) — confirm in CI config.
if [[ -n $SSH_DEPLOY_KEY ]] ; then
  # Set up ssh key
  mkdir -p ~/.ssh
  echo -e "${SSH_DEPLOY_KEY}" > ~/.ssh/id_rsa
  chmod 600 ~/.ssh/id_rsa
  eval $(ssh-agent -s)
  ssh-add ~/.ssh/id_rsa

  # SSH config: disable host-key checking for all hosts in this throwaway
  # CI environment.
  echo -e "Host *\n\tStrictHostKeyChecking no\n\n" > ~/.ssh/config

  # Add dokku to known hosts
  ssh-keyscan -H $DOKKU_HOST >> ~/.ssh/known_hosts

  # Build a fresh single-commit git repo containing only the deploy
  # artifacts (dist/ and static.json).
  mkdir deploy
  mv dist deploy/
  mv static.json deploy/
  cd deploy
  git init
  git add . -A
  git config user.name "Travis"
  git config user.email "ung@utt.fr"
  git commit -m "Deploy"

  # Push to the production app from master, otherwise to the dev app.
  if [[ $TRAVIS_BRANCH == 'master' ]] ; then
    git remote add dokku dokku@$DOKKU_HOST:$DOKKU_PROD
  else
    git remote add dokku dokku@$DOKKU_HOST:$DOKKU_DEV
  fi
  # Force-push: the remote history is disposable, only HEAD matters.
  git push dokku HEAD:refs/heads/master -f
fi
| {
"content_hash": "de2eab00f77dab35ef5b52f76c7e5636",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 68,
"avg_line_length": 29.181818181818183,
"alnum_prop": 0.6251298026998962,
"repo_name": "ungdev/flux2-client",
"id": "a12190337d8702390ad860e81d7d22dc4c345032",
"size": "975",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "deploy.sh",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "33441"
},
{
"name": "HTML",
"bytes": "2153"
},
{
"name": "JavaScript",
"bytes": "495775"
},
{
"name": "Shell",
"bytes": "975"
}
],
"symlink_target": ""
} |
/*bootstrap-overwrite.css*/

/* Align the navbar brand text beside the logo. */
.navbar-brand span{
    vertical-align: top;
    display: inline-block;
    margin-top: 1px;
    margin-left: 10px;
}

/* Red notification badge pinned to the top-right of its positioned parent. */
.badge{
    position: absolute;
    top: 8px;
    right: 25px;
    background: red;
}

/* Right-aligned dropdown menus: anchor to the right edge and widen/pad so
   content fits. */
.nav.pull-right > li > .dropdown-menu,
.nav > li > .dropdown-menu.pull-right {
    right: 0;
    left: auto;
    min-width: 260px;
    padding: 10px;
}

/* Speed up the default carousel slide transition (Bootstrap uses 0.6s). */
.carousel-inner > .item {
    -webkit-transition: 0.4s ease-in-out left;
    -moz-transition: 0.4s ease-in-out left;
    -o-transition: 0.4s ease-in-out left;
    transition: 0.4s ease-in-out left;
}

/* Reset modal horizontal offset. */
.modal-dialog{
    left:0;
}

/*Bootstrap Clean*/
/* / Bootstrap Clean*/ | {
"content_hash": "d38247c9bb8b590f92bd945f58c05fd3",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 46,
"avg_line_length": 15.625,
"alnum_prop": 0.656,
"repo_name": "dwoodard/MediaCloud",
"id": "03cc3c2bd303a037e194bad777c762147ba1299e",
"size": "625",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "public/assets/css/bootstrap-overwrite.css",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "478"
},
{
"name": "Batchfile",
"bytes": "5104"
},
{
"name": "CSS",
"bytes": "1529160"
},
{
"name": "CoffeeScript",
"bytes": "14956"
},
{
"name": "Go",
"bytes": "6808"
},
{
"name": "Groff",
"bytes": "160"
},
{
"name": "HTML",
"bytes": "4451657"
},
{
"name": "JavaScript",
"bytes": "12370757"
},
{
"name": "Makefile",
"bytes": "6501"
},
{
"name": "PHP",
"bytes": "7152083"
},
{
"name": "Python",
"bytes": "24994"
},
{
"name": "Ruby",
"bytes": "240"
},
{
"name": "Shell",
"bytes": "22794"
}
],
"symlink_target": ""
} |
<?php
defined('C5_EXECUTE') or die('Access Denied.');
/**
 * Persistence wrapper around the single row of Google API credentials
 * stored in the mGoogleApiCredentials table (an API key and a secret).
 *
 * NOTE(review): uses a PHP 4-style constructor (method named after the
 * class) and an inherited `Object` base — looks like legacy concrete5-era
 * code; kept as-is for compatibility.
 */
class GoogleApiCredentials extends Object
{
    /** Backing table name for all queries in this class. */
    protected static $mTable = 'mGoogleApiCredentials';
    protected $api_key, $secret;

    /**
     * @param array|null $data Optional row (id, api_key, secret) to hydrate
     *                         this instance from.
     */
    public function GoogleApiCredentials($data = null)
    {
        if ($data) {
            $this->id = $data['id'];
            $this->api_key = $data['api_key'];
            $this->secret = $data['secret'];
        }
    }

    /** @return string The stored Google API key. */
    public function getApiKey()
    {
        return $this->api_key;
    }

    /** @return string The stored Google API secret. */
    public function getSecret()
    {
        return $this->secret;
    }

    /** @return string The value just assigned (assignment is returned). */
    public function setApiKey($val)
    {
        return $this->api_key = $val;
    }

    /** @return string The value just assigned (assignment is returned). */
    public function setSecret($val)
    {
        return $this->secret = $val;
    }

    /**
     * Loads the singleton credentials row, inserting an empty row first if
     * the table is empty, so callers always get an instance back.
     *
     * @return GoogleApiCredentials
     */
    public function load()
    {
        $credentials = self::get("SELECT * FROM ".self::$mTable." LIMIT 1");
        if ($credentials == null) {
            $db = Loader::db();
            $db->execute("INSERT INTO ".self::$mTable." (api_key,secret) VALUES ('','')");
            $credentials = self::get("SELECT * FROM ".self::$mTable." LIMIT 1");
        }
        return $credentials;
    }

    /**
     * Persists the current api_key/secret back to this instance's row.
     * Uses a parameterized query for the values.
     */
    public function save()
    {
        $db = Loader::db();
        return $db->execute("UPDATE ".self::$mTable." SET api_key=?, secret=? WHERE id = ?", array($this->api_key, $this->secret, $this->id));
    }

    /**
     * Runs $sql and maps the result to instance(s) of this class.
     *
     * @param string $sql      Query to execute (no user input is
     *                         interpolated by current callers).
     * @param bool   $multiple When true returns an array of instances;
     *                         otherwise the first row or null.
     * @return GoogleApiCredentials|GoogleApiCredentials[]|null
     */
    private static function get($sql, $multiple = false)
    {
        $db = Loader::db();
        $rs = $db->execute($sql);
        $resp = null;
        if ($multiple) {
            $resp = array();
            while ($data = $rs->fetchrow()) {
                $resp[] = new GoogleApiCredentials($data);
            }
        } elseif ($data = $rs->fetchrow()) {
            $resp = new GoogleApiCredentials($data);
        }
        return $resp;
    }
}
| {
"content_hash": "ce0e3e0eaa576d5d27f42c0004237209",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 142,
"avg_line_length": 23.842105263157894,
"alnum_prop": 0.5055187637969095,
"repo_name": "Remo/social",
"id": "9b0872ef80330282cd7f0014da8abecbddc09338",
"size": "1812",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "models/google_api_credentials.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "13322"
},
{
"name": "PHP",
"bytes": "33640"
}
],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Microsoft.ServiceFabric.Actors.Runtime;
using Drones.Shared.Events;
using Drones.Shared.Actors;
namespace SwarmActor
{
/// <summary>
/// Reliable actor that tracks the drone ids belonging to one swarm and
/// publishes <see cref="ISwarmEvents"/> notifications when membership
/// changes. The swarm's id is the actor id.
/// </summary>
[StatePersistence(StatePersistence.Persisted)]
internal class SwarmActor : Actor, ISwarmActor
{
    // Ids of the drones currently registered with this swarm.
    private List<String> _droneIds;

    public SwarmActor()
    {
        _droneIds = new List<String>();
    }

    /// <summary>Registers a drone with this swarm and raises DroneAdded.</summary>
    public Task AddDroneAsync(String droneId)
    {
        _droneIds.Add(droneId);

        // Notify subscribers that a drone joined this swarm.
        GetEvent<ISwarmEvents>().DroneAdded(this.Id.GetStringId(), droneId);

        return Task.FromResult(true);
    }

    /// <summary>Returns the ids of all drones in this swarm.</summary>
    public Task<List<String>> GetDronesAsync()
    {
        return Task.FromResult(_droneIds);
    }

    /// <summary>Removes a drone from this swarm and raises DroneRemoved.</summary>
    public Task RemoveDroneAsync(String droneId)
    {
        _droneIds.Remove(droneId);

        // Notify subscribers that a drone left this swarm.
        GetEvent<ISwarmEvents>().DroneRemoved(this.Id.GetStringId(), droneId);

        return Task.FromResult(true);
    }
}
}
| {
"content_hash": "328f955e84e261873883f01466e7db3c",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 60,
"avg_line_length": 24.53191489361702,
"alnum_prop": 0.593235039028621,
"repo_name": "jjcollinge/DroneSimulation",
"id": "942ca1e2fa3c0b0656532ffca3d1600fe24e42e8",
"size": "1155",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/SwarmActor/SwarmActor.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "187779"
},
{
"name": "CSS",
"bytes": "1414"
},
{
"name": "HLSL",
"bytes": "274915"
},
{
"name": "HTML",
"bytes": "40406"
},
{
"name": "JavaScript",
"bytes": "17474585"
},
{
"name": "PowerShell",
"bytes": "15998"
},
{
"name": "TypeScript",
"bytes": "2147889"
}
],
"symlink_target": ""
} |
package annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a handler method as requiring authorization. The {@code value}
 * entries are presumably role/permission names checked by the dispatching
 * layer — confirm against the code that reads this annotation. Retained at
 * runtime so it can be inspected reflectively.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface Authorize {
    /** Identifiers permitted to invoke the annotated method. */
    String[] value();
}
| {
"content_hash": "390811876db119c14baa6c9c1d3c20a3",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 44,
"avg_line_length": 25.083333333333332,
"alnum_prop": 0.8073089700996677,
"repo_name": "csutorasa/spring",
"id": "3ac79f391597ac59a49c647d1e624e9fec51f94a",
"size": "301",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Web/src/main/java/annotation/Authorize.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "159"
},
{
"name": "HTML",
"bytes": "229"
},
{
"name": "Java",
"bytes": "76924"
},
{
"name": "JavaScript",
"bytes": "1114"
},
{
"name": "TypeScript",
"bytes": "6875"
}
],
"symlink_target": ""
} |
/*
* The Plaid API
*
* The Plaid REST API. Please see https://plaid.com/docs/api for more details.
*
* API version: 2020-09-14_1.205.3
*/
// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT.
package plaid
import (
"encoding/json"
)
// ItemWebhookUpdateRequest ItemWebhookUpdateRequest defines the request schema for `/item/webhook/update`
//
// Optional pointer fields are nil when unset and omitted from JSON via
// `omitempty`. Webhook uses NullableString so that an explicit JSON null
// (webhook removal) can be distinguished from "not provided".
type ItemWebhookUpdateRequest struct {
	// Your Plaid API `client_id`. The `client_id` is required and may be provided either in the `PLAID-CLIENT-ID` header or as part of a request body.
	ClientId *string `json:"client_id,omitempty"`
	// Your Plaid API `secret`. The `secret` is required and may be provided either in the `PLAID-SECRET` header or as part of a request body.
	Secret *string `json:"secret,omitempty"`
	// The access token associated with the Item data is being requested for.
	AccessToken string `json:"access_token"`
	// The new webhook URL to associate with the Item. To remove a webhook from an Item, set to `null`.
	Webhook NullableString `json:"webhook,omitempty"`
}
// NewItemWebhookUpdateRequest instantiates a new ItemWebhookUpdateRequest
// object with the required access token assigned; all optional properties
// are left unset. The argument list will change if the set of required
// properties changes.
func NewItemWebhookUpdateRequest(accessToken string) *ItemWebhookUpdateRequest {
	return &ItemWebhookUpdateRequest{AccessToken: accessToken}
}
// NewItemWebhookUpdateRequestWithDefaults instantiates a new
// ItemWebhookUpdateRequest object with every property unset; callers must
// assign required properties (AccessToken) themselves.
func NewItemWebhookUpdateRequestWithDefaults() *ItemWebhookUpdateRequest {
	return new(ItemWebhookUpdateRequest)
}
// GetClientId returns the ClientId field value if set, zero value otherwise.
func (o *ItemWebhookUpdateRequest) GetClientId() string {
	if o == nil || o.ClientId == nil {
		var ret string
		return ret
	}
	return *o.ClientId
}

// GetClientIdOk returns a tuple with the ClientId field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ItemWebhookUpdateRequest) GetClientIdOk() (*string, bool) {
	if o == nil || o.ClientId == nil {
		return nil, false
	}
	return o.ClientId, true
}

// HasClientId returns a boolean if a field has been set.
// A nil pointer means the field was never assigned.
func (o *ItemWebhookUpdateRequest) HasClientId() bool {
	if o != nil && o.ClientId != nil {
		return true
	}
	return false
}

// SetClientId gets a reference to the given string and assigns it to the ClientId field.
func (o *ItemWebhookUpdateRequest) SetClientId(v string) {
	o.ClientId = &v
}
// GetSecret returns the Secret field value if set, zero value otherwise.
func (o *ItemWebhookUpdateRequest) GetSecret() string {
	if o == nil || o.Secret == nil {
		var ret string
		return ret
	}
	return *o.Secret
}

// GetSecretOk returns a tuple with the Secret field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ItemWebhookUpdateRequest) GetSecretOk() (*string, bool) {
	if o == nil || o.Secret == nil {
		return nil, false
	}
	return o.Secret, true
}

// HasSecret returns a boolean if a field has been set.
// A nil pointer means the field was never assigned.
func (o *ItemWebhookUpdateRequest) HasSecret() bool {
	if o != nil && o.Secret != nil {
		return true
	}
	return false
}

// SetSecret gets a reference to the given string and assigns it to the Secret field.
func (o *ItemWebhookUpdateRequest) SetSecret(v string) {
	o.Secret = &v
}
// GetAccessToken returns the AccessToken field value.
// AccessToken is required, so there is no Has/unset variant.
func (o *ItemWebhookUpdateRequest) GetAccessToken() string {
	if o == nil {
		var ret string
		return ret
	}
	return o.AccessToken
}

// GetAccessTokenOk returns a tuple with the AccessToken field value
// and a boolean to check if the value has been set.
func (o *ItemWebhookUpdateRequest) GetAccessTokenOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.AccessToken, true
}

// SetAccessToken sets field value
func (o *ItemWebhookUpdateRequest) SetAccessToken(v string) {
	o.AccessToken = v
}
// Webhook is tri-state: unset (omitted from JSON), explicitly null
// (requests webhook removal), or a concrete URL string.

// GetWebhook returns the Webhook field value if set, zero value otherwise (both if not set or set to explicit null).
func (o *ItemWebhookUpdateRequest) GetWebhook() string {
	if o == nil || o.Webhook.Get() == nil {
		var ret string
		return ret
	}
	return *o.Webhook.Get()
}

// GetWebhookOk returns a tuple with the Webhook field value if set, nil otherwise
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *ItemWebhookUpdateRequest) GetWebhookOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return o.Webhook.Get(), o.Webhook.IsSet()
}

// HasWebhook returns a boolean if a field has been set.
func (o *ItemWebhookUpdateRequest) HasWebhook() bool {
	if o != nil && o.Webhook.IsSet() {
		return true
	}
	return false
}

// SetWebhook gets a reference to the given NullableString and assigns it to the Webhook field.
func (o *ItemWebhookUpdateRequest) SetWebhook(v string) {
	o.Webhook.Set(&v)
}

// SetWebhookNil sets the value for Webhook to be an explicit nil
func (o *ItemWebhookUpdateRequest) SetWebhookNil() {
	o.Webhook.Set(nil)
}

// UnsetWebhook ensures that no value is present for Webhook, not even an explicit nil
func (o *ItemWebhookUpdateRequest) UnsetWebhook() {
	o.Webhook.Unset()
}
// MarshalJSON serializes the request: optional fields are included only
// when set, access_token is always included (it is required), and Webhook
// is emitted even when explicitly null so the API can remove the webhook.
func (o ItemWebhookUpdateRequest) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{}
	if o.ClientId != nil {
		toSerialize["client_id"] = o.ClientId
	}
	if o.Secret != nil {
		toSerialize["secret"] = o.Secret
	}
	// access_token is required: serialize unconditionally (the generated
	// `if true` guard was vacuous and has been removed).
	toSerialize["access_token"] = o.AccessToken
	if o.Webhook.IsSet() {
		toSerialize["webhook"] = o.Webhook.Get()
	}
	return json.Marshal(toSerialize)
}
// NullableItemWebhookUpdateRequest wraps an ItemWebhookUpdateRequest with
// an explicit isSet flag so "unset" can be distinguished from "set to
// null" when (de)serializing JSON.
type NullableItemWebhookUpdateRequest struct {
	value *ItemWebhookUpdateRequest
	isSet bool
}

// Get returns the wrapped value (may be nil).
func (v NullableItemWebhookUpdateRequest) Get() *ItemWebhookUpdateRequest {
	return v.value
}

// Set stores val (which may be nil, representing explicit null) and marks
// the wrapper as set.
func (v *NullableItemWebhookUpdateRequest) Set(val *ItemWebhookUpdateRequest) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether Set (or unmarshalling) has assigned a value.
func (v NullableItemWebhookUpdateRequest) IsSet() bool {
	return v.isSet
}

// Unset clears the value and the set flag.
func (v *NullableItemWebhookUpdateRequest) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullableItemWebhookUpdateRequest returns a wrapper already marked set.
func NewNullableItemWebhookUpdateRequest(val *ItemWebhookUpdateRequest) *NullableItemWebhookUpdateRequest {
	return &NullableItemWebhookUpdateRequest{value: val, isSet: true}
}

// MarshalJSON serializes the wrapped value (nil marshals to JSON null).
func (v NullableItemWebhookUpdateRequest) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

// UnmarshalJSON marks the wrapper set and decodes into the wrapped value.
func (v *NullableItemWebhookUpdateRequest) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
| {
"content_hash": "9d218ebc11f548c2b79bbee7d9ab27ab",
"timestamp": "",
"source": "github",
"line_count": 230,
"max_line_length": 148,
"avg_line_length": 29.565217391304348,
"alnum_prop": 0.7408823529411764,
"repo_name": "plaid/plaid-go",
"id": "75fce0597c6667db831131e3545af110f4594b5d",
"size": "6800",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plaid/model_item_webhook_update_request.go",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "129"
},
{
"name": "Go",
"bytes": "61884"
},
{
"name": "Makefile",
"bytes": "114"
},
{
"name": "Mustache",
"bytes": "46930"
}
],
"symlink_target": ""
} |
package com.fortutech.tcheckit.ejb.sessions;
import java.util.Hashtable;
import org.ksoap2.serialization.PropertyInfo;
import org.ksoap2.serialization.SoapObject;
/**
 * ksoap2 {@link SoapObject} representation of the TypePhone web-service
 * type. The indexed property accessors below are the contract ksoap2 uses
 * to (de)serialize this object: getPropertyCount() tells the framework how
 * many indices to iterate, and getProperty/setProperty/getPropertyInfo
 * must all agree on the index layout (0..7).
 *
 * NOTE(review): this class appears machine-generated; the unused
 * parameters on the get_* accessors are kept for interface compatibility.
 */
public final class TypePhone extends SoapObject {

    // Wrapped value — presumably "android"/"iphone"/"autre"; confirm
    // against the service WSDL.
    private java.lang.String _value_;
    public java.util.HashMap _table_;
    public java.lang.String _android;
    public java.lang.String _iphone;
    public java.lang.String _autre;
    public com.fortutech.tcheckit.ejb.sessions.TypePhone android;
    public com.fortutech.tcheckit.ejb.sessions.TypePhone iphone;
    public com.fortutech.tcheckit.ejb.sessions.TypePhone autre;

    public TypePhone() {
        // Empty namespace and name; ksoap2 fills these during use.
        super("", "");
    }

    public void set_value_(java.lang.String _value_) {
        this._value_ = _value_;
    }

    /** @return the stored value; the parameter is ignored. */
    public java.lang.String get_value_(java.lang.String _value_) {
        return this._value_;
    }

    public void set_table_(java.util.HashMap _table_) {
        this._table_ = _table_;
    }

    /** @return the stored table; the parameter is ignored. */
    public java.util.HashMap get_table_(java.util.HashMap _table_) {
        return this._table_;
    }

    public void set_android(java.lang.String _android) {
        this._android = _android;
    }

    /** @return the stored android string; the parameter is ignored. */
    public java.lang.String get_android(java.lang.String _android) {
        return this._android;
    }

    public void set_iphone(java.lang.String _iphone) {
        this._iphone = _iphone;
    }

    /** @return the stored iphone string; the parameter is ignored. */
    public java.lang.String get_iphone(java.lang.String _iphone) {
        return this._iphone;
    }

    public void set_autre(java.lang.String _autre) {
        this._autre = _autre;
    }

    /** @return the stored autre string; the parameter is ignored. */
    public java.lang.String get_autre(java.lang.String _autre) {
        return this._autre;
    }

    public void setAndroid(com.fortutech.tcheckit.ejb.sessions.TypePhone android) {
        this.android = android;
    }

    /** @return the stored android TypePhone; the parameter is ignored. */
    public com.fortutech.tcheckit.ejb.sessions.TypePhone getAndroid(com.fortutech.tcheckit.ejb.sessions.TypePhone android) {
        return this.android;
    }

    public void setIphone(com.fortutech.tcheckit.ejb.sessions.TypePhone iphone) {
        this.iphone = iphone;
    }

    /** @return the stored iphone TypePhone; the parameter is ignored. */
    public com.fortutech.tcheckit.ejb.sessions.TypePhone getIphone(com.fortutech.tcheckit.ejb.sessions.TypePhone iphone) {
        return this.iphone;
    }

    public void setAutre(com.fortutech.tcheckit.ejb.sessions.TypePhone autre) {
        this.autre = autre;
    }

    /** @return the stored autre TypePhone; the parameter is ignored. */
    public com.fortutech.tcheckit.ejb.sessions.TypePhone getAutre(com.fortutech.tcheckit.ejb.sessions.TypePhone autre) {
        return this.autre;
    }

    /**
     * Number of serializable properties.
     *
     * FIX: previously returned 6, but getProperty/setProperty/
     * getPropertyInfo all handle indices 0..7 (eight properties), so
     * ksoap2 silently dropped the last two (iphone, autre) during
     * (de)serialization.
     */
    public int getPropertyCount() {
        return 8;
    }

    /** Returns the property at the given index, or null when out of range. */
    public Object getProperty(int __index) {
        switch(__index) {
        case 0: return _value_;
        case 1: return _table_;
        case 2: return _android;
        case 3: return _iphone;
        case 4: return _autre;
        case 5: return android;
        case 6: return iphone;
        case 7: return autre;
        }
        return null;
    }

    /** Assigns the property at the given index; out-of-range is a no-op. */
    public void setProperty(int __index, Object __obj) {
        switch(__index) {
        case 0: _value_ = (java.lang.String) __obj; break;
        case 1: _table_ = (java.util.HashMap) __obj; break;
        case 2: _android = (java.lang.String) __obj; break;
        case 3: _iphone = (java.lang.String) __obj; break;
        case 4: _autre = (java.lang.String) __obj; break;
        case 5: android = (com.fortutech.tcheckit.ejb.sessions.TypePhone) __obj; break;
        case 6: iphone = (com.fortutech.tcheckit.ejb.sessions.TypePhone) __obj; break;
        case 7: autre = (com.fortutech.tcheckit.ejb.sessions.TypePhone) __obj; break;
        }
    }

    /**
     * Fills __info with the name/type metadata of the property at the
     * given index. The __table argument is unused (ksoap2 interface
     * requirement).
     */
    public void getPropertyInfo(int __index, Hashtable __table, PropertyInfo __info) {
        switch(__index) {
        case 0:
            __info.name = "_value_";
            __info.type = java.lang.String.class; break;
        case 1:
            __info.name = "_table_";
            __info.type = java.util.HashMap.class; break;
        case 2:
            __info.name = "_android";
            __info.type = java.lang.String.class; break;
        case 3:
            __info.name = "_iphone";
            __info.type = java.lang.String.class; break;
        case 4:
            __info.name = "_autre";
            __info.type = java.lang.String.class; break;
        case 5:
            __info.name = "android";
            __info.type = com.fortutech.tcheckit.ejb.sessions.TypePhone.class; break;
        case 6:
            __info.name = "iphone";
            __info.type = com.fortutech.tcheckit.ejb.sessions.TypePhone.class; break;
        case 7:
            __info.name = "autre";
            __info.type = com.fortutech.tcheckit.ejb.sessions.TypePhone.class; break;
        }
    }
}
| {
"content_hash": "9da38ae3ba4119c72697fbea6c911485",
"timestamp": "",
"source": "github",
"line_count": 151,
"max_line_length": 124,
"avg_line_length": 30.8476821192053,
"alnum_prop": 0.6090596822670674,
"repo_name": "jawadn/Tcheckit-android",
"id": "de1499af758849f3668279fc930dc7cb8e9f8e2c",
"size": "4658",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/com/fortutech/tcheckit/ejb/sessions/TypePhone.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "11139"
},
{
"name": "Java",
"bytes": "2197048"
}
],
"symlink_target": ""
} |
'use strict';

// AngularJS service wrapping ui-bootstrap's $modal with app-specific
// defaults and canned confirmation dialogs.
angular.module('angularFullstackApp')
  .factory('Modal', function ($rootScope, $modal) {

    /**
     * Opens a modal
     * @param  {Object} scope      - an object to be merged with modal's scope
     * @param  {String} modalClass - (optional) class(es) to be applied to the modal
     * @return {Object}            - the instance $modal.open() returns
     */
    function openModal(scope, modalClass) {
      var modalScope = $rootScope.$new();
      scope = scope || {};
      modalClass = modalClass || 'modal-default';

      // Copy caller-supplied properties onto the fresh child scope so the
      // shared modal template can bind to them.
      angular.extend(modalScope, scope);

      return $modal.open({
        templateUrl: 'components/modal/modal.html',
        windowClass: modalClass,
        scope: modalScope
      });
    }

    // Public API here
    return {

      /* Confirmation modals */
      confirm: {

        /**
         * Create a function to open a delete confirmation modal (ex. ng-click='myModalFn(name, arg1, arg2...)')
         * @param  {Function} del - callback, ran when delete is confirmed
         * @return {Function}     - the function to open the modal (ex. myModalFn)
         */
        delete: function(del) {
          del = del || angular.noop;

          /**
           * Open a delete confirmation modal
           * @param  {String} name   - name or info to show on modal
           * @param  {All}           - any additional args are passed staight to del callback
           */
          return function() {
            // First argument is the display name; the rest are forwarded
            // verbatim to the del callback on confirmation.
            var args = Array.prototype.slice.call(arguments),
                name = args.shift(),
                deleteModal;

            deleteModal = openModal({
              modal: {
                dismissable: true,
                title: 'Confirm Delete',
                html: '<p>Are you sure you want to delete <strong>' + name + '</strong> ?</p>',
                buttons: [{
                  classes: 'btn-danger',
                  text: 'Delete',
                  click: function(e) {
                    deleteModal.close(e);
                  }
                }, {
                  classes: 'btn-default',
                  text: 'Cancel',
                  click: function(e) {
                    deleteModal.dismiss(e);
                  }
                }]
              }
            }, 'modal-danger');

            // close() resolves the promise (Delete clicked); dismiss()
            // rejects it, so a cancelled dialog never runs the callback.
            deleteModal.result.then(function(event) {
              del.apply(event, args);
            });
          };
        }
      }
    };
  });
| {
"content_hash": "1f25191ea662381e179c00f63285f8bb",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 112,
"avg_line_length": 31.74025974025974,
"alnum_prop": 0.48404255319148937,
"repo_name": "sharmilajesupaul/angular-fullstack",
"id": "a79fb95e315829991648e3b9ec5238b9202c0523",
"size": "2444",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "client/components/modal/modal.service.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "24139"
},
{
"name": "CSS",
"bytes": "2188"
},
{
"name": "HTML",
"bytes": "16499"
},
{
"name": "JavaScript",
"bytes": "57316"
}
],
"symlink_target": ""
} |
namespace Tundra
{

/// Default Ogre material processor.
/// Fallback IOgreMaterialProcessor implementation used when no specialized
/// processor claims a parsed Ogre material.
class URHORENDERER_API DefaultOgreMaterialProcessor : public IOgreMaterialProcessor
{
    URHO3D_OBJECT(DefaultOgreMaterialProcessor, IOgreMaterialProcessor);

public:
    /// Construct.
    DefaultOgreMaterialProcessor(Urho3D::Context* context);

    /// Return whether can convert a specific Ogre material.
    bool CanConvert(const Ogre::MaterialParser& src) override;

    /// Convert the Ogre material parsing result into the given asset, and fill the per-unit texture refs. Assumed to succeed if CanConvert for the same parsed data already returned true.
    void Convert(const Ogre::MaterialParser& src, OgreMaterialAsset* dest) override;
};
} | {
"content_hash": "e3fb1c3b5c44c2ad09138936e9096875",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 187,
"avg_line_length": 35.45,
"alnum_prop": 0.7743300423131171,
"repo_name": "realXtend/tundra-urho3d",
"id": "6ba34450af61554274e99ca3855222e8f6ce1763",
"size": "836",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Plugins/UrhoRenderer/Ogre/DefaultOgreMaterialProcessor.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "5798"
},
{
"name": "C",
"bytes": "3334051"
},
{
"name": "C#",
"bytes": "122989"
},
{
"name": "C++",
"bytes": "4791572"
},
{
"name": "CMake",
"bytes": "140734"
},
{
"name": "GLSL",
"bytes": "40491"
},
{
"name": "HLSL",
"bytes": "45692"
},
{
"name": "Java",
"bytes": "41652"
},
{
"name": "JavaScript",
"bytes": "41532"
},
{
"name": "Objective-C",
"bytes": "2800"
}
],
"symlink_target": ""
} |
package com.amazonaws.services.globalaccelerator.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.globalaccelerator.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
 * ListAcceleratorsResult JSON Unmarshaller
 *
 * Generated streaming unmarshaller: walks the JSON token stream, tracking
 * nesting depth so it only consumes tokens belonging to this result object
 * and stops when the stream returns to the depth it started at.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListAcceleratorsResultJsonUnmarshaller implements Unmarshaller<ListAcceleratorsResult, JsonUnmarshallerContext> {

    public ListAcceleratorsResult unmarshall(JsonUnmarshallerContext context) throws Exception {
        ListAcceleratorsResult listAcceleratorsResult = new ListAcceleratorsResult();

        // Remember where we started so END tokens can tell us when this
        // object's subtree has been fully consumed.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // A JSON null for the whole result yields an empty result object.
        if (token == VALUE_NULL) {
            return listAcceleratorsResult;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("Accelerators", targetDepth)) {
                    context.nextToken();
                    listAcceleratorsResult.setAccelerators(new ListUnmarshaller<Accelerator>(AcceleratorJsonUnmarshaller.getInstance()).unmarshall(context));
                }
                if (context.testExpression("NextToken", targetDepth)) {
                    context.nextToken();
                    listAcceleratorsResult.setNextToken(context.getUnmarshaller(String.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once we have climbed back out of the object we
                // started in.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return listAcceleratorsResult;
    }

    private static ListAcceleratorsResultJsonUnmarshaller instance;

    // Lazily-created shared instance; the unmarshaller is stateless.
    public static ListAcceleratorsResultJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new ListAcceleratorsResultJsonUnmarshaller();
        return instance;
    }
}
| {
"content_hash": "22bedaa4a2c8e5aeb9616d483eba0ba7",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 157,
"avg_line_length": 38.38805970149254,
"alnum_prop": 0.6609642301710731,
"repo_name": "jentfoo/aws-sdk-java",
"id": "f3649871476054c1ce6e626da09588862296f1be",
"size": "3152",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aws-java-sdk-globalaccelerator/src/main/java/com/amazonaws/services/globalaccelerator/model/transform/ListAcceleratorsResultJsonUnmarshaller.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "270"
},
{
"name": "FreeMarker",
"bytes": "173637"
},
{
"name": "Gherkin",
"bytes": "25063"
},
{
"name": "Java",
"bytes": "356214839"
},
{
"name": "Scilab",
"bytes": "3924"
},
{
"name": "Shell",
"bytes": "295"
}
],
"symlink_target": ""
} |
<?php
declare(strict_types=1);
namespace Nelmio\Alice\Definition\ServiceReference;
use Nelmio\Alice\Definition\ServiceReferenceInterface;
/**
* Value object to point to refer to a static service, e.g. 'Nelmio\User\UserFactory'
*/
/**
 * Value object to point to refer to a static service, e.g. 'Nelmio\User\UserFactory'.
 * Immutable: the FQCN is fixed at construction time.
 */
final class StaticReference implements ServiceReferenceInterface
{
    /**
     * @var string FQCN of the referenced service class.
     */
    private $id;

    /**
     * @param string $className FQCN
     */
    public function __construct(string $className)
    {
        $this->id = $className;
    }

    /**
     * @return string FQCN e.g. 'Nelmio\User\UserFactory'
     */
    public function getId(): string
    {
        return $this->id;
    }
}
| {
"content_hash": "2283d855f2a1d8e10c0c8ca50b6e2141",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 85,
"avg_line_length": 18.25,
"alnum_prop": 0.6270928462709284,
"repo_name": "nelmio/alice",
"id": "adc300ccac26d9d304bc29b1690f653ce0461649",
"size": "869",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Definition/ServiceReference/StaticReference.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "5593"
},
{
"name": "PHP",
"bytes": "1908703"
}
],
"symlink_target": ""
} |
require_relative '../../puppet_x/puppetlabs/property/tag.rb'
require_relative '../../puppet_x/puppetlabs/property/region.rb'
Puppet::Type.newtype(:ec2_vpc_routetable) do
  @doc = 'Type representing a VPC route table.'

  ensurable

  # The route table's name is the namevar (resource identity).
  newparam(:name, namevar: true) do
    desc 'The name of the route table.'
    validate do |value|
      fail 'route tables must have a name' if value == ''
      fail 'name should be a String' unless value.is_a?(String)
    end
  end

  newproperty(:vpc) do
    desc 'VPC to assign the route table to.'
    validate do |value|
      fail 'vpc should be a String' unless value.is_a?(String)
    end
  end

  newproperty(:region, :parent => PuppetX::Property::AwsRegion) do
    desc 'Region in which to launch the route table.'
  end

  newproperty(:routes, :array_matching => :all) do
    desc 'Individual routes for the routing table.'
    # Each route hash must at least carry a destination CIDR block.
    validate do |value|
      ['destination_cidr_block', ].each do |key|
        fail "routes must include a #{key}" unless value.keys.include?(key)
      end
    end
    # Order of routes is irrelevant; compare as sets.
    def insync?(is)
      is.to_set == should.to_set
    end
  end

  newproperty(:tags, :parent => PuppetX::Property::AwsTag) do
    desc 'Tags to assign to the route table.'
  end

  # Cross-route invariants: gateways and destination blocks must be unique
  # across the whole table.
  validate do
    routes = self[:routes]
    if routes
      uniq_gateways = Array(routes).collect { |route| route['gateway'] }.uniq
      uniq_blocks = Array(routes).collect { |route| route['destination_cidr_block'] }.uniq
      fail 'Only one route per gateway allowed' unless uniq_gateways.size == Array(routes).size
      fail 'destination_cidr_block must be unique' unless uniq_blocks.size == Array(routes).size
    end
  end

  # Ensure referenced gateways and the owning VPC are managed first.
  autorequire(:ec2_vpc_vpn_gateway) do
    routes = self[:routes]
    routes ? Array(routes).collect { |route| route['gateway'] } : nil
  end

  autorequire(:ec2_vpc_internet_gateway) do
    routes = self[:routes]
    routes ? Array(routes).collect { |route| route['gateway'] } : nil
  end

  autorequire(:ec2_vpc) do
    self[:vpc]
  end
end
| {
"content_hash": "8b279f7dbecd702b86d7cc5f9f040b9e",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 96,
"avg_line_length": 29.36764705882353,
"alnum_prop": 0.6609914872308462,
"repo_name": "daveseff/puppetlabs-aws",
"id": "f3ba51e7e2ae98cea449aa79b9a88e64912a6b9a",
"size": "1997",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/puppet/type/ec2_vpc_routetable.rb",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Puppet",
"bytes": "874"
},
{
"name": "Ruby",
"bytes": "745717"
},
{
"name": "Shell",
"bytes": "3606"
}
],
"symlink_target": ""
} |
import re, time
from Popen_noblock import Popen_async, PIPE, STDOUT, Empty
__all__ = ["join_definitions", "split_statements_to_definitions"]
# Messages with a level above this threshold are suppressed by DEFAULT_LOG.
DEFAULT_VERBOSITY = 1

def DEFAULT_LOG(text, level=DEFAULT_VERBOSITY):
    """Print text unless its level exceeds the module verbosity threshold."""
    if level > DEFAULT_VERBOSITY:
        return
    print(text)
def get_all_nowait_iter(q):
    """Yield queued items from q until a non-blocking get finds it empty."""
    while True:
        try:
            item = q.get_nowait()
        except Empty:
            return
        yield item
def get_all_nowait(q):
    """Drain q without blocking and return the items joined into one string."""
    pieces = list(get_all_nowait_iter(q))
    return ''.join(pieces)
def get_all_semiwait_iter(q, log=DEFAULT_LOG):
    """Yield items from q: block for the first one, then keep yielding as
    long as the next item arrives within 0.1s. Every item is logged at
    verbosity level 5 before being yielded.
    """
    try:
        # This first get is blocking; TODO(jgross): Figure out how to get
        # coqtop to tell us if it's finished computing.
        item = q.get(True)
        log(item, level=5)
        yield item
        while True:
            item = q.get(True, 0.1)
            log(item, level=5)
            yield item
    except Empty:
        pass
def get_all_semiwait(q, log=DEFAULT_LOG):
    """Collect get_all_semiwait_iter's output into a single string."""
    chunks = get_all_semiwait_iter(q, log=log)
    return ''.join(chunks)
def get_definitions_diff(previous_definition_string, new_definition_string):
    """Diff two '|'-separated definition lists.

    Returns a triple of tuples (definitions_removed, definitions_shared,
    definitions_added), preserving the order in which definitions appear
    in their respective input strings. Empty segments are ignored.
    """
    old_defs = [d for d in previous_definition_string.split('|') if d]
    new_defs = [d for d in new_definition_string.split('|') if d]
    old_set = set(old_defs)
    new_set = set(new_defs)
    removed = tuple(d for d in old_defs if d not in new_set)
    shared = tuple(d for d in old_defs if d in new_set)
    added = tuple(d for d in new_defs if d not in old_set)
    return (removed, shared, added)
#def split_coq_code_to_definitions(code, log=DEFAULT_LOG, coqtop='coqtop'):
# """Splits coq code into chunks which make up
# independent definitions/hints/etc, of the form
#
# {
# 'statements': <list of runnable statements>,
# 'statement': <entire chunk of code>,
# 'terms_defined': <tuple of terms defined by this chunk of code>,
# 'times': <a list of times for each statement>
# 'output': <a string of the response from coqtop>
# }"""
# p = Popen_async([coqtop, '-q', '-emacs', '-time'], stdout=PIPE, stderr=STDOUT, stdin=PIPE)
# time.sleep(1)
def split_statements_to_definitions(statements, log=DEFAULT_LOG, coqtop='coqtop', coqtop_args=tuple()):
    """Splits a list of statements into chunks which make up
    independent definitions/hints/etc.

    Each returned chunk is a dict with keys:
      'statements'    -- tuple of the raw statements in the chunk
      'statement'     -- the same statements joined with newlines
      'terms_defined' -- tuple of names coqtop reported as defined/assumed

    Implementation: the statements are replayed through an interactive
    ``coqtop -emacs`` process and the emacs-mode prompt is parsed to track
    which (possibly nested) definitions are currently open.
    """
    p = Popen_async([coqtop, '-q', '-emacs'] + list(coqtop_args), stdout=PIPE, stderr=STDOUT, stdin=PIPE)
    time.sleep(1)
    # Prompt group 3 is the '|'-separated list of currently-open definition
    # names; spaces in the pattern are relaxed to arbitrary whitespace.
    prompt_reg = re.compile(r'<prompt>([^<]*?) < ([0-9]+) ([^<]*?) ([0-9]+) < ([^<]*?)</prompt>'.replace(' ', r'\s*'))
    defined_reg = re.compile(r'^([^\s]+) is (?:defined|assumed)$', re.MULTILINE)
    # aborted_reg = re.compile(r'^Current goal aborted$', re.MULTILINE)
    # goal_reg = re.compile(r'^\s*=+\s*$', re.MULTILINE)
    # goals and definitions are on stdout, prompts are on stderr
    # clear stdout
    get_all_semiwait(p.stdout, log=log)
    # clear stderr
    # get_all_nowait(p.stderr)
    rtn = []
    cur_definition = {}
    last_definitions = '||'
    cur_definition_names = '||'
    for statement in statements:
        if not statement.strip():
            continue
        log('Write: %s\n\nWait to read...' % statement, level=4)
        p.stdin.write(statement + '\n\n')
        p.stdin.flush()
        stdout = get_all_semiwait(p.stdout, log=log)
        # stderr was merged into stdout above (STDOUT), so prompts and
        # goals arrive on the same stream.
        stderr = stdout # ''.join(get_all_semiwait(p.stderr))
        terms_defined = defined_reg.findall(prompt_reg.sub('', stdout))
        # print((statement, stdout, terms_defined))
        prompt_match = prompt_reg.search(stderr)
        if not prompt_match or len(prompt_match.groups()) != 5:
            if not prompt_match:
                log('Likely fatal warning: I did not recognize the output from coqtop:')
                log('stdout: %s\nstderr: %s' % (repr(stdout), repr(stderr)))
                log("I will append the current statement (%s) to the list of definitions as-is, but I don't expect this to work." % statement)
            else:
                log("Crazy things are happening; the number of groups isn't what it should be (should be 5 groups):")
                log("prompt_match.groups(): %s\nstdout: %s\nstderr: %s\nstatement: %s\n" % (repr(prompt_match.groups()), repr(stdout), repr(stderr), repr(statement)))
            log((statement, terms_defined, cur_definition_names, cur_definition.get(cur_definition_names, [])), level=2)
            # Fallback: without a parsable prompt we cannot update the
            # open-definition state, so attach the statement to whatever
            # state we last knew about.
            if cur_definition_names.strip('|'):
                cur_definition[cur_definition_names]['statements'].append(statement)
                cur_definition[cur_definition_names]['terms_defined'] += terms_defined
            else:
                rtn.append({'statements':(statement,),
                            'statement':statement,
                            'terms_defined':tuple(terms_defined)})
        else:
            cur_name, line_num1, cur_definition_names, line_num2, unknown = prompt_reg.search(stderr).groups()
            definitions_removed, definitions_shared, definitions_added = get_definitions_diff(last_definitions, cur_definition_names)
            # first, to be on the safe side, we add the new
            # definitions key to the dict, if it wasn't already there.
            if cur_definition_names.strip('|') and cur_definition_names not in cur_definition:
                cur_definition[cur_definition_names] = {'statements':[], 'terms_defined':[]}
            log((statement, terms_defined, last_definitions, cur_definition_names, cur_definition.get(last_definitions, []), cur_definition.get(cur_definition_names, [])), level=2)
            # first, we handle the case where we have just finished
            # defining something. This should correspond to
            # len(definitions_removed) > 0 and len(terms_defined) > 0.
            # If only len(definitions_removed) > 0, then we have
            # aborted something. If only len(terms_defined) > 0, then
            # we have defined something with a one-liner.
            if definitions_removed:
                cur_definition[last_definitions]['statements'].append(statement)
                cur_definition[last_definitions]['terms_defined'] += terms_defined
                if cur_definition_names.strip('|'):
                    # we are still inside a definition. For now, we
                    # flatten all definitions.
                    #
                    # TODO(jgross): Come up with a better story for
                    # nested definitions.
                    cur_definition[cur_definition_names]['statements'] += cur_definition[last_definitions]['statements']
                    cur_definition[cur_definition_names]['terms_defined'] += cur_definition[last_definitions]['terms_defined']
                    del cur_definition[last_definitions]
                else:
                    # we're at top-level, so add this as a new
                    # definition
                    rtn.append({'statements':tuple(cur_definition[last_definitions]['statements']),
                                'statement':'\n'.join(cur_definition[last_definitions]['statements']),
                                'terms_defined':tuple(cur_definition[last_definitions]['terms_defined'])})
                    del cur_definition[last_definitions]
                    #print('Adding:')
                    #print(rtn[-1])
            elif terms_defined:
                if cur_definition_names.strip('|'):
                    # we are still inside a definition. For now, we
                    # flatten all definitions.
                    #
                    # TODO(jgross): Come up with a better story for
                    # nested definitions.
                    cur_definition[cur_definition_names]['statements'].append(statement)
                    cur_definition[cur_definition_names]['terms_defined'] += terms_defined
                else:
                    # we're at top level, so add this as a new
                    # definition
                    rtn.append({'statements':(statement,),
                                'statement':statement,
                                'terms_defined':tuple(terms_defined)})
            # now we handle the case where we have just opened a fresh
            # definition. We've already added the key to the
            # dictionary.
            elif definitions_added:
                #print(definitions_added)
                cur_definition[cur_definition_names]['statements'].append(statement)
            else:
                # if we're in a definition, append the statement to
                # the queue, otherwise, just add it as it's own
                # statement
                if cur_definition_names.strip('|'):
                    cur_definition[cur_definition_names]['statements'].append(statement)
                else:
                    rtn.append({'statements':(statement,),
                                'statement':statement,
                                'terms_defined':tuple()})
            last_definitions = cur_definition_names
    log((last_definitions, cur_definition_names), level=2)
    # Flush any still-open definition.  NOTE(review): strip('||') strips the
    # same character set as strip('|'); also last_definitions equals
    # cur_definition_names here, since the loop ends by assigning it.
    if last_definitions.strip('||'):
        rtn.append({'statements':tuple(cur_definition[cur_definition_names]['statements']),
                    'statement':'\n'.join(cur_definition[cur_definition_names]['statements']),
                    'terms_defined':tuple(cur_definition[cur_definition_names]['terms_defined'])})
        del cur_definition[last_definitions]
    #for i in rtn:
    #print(i)
    p.stdin.close()
    return rtn
def join_definitions(definitions):
    """Concatenate the 'statement' field of each definition, one per line."""
    statements = (definition['statement'] for definition in definitions)
    return '\n'.join(statements)
| {
"content_hash": "7b014a21e68a5b225f233099ebfc4599",
"timestamp": "",
"source": "github",
"line_count": 198,
"max_line_length": 180,
"avg_line_length": 48.40909090909091,
"alnum_prop": 0.5854981742305686,
"repo_name": "JasonGross/coq-tools",
"id": "5acd7251fbf78cf1438ec978c3fd9adab745c2e7",
"size": "9585",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "split_definitions_old.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11"
},
{
"name": "Makefile",
"bytes": "192"
},
{
"name": "Python",
"bytes": "391610"
},
{
"name": "Shell",
"bytes": "711"
}
],
"symlink_target": ""
} |
/
/**
 * JSON array value wrapper built on top of Newtonsoft.Json's JArray.
 * Elements are stored as TJSONValue instances and converted back to a
 * JArray on demand.
 */
public class TJSONArray : TJSONValue
{
    // Backing store of wrapped JSON values, kept in array order.
    protected List<TJSONValue> Elements;
    /**
     * Parses the passed String into a new TJSONArray.
     * @param JSONString a JSON array literal
     * @return TJSONArray wrapping the parsed elements
     */
    public static TJSONArray Parse(String JSONString)
    {
        return new TJSONArray(JArray.Parse(JSONString));
    }
    /**
     * Initializes the instance with an existing list of values.
     * The list is referenced, not copied.
     * @param JSONValues
     */
    public TJSONArray(List<TJSONValue> JSONValues)
        : base()
    {
        Elements = JSONValues;
    }
    // Serializes to a compact (non-indented) JSON string.
    public override String ToString()
    {
        return asJSONArray().ToString(Newtonsoft.Json.Formatting.None);
    }
    /**
     * Initializes an empty instance.
     */
    public TJSONArray()
        : base()
    {
        Elements = new List<TJSONValue>();
    }
    /**
     * Initializes the instance from a JArray.
     * @param json
     */
    public TJSONArray(JArray json)
        : base()
    {
        Elements = buildElements(json);
    }
    // Wraps each JArray token in the matching TJSONValue subclass.
    // NOTE(review): any conversion failure is swallowed and null is
    // returned instead of a partial list.
    protected List<TJSONValue> buildElements(JArray arr)
    {
        try
        {
            List<TJSONValue> res = new List<TJSONValue>();
            for (int i = 0; i < arr.Count; i++)
            {
                switch (arr[i].Type)
                {
                    case JTokenType.Null: { res.Add(new TJSONNull()); break; }
                    case JTokenType.String: { res.Add(new TJSONString(arr.Value<string>(i))); break; }
                    case JTokenType.Float: { res.Add(new TJSONNumber(arr.Value<float>(i))); break; }
                    case JTokenType.Integer: { res.Add(new TJSONNumber(arr.Value<long>(i))); break; }
                    case JTokenType.Array: { res.Add(new TJSONArray(arr.Value<JArray>(i))); break; }
                    case JTokenType.Object: { res.Add(new TJSONObject(arr.Value<JObject>(i))); break; }
                    case JTokenType.Boolean: { if (arr.Value<Boolean>(i)) res.Add(new TJSONTrue()); else res.Add(new TJSONFalse()); break; }
                }
            }
            return res;
        }
        catch (Exception)
        {
            return null;
        }
    }
    /**
     * Converts into a JSONArray (Newtonsoft JArray).
     */
    public JArray asJSONArray()
    {
        JArray arr = new JArray();
        foreach (TJSONValue v in Elements)
            arr.Add(v.getInternalObject());
        return arr;
    }
    public override Object getInternalObject()
    {
        return asJSONArray();
    }
    /**
     * Adds a TJSONValue.
     * @param value a TJSONValue
     * @return this instance, for call chaining
     */
    public TJSONArray add(TJSONValue value)
    {
        Elements.Add(value);
        return this;
    }
    // The raw-value add(...) overloads below round-trip through the JArray
    // representation so the value is normalized into a TJSONValue.
    public TJSONArray add(int value)
    {
        JArray app = (JArray)getInternalObject();
        app.Add(value);
        Elements = buildElements(app);
        return this;
    }
    public TJSONArray add(long value)
    {
        JArray app = (JArray)getInternalObject();
        app.Add(value);
        Elements = buildElements(app);
        return this;
    }
    public TJSONArray add(bool value)
    {
        JArray app = (JArray)getInternalObject();
        app.Add(value);
        Elements = buildElements(app);
        return this;
    }
    // Adds a double; wraps any Newtonsoft failure in a DBXException.
    public TJSONArray add(double value)
    {
        JArray app = (JArray)getInternalObject();
        try
        {
            app.Add(value);
        }
        catch (Exception e)
        {
            throw new DBXException(e.Message);
        }
        Elements = buildElements(app);
        return this;
    }
    public TJSONArray add(String value)
    {
        JArray app = (JArray)getInternalObject();
        app.Add(value);
        Elements = buildElements(app);
        return this;
    }
    public TJSONArray add(Object value)
    {
        JArray app = (JArray)getInternalObject();
        app.Add(value);
        Elements = buildElements(app);
        return this;
    }
    /**
     * Returns a string value by the index.
     * @param index the index of the value
     * @return the value as string, or null when the element is null
     */
    public String getString(int index)
    {
        TJSONValue p;
        return ((p = get(index)) == null) ? null : ((TJSONString)p).getValue();
    }
    /**
     * Returns a double value by the index.
     * @param index the index of the value
     * @return the value as double, or null when absent
     */
    public Double? getDouble(int index)
    {
        TJSONValue p = get(index);
        if (p == null)
            return null;
        Double? d = ((TJSONNumber)p).getValue();
        if (d.HasValue)
            return d.Value;
        return null;
    }
    /**
     * Returns a TJSONObject value by the index.
     * @param index the index of the value
     * @return the value as TJSONObject, or null
     */
    public TJSONObject getJSONObject(int index)
    {
        TJSONValue p;
        return ((p = get(index)) == null) ? null : (TJSONObject)p;
    }
    /**
     * Returns an integer value by the index.
     * @param index the index of the value
     * @return the value as integer, or null when absent
     */
    public long? getInt(int index)
    {
        TJSONValue p = get(index);
        if (p == null)
            return null;
        long? d = ((TJSONNumber)p).getValueInt();
        if (d.HasValue)
            return d.Value;
        return null;
    }
    /**
     * Returns a boolean value by the index.
     * NOTE(review): any element that is not a TJSONTrue yields false.
     * @param index the index of the value
     * @return the value as boolean, or null when the element is null
     */
    public Boolean? getBoolean(int index)
    {
        TJSONValue p = get(index);
        if (p == null)
            return null;
        if (p is TJSONTrue)
            return true;
        else
            return false;
    }
    /**
     * Returns a {@link TJSONArray} value by the index.
     * @param index
     * @return TJSONArray, or null
     */
    public TJSONArray getJSONArray(int index)
    {
        TJSONValue p;
        return ((p = get(index)) == null) ? null : (TJSONArray)p;
    }
    /**
     * Returns a {@link TJSONValue} value by the index.
     * @param index
     * @return TJSONValue
     */
    public TJSONValue get(int index)
    {
        return Elements[index];
    }
    /**
     * Returns the element at index cast to TJSONString.
     */
    public TJSONString getAsJsonString(int index)
    {
        return (TJSONString)get(index);
    }
    /**
     * Returns the element at index cast to TJSONObject.
     * @param index
     * @return TJSONObject
     */
    public TJSONObject getAsJsonObject(int index)
    {
        return (TJSONObject)get(index);
    }
    /**
     * Returns the element at index cast to {@link TJSONArray}.
     * @param index
     * @return TJSONArray
     */
    public TJSONArray getAsJsonArray(int index)
    {
        return (TJSONArray)get(index);
    }
    /**
     * Removes an element by the index.
     * @param index the index of the value
     * @return this instance, for call chaining
     */
    public TJSONArray remove(int index)
    {
        Elements.RemoveAt(index);
        return this;
    }
    /**
     * Returns the number of elements.
     */
    public long size()
    {
        return Elements.Count;
    }
    public override JSONValueType getJsonValueType()
    {
        return JSONValueType.JSONArray;
    }
}
}
| {
"content_hash": "4ce51b086d6d67618c0f9cf3636ec24f",
"timestamp": "",
"source": "github",
"line_count": 305,
"max_line_length": 144,
"avg_line_length": 27.20655737704918,
"alnum_prop": 0.47023379127500603,
"repo_name": "fortesinformatica/fortesreport-ce",
"id": "e441db32a5b02ad463567b8b469d073a14c61fc8",
"size": "8711",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Demos/Datasnap/Server/proxy/csharp_silverlight/TJSONArray.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Pascal",
"bytes": "1806471"
}
],
"symlink_target": ""
} |
// Google Drive API configuration used when initialising the gapi client.
export function getDriveConfig() {
  return {
    discoverDocs: ["https://www.googleapis.com/discovery/v1/apis/drive/v3/rest"],
    clientId: '38703598827-318fe1f76ju71nedjkudad6gcvteglii.apps.googleusercontent.com',
    scope: 'https://www.googleapis.com/auth/drive'
  }
}
| {
"content_hash": "4d13eb3a5e20280c22c22791ed3b6f76",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 88,
"avg_line_length": 37.5,
"alnum_prop": 0.74,
"repo_name": "subnotes/gemini",
"id": "d771b31e82ad4f41559d0f8bcfca577827d8e87d",
"size": "300",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/configs/driveConfigs.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "142"
},
{
"name": "HTML",
"bytes": "459"
},
{
"name": "JavaScript",
"bytes": "110359"
},
{
"name": "Shell",
"bytes": "109"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "d9de22e6c3d78df47376757d1af98f68",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "f1269838ad1f33b81357dcfc1c86cc8b560c0b38",
"size": "179",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Lamiales/Plantaginaceae/Limosella/Limosella pretoriensis/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
package com.mindcoders.phial.internal.util;
/**
* Created by rost on 11/3/17.
*/
/** Small object-related helpers; not instantiable. */
public final class ObjectUtil {
    /** Utility class: prevent instantiation. */
    private ObjectUtil() {
    }

    /**
     * Null-safe equality: true when both references are identical
     * (including both null) or when {@code a.equals(b)} holds.
     */
    public static <T> boolean equals(T a, T b) {
        if (a == b) {
            return true;
        }
        return a != null && a.equals(b);
    }
}
| {
"content_hash": "c4a913b90432b5a95928e553a624e6b0",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 54,
"avg_line_length": 18.733333333333334,
"alnum_prop": 0.5587188612099644,
"repo_name": "roshakorost/Phial",
"id": "e8e1302dee487e8c02904cf7b1695cae1dfcae2a",
"size": "281",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "phial-overlay/src/main/java/com/mindcoders/phial/internal/util/ObjectUtil.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "3175"
},
{
"name": "Java",
"bytes": "312306"
}
],
"symlink_target": ""
} |
/**
* This is the minimum Datagrid widget for data tables
*
* [table]
* [thead]
* <tr> th, ..., th </tr>
* [tbody]
* <tr> td, ..., td </tr>
* ...
* <tr> ... </tr>
*
* Options
* -------
* 1. data []: rows of data
* 2. columns [
* {
* name: datum key in data row
* cell: cell name
* header: header cell name
* label: name given to header cell (instead of _.titleize(name))
* }
* ]
* 3. details: false or datum name in data row or a view definition (render with row.model) - TBI
*
*
* Events
* ------
* 1. row:clicked
* 2. row:dblclicked
*
*
* Note
* ----
* The details row appears under each normal data row;
*
* TBI
* ---
* select header/cell
* details row is still in TBI status (extra tr stub, view close clean up)
*
*
* @author Tim Lauv
* @created 2014.04.22
*/
;(function(app){

    app.widget('Datagrid', function(){

        // Outer <table> view with two regions: header (<thead>) and body (<tbody>).
        var UI = app.view({
            tagName: 'table',
            template: [
                '<thead region="header"></thead>',
                '<tbody region="body"></tbody>'
            ],
            initialize: function(options){
                this.options = _.extend({
                    data: [],
                    details: false,
                    columns: []
                }, options);
            },
            onReady: function(){
                this.trigger('view:reconfigure', _.extend(this.options, {data: this.get('items', [])}));
            },
            onReconfigure: function(options){
                options = options || {};
                //1-1. reconfigure data and columns into this.options
                this.options = _.extend(this.options, options);
                //1-2. rebuild header cell options - let it rerender with new column array
                _.each(this.options.columns, function(column){
                    column.header = column.header || 'string';
                    column.cell = column.cell || column.header || 'string';
                    column.label = column.label || _.string.titleize(column.name);
                });
                //2. ensure header and body views
                if(!this.header.currentView)
                    this.header.show(HeaderRow.create({grid: this}));
                if(!this.body.currentView){
                    var that = this;
                    var body = Body.create({
                        //el can be css selector string, dom or $(dom)
                        el: this.body.$el,
                        //Note that a region's el !== $el[0], but a view's el === $el[0] in Marionette.
                        grid: this
                    }).on('all', function(e){
                        //setup page related events forwarding (page-changed, page-not-changed)
                        if(/page-/.test(e))
                            that.trigger.apply(that, arguments);
                    });
                    this.body.show(body);
                }
                ////////////////Note that the ifs here are for early 'show' --> .set() when using local .data////////////////
                this.header.currentView.set(this.options.columns);
                this.body.currentView.options = this.options;
                /////////////////////////////////////////////////////////////////////////////////////////////////////////////
                this.trigger('view:set-grid-data', this.options.data);
            },
            onSetGridData: function(data){
                //3. rebuild body rows - let it rerender with new data array
                this.body.currentView.set(data);
            },
            onLoadPage: function(options){
                this.body.currentView.trigger('view:load-page', options);
            },
            getBody: function(){
                return this.body.currentView;
            },
            getHeader: function(){
                return this.header.currentView;
            }
        });

        // <tr> in <thead>; one dynamically-resolved header cell widget per column.
        var HeaderRow = app.view({
            type: 'CollectionView',
            forceViewType: true,
            itemView: 'dynamic',
            itemViewEventPrefix: 'headercell',
            tagName: 'tr',
            initialize: function(options){
                this.grid = options.grid; //give each row the grid view ref.
            },
            //buildItemView - select proper header cell
            buildItemView: function(item, ItemViewType, itemViewOptions){
                var HCell = app.get(_.string.classify([item.get('header'), 'header', 'cell'].join('-')), 'Widget');
                return HCell.create({
                    model: item,
                    tagName: 'th',
                    row: this //link each cell (this.options.row) with the row. (use/link it in cell's init())
                });
            }
        });

        // <tr> in <tbody>; forwards DOM click/dblclick as row-level events.
        var Row = app.view({
            type: 'CollectionView',
            forceViewType: true,
            itemView: 'dynamic',
            itemViewEventPrefix: 'cell',
            tagName: 'tr',
            triggers: { //forward DOM events to row
                'click': {
                    event: 'clicked',
                    preventDefault: false //for cell elements to work properly (checkbox/radio/<anchor/>)
                },
                'dblclick': {
                    event: 'dblclicked',
                    preventDefault: false
                }
            },
            initialize: function(options){
                this.grid = options.body.grid; //give each row the grid view ref.
            },
            //buildItemView - select proper cell
            buildItemView: function(item, ItemViewType, itemViewOptions){
                var Cell = app.get(_.string.classify([item.get('cell'), 'cell'].join('-')), 'Widget');
                return Cell.create({
                    tagName: 'td',
                    model: item,
                    row: this //link each cell (this.options.row) with the row. (use/link it in cell's init())
                });
            }
        });

        // Collection of Row views rendered into the <tbody> region; builds each
        // row's cell collection from the configured columns and the row datum.
        var Body = app.view({
            type: 'CollectionView',
            forceViewType: true,
            itemView: Row,
            itemViewEventPrefix: 'row',
            initialize: function(options){
                this.grid = options.grid;
            },
            itemViewOptions: function(model, index){
                return {
                    collection: app.collection(_.map(this.options.columns, function(column){
                        return _.extend({
                            value: app.extract(column.name || '', model.attributes),
                            index: index
                        }, column);
                    }, this)),
                    body: this //passing body to row view
                };
            },
            itemEvents: { //forward row events to grid
                'clicked': function(e, row){
                    row.grid.trigger('row:clicked', row);
                },
                'dblclicked': function(e, row){
                    row.grid.trigger('row:dblclicked', row);
                }
            }
        });

        return UI;
    });

})(Application);
"content_hash": "819e6ebf22fe459e1707fd35b049e382",
"timestamp": "",
"source": "github",
"line_count": 204,
"max_line_length": 116,
"avg_line_length": 27.20098039215686,
"alnum_prop": 0.5894755811857992,
"repo_name": "mr-beaver/Stage.js",
"id": "70e2dadebc27806d1d3f63fa42e5e2907888f6f9",
"size": "5549",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "implementation/js/src/reusable/widgets/datagrid-lite/index.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "121023"
},
{
"name": "HTML",
"bytes": "94863"
},
{
"name": "JavaScript",
"bytes": "615277"
},
{
"name": "Shell",
"bytes": "2880"
}
],
"symlink_target": ""
} |
\hypertarget{dir_a11ffb2746d29cf7f0e6c96faf36148e}{}\section{/home/bhargavi/\+Documents/\+S\+D\+R/\+Copy\+\_\+\+Exam\+\_\+808\+X/vendor/googletest/googlemock/src Directory Reference}
\label{dir_a11ffb2746d29cf7f0e6c96faf36148e}\index{/home/bhargavi/\+Documents/\+S\+D\+R/\+Copy\+\_\+\+Exam\+\_\+808\+X/vendor/googletest/googlemock/src Directory Reference@{/home/bhargavi/\+Documents/\+S\+D\+R/\+Copy\+\_\+\+Exam\+\_\+808\+X/vendor/googletest/googlemock/src Directory Reference}}
Directory dependency graph for src\+:
% FIG 0
\subsection*{Files}
\begin{DoxyCompactItemize}
\item
file \hyperlink{gmock-all_8cc}{gmock-\/all.\+cc}
\item
file \hyperlink{gmock-cardinalities_8cc}{gmock-\/cardinalities.\+cc}
\item
file \hyperlink{gmock-internal-utils_8cc}{gmock-\/internal-\/utils.\+cc}
\item
file \hyperlink{gmock-matchers_8cc}{gmock-\/matchers.\+cc}
\item
file \hyperlink{gmock-spec-builders_8cc}{gmock-\/spec-\/builders.\+cc}
\item
file \hyperlink{gmock_8cc}{gmock.\+cc}
\item
file \hyperlink{gmock__main_8cc}{gmock\+\_\+main.\+cc}
\end{DoxyCompactItemize}
| {
"content_hash": "c574c57494008243ee2ba0199a657c36",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 295,
"avg_line_length": 50.523809523809526,
"alnum_prop": 0.7257304429783223,
"repo_name": "bhargavipatel/808X_VO",
"id": "fe6c63c00d57ed49f072ef156c79d5345a4934bb",
"size": "1061",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/latex/dir_a11ffb2746d29cf7f0e6c96faf36148e.tex",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "18150"
},
{
"name": "CMake",
"bytes": "9404"
},
{
"name": "Matlab",
"bytes": "10128"
},
{
"name": "Python",
"bytes": "5095"
}
],
"symlink_target": ""
} |
Copyright (c) 2009-2013 Eurobit Developers
Distributed under the MIT/X11 software license, see the accompanying
file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the [OpenSSL Toolkit](http://www.openssl.org/). This product includes
cryptographic software written by Eric Young ([eay@cryptsoft.com](mailto:eay@cryptsoft.com)), and UPnP software written by Thomas Bernard.
See readme-qt.rst for instructions on building Eurobit-Qt, the
graphical user interface.
WINDOWS BUILD NOTES
===================
Compilers Supported
-------------------
TODO: What works?
Note: releases are cross-compiled using mingw running on Linux.
Dependencies
------------
Libraries you need to download separately and build:
default path download
OpenSSL \openssl-1.0.1c-mgw http://www.openssl.org/source/
Berkeley DB \db-4.8.30.NC-mgw http://www.oracle.com/technology/software/products/berkeley-db/index.html
Boost \boost-1.50.0-mgw http://www.boost.org/users/download/
miniupnpc \miniupnpc-1.6-mgw http://miniupnp.tuxfamily.org/files/
Their licenses:
OpenSSL Old BSD license with the problematic advertising requirement
Berkeley DB New BSD license with additional requirement that linked software must be free open source
Boost MIT-like license
miniupnpc New (3-clause) BSD license
Versions used in this release:
OpenSSL 1.0.1c
Berkeley DB 4.8.30.NC
Boost 1.50.0
miniupnpc 1.6
OpenSSL
-------
MSYS shell:
un-tar sources with MSYS 'tar xfz' to avoid issue with symlinks (OpenSSL ticket 2377)
change 'MAKE' env. variable from 'C:\MinGW32\bin\mingw32-make.exe' to '/c/MinGW32/bin/mingw32-make.exe'
cd /c/openssl-1.0.1c-mgw
./config
make
Berkeley DB
-----------
MSYS shell:
cd /c/db-4.8.30.NC-mgw/build_unix
sh ../dist/configure --enable-mingw --enable-cxx
make
Boost
-----
DOS prompt:
downloaded boost jam 3.1.18
cd \boost-1.50.0-mgw
bjam toolset=gcc --build-type=complete stage
MiniUPnPc
---------
UPnP support is optional, make with `USE_UPNP=` to disable it.
MSYS shell:
cd /c/miniupnpc-1.6-mgw
make -f Makefile.mingw
mkdir miniupnpc
cp *.h miniupnpc/
Eurobit
-------
DOS prompt:
cd \eurobit\src
mingw32-make -f makefile.mingw
strip eurobitd.exe
| {
"content_hash": "b231dd115b324c86b7fff6d4177e2c9f",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 144,
"avg_line_length": 26.921348314606742,
"alnum_prop": 0.6986644407345576,
"repo_name": "omnicoin/eurobit",
"id": "a26c2c614b560c4914d06d3d4e99e9dcdaf7762f",
"size": "2396",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "doc/build-msw.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "89580"
},
{
"name": "C++",
"bytes": "2533769"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "Objective-C",
"bytes": "5711"
},
{
"name": "Prolog",
"bytes": "14696"
},
{
"name": "Python",
"bytes": "69709"
},
{
"name": "Shell",
"bytes": "10587"
},
{
"name": "TypeScript",
"bytes": "5236293"
}
],
"symlink_target": ""
} |
// Template Source: BaseEntity.java.tt
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
// ------------------------------------------------------------------------------
package com.microsoft.graph.models;
import com.microsoft.graph.serializer.ISerializer;
import com.microsoft.graph.serializer.IJsonBackedObject;
import com.microsoft.graph.serializer.AdditionalDataManager;
import java.util.EnumSet;
import com.microsoft.graph.models.ImportedWindowsAutopilotDeviceIdentityImportStatus;
import com.google.gson.JsonObject;
import com.google.gson.annotations.SerializedName;
import com.google.gson.annotations.Expose;
import javax.annotation.Nullable;
import javax.annotation.Nonnull;
// **NOTE** This file was generated by a tool and any changes will be overwritten.
/**
* The class for the Imported Windows Autopilot Device Identity State.
*/
public class ImportedWindowsAutopilotDeviceIdentityState implements IJsonBackedObject {

    /** the OData type of the object as returned by the service */
    @SerializedName("@odata.type")
    @Expose
    @Nullable
    public String oDataType;

    // Collects JSON properties returned by the service that are not mapped
    // to a declared field of this class.
    private transient AdditionalDataManager additionalDataManager = new AdditionalDataManager(this);

    @Override
    @Nonnull
    public final AdditionalDataManager additionalDataManager() {
        return additionalDataManager;
    }

    /**
     * The Device Error Code.
     * Device error code reported by Device Directory Service(DDS).
     */
    @SerializedName(value = "deviceErrorCode", alternate = {"DeviceErrorCode"})
    @Expose
    @Nullable
    public Integer deviceErrorCode;

    /**
     * The Device Error Name.
     * Device error name reported by Device Directory Service(DDS).
     */
    @SerializedName(value = "deviceErrorName", alternate = {"DeviceErrorName"})
    @Expose
    @Nullable
    public String deviceErrorName;

    /**
     * The Device Import Status.
     * Device status reported by Device Directory Service(DDS). Possible values are: unknown, pending, partial, complete, error.
     */
    @SerializedName(value = "deviceImportStatus", alternate = {"DeviceImportStatus"})
    @Expose
    @Nullable
    public ImportedWindowsAutopilotDeviceIdentityImportStatus deviceImportStatus;

    /**
     * The Device Registration Id.
     * Device Registration ID for successfully added device reported by Device Directory Service(DDS).
     */
    @SerializedName(value = "deviceRegistrationId", alternate = {"DeviceRegistrationId"})
    @Expose
    @Nullable
    public String deviceRegistrationId;

    /**
     * Sets the raw JSON object.
     * Intentionally empty: this type declares no nested collections or
     * fields that require post-deserialization handling.
     *
     * @param serializer the serializer
     * @param json the JSON object to set this object to
     */
    public void setRawObject(@Nonnull final ISerializer serializer, @Nonnull final JsonObject json) {
    }
}
| {
"content_hash": "fe08a5390cd7369fb28d6e343c3833c2",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 152,
"avg_line_length": 34.195402298850574,
"alnum_prop": 0.6954621848739496,
"repo_name": "microsoftgraph/msgraph-sdk-java",
"id": "6f0fe146b999ee3b45bc6488d92ee32f1371e196",
"size": "2975",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "src/main/java/com/microsoft/graph/models/ImportedWindowsAutopilotDeviceIdentityState.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "27286837"
},
{
"name": "PowerShell",
"bytes": "5635"
}
],
"symlink_target": ""
} |
import vtk
from vtk.util.misc import vtkGetDataRoot
from vtk.util.colors import *
VTK_DATA_ROOT = vtkGetDataRoot()
# Load the CFD office-airflow dataset (ventilation plus a burning
# cigarette).  Update the reader immediately so the output can be queried
# for the length of its bounding-box diagonal, which is useful for
# normalizing the data.
reader = vtk.vtkStructuredGridReader()
reader.SetFileName(VTK_DATA_ROOT + "/Data/office.binary.vtk")
reader.Update()
diagonal_length = reader.GetOutput().GetLength()
max_velocity = reader.GetOutput().GetPointData().GetVectors().GetMaxNorm()
# Upper bound on integration time (computed for reference; not used below).
max_time = 35.0 * diagonal_length / max_velocity
# Trace a single streamline through the velocity field.  A 4th-order
# Runge-Kutta integrator is attached to the stream tracer; the seed point
# is given in world coordinates and integration proceeds both upstream
# and downstream from it.  SetInitialIntegrationStep controls the length
# of the generated line segments relative to the local cell size.
rk4 = vtk.vtkRungeKutta4()
tracer = vtk.vtkStreamTracer()
tracer.SetInputConnection(reader.GetOutputPort())
tracer.SetStartPosition(0.1, 2.1, 0.5)
tracer.SetMaximumPropagation(500)
tracer.SetInitialIntegrationStep(0.05)
tracer.SetIntegrationDirectionToBoth()
tracer.SetIntegrator(rk4)
# Wrap a tube around the streamline.  Varying the radius by the inverse
# of the vector magnitude makes the tube radius proportional to mass
# flux (for incompressible flow).
tube = vtk.vtkTubeFilter()
tube.SetInputConnection(tracer.GetOutputPort())
tube.SetInputArrayToProcess(1, 0, 0, vtk.vtkDataObject.FIELD_ASSOCIATION_POINTS, "vectors")
tube.SetRadius(0.02)
tube.SetNumberOfSides(12)
tube.SetVaryRadiusToVaryRadiusByVector()
tubeMapper = vtk.vtkPolyDataMapper()
tubeMapper.SetInputConnection(tube.GetOutputPort())
tubeMapper.SetScalarRange(reader.GetOutput().GetPointData().GetScalars().GetRange())
streamTubeActor = vtk.vtkActor()
streamTubeActor.SetMapper(tubeMapper)
streamTubeActor.GetProperty().BackfaceCullingOn()
# From here on we generate the geometry present in the analysis --
# tables, filing cabinets, bookshelves, window, inlet and outlet -- as
# planes extracted from the structured grid.  Every plane follows the
# identical extract/map/actor pipeline, so build them with one helper
# instead of repeating the same six calls twenty times.
def make_plane_actor(extent, color):
    """Extract one plane of the structured grid and return a colored actor.

    extent -- 6-tuple (imin, imax, jmin, jmax, kmin, kmax) passed to
              vtkStructuredGridGeometryFilter.SetExtent
    color  -- RGB 3-tuple used as the actor's flat surface color
    """
    geometry = vtk.vtkStructuredGridGeometryFilter()
    geometry.SetInputConnection(reader.GetOutputPort())
    geometry.SetExtent(*extent)
    mapper = vtk.vtkPolyDataMapper()
    mapper.SetInputConnection(geometry.GetOutputPort())
    mapper.ScalarVisibilityOff()  # flat color; ignore the scalar data
    actor = vtk.vtkActor()
    actor.SetMapper(mapper)
    actor.GetProperty().SetColor(*color)
    return actor

# Furniture colors used by the original example.
wood = (.59, .427, .392)
shelf = (.8, .8, .6)

table1Actor = make_plane_actor((11, 15, 7, 9, 8, 8), wood)
table2Actor = make_plane_actor((11, 15, 10, 12, 8, 8), wood)
FilingCabinet1Actor = make_plane_actor((15, 15, 7, 9, 0, 8), shelf)
FilingCabinet2Actor = make_plane_actor((15, 15, 10, 12, 0, 8), shelf)
bookshelf1TopActor = make_plane_actor((13, 13, 0, 4, 0, 11), shelf)
bookshelf1BottomActor = make_plane_actor((20, 20, 0, 4, 0, 11), shelf)
bookshelf1FrontActor = make_plane_actor((13, 20, 0, 0, 0, 11), shelf)
bookshelf1BackActor = make_plane_actor((13, 20, 4, 4, 0, 11), shelf)
bookshelf1LHSActor = make_plane_actor((13, 20, 0, 4, 0, 0), shelf)
bookshelf1RHSActor = make_plane_actor((13, 20, 0, 4, 11, 11), shelf)
bookshelf2TopActor = make_plane_actor((13, 13, 15, 19, 0, 11), shelf)
bookshelf2BottomActor = make_plane_actor((20, 20, 15, 19, 0, 11), shelf)
bookshelf2FrontActor = make_plane_actor((13, 20, 15, 15, 0, 11), shelf)
bookshelf2BackActor = make_plane_actor((13, 20, 19, 19, 0, 11), shelf)
bookshelf2LHSActor = make_plane_actor((13, 20, 15, 19, 0, 0), shelf)
bookshelf2RHSActor = make_plane_actor((13, 20, 15, 19, 11, 11), shelf)
windowActor = make_plane_actor((20, 20, 6, 13, 10, 13), (.3, .3, .5))
outletActor = make_plane_actor((0, 0, 9, 10, 14, 16), (0, 0, 0))
inletActor = make_plane_actor((0, 0, 9, 10, 0, 6), (0, 0, 0))

# Outline of the whole dataset (different filter; scalar visibility is
# left at its default to match the original pipeline).
outline = vtk.vtkStructuredGridOutlineFilter()
outline.SetInputConnection(reader.GetOutputPort())
mapOutline = vtk.vtkPolyDataMapper()
mapOutline.SetInputConnection(outline.GetOutputPort())
outlineActor = vtk.vtkActor()
outlineActor.SetMapper(mapOutline)
outlineActor.GetProperty().SetColor(0, 0, 0)
# Standard rendering infrastructure: renderer, render window, interactor.
ren = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# Add every prop to the scene in one pass.
for prop in (table1Actor, table2Actor, FilingCabinet1Actor, FilingCabinet2Actor,
             bookshelf1TopActor, bookshelf1BottomActor, bookshelf1FrontActor,
             bookshelf1BackActor, bookshelf1LHSActor, bookshelf1RHSActor,
             bookshelf2TopActor, bookshelf2BottomActor, bookshelf2FrontActor,
             bookshelf2BackActor, bookshelf2LHSActor, bookshelf2RHSActor,
             windowActor, outletActor, inletActor, outlineActor,
             streamTubeActor):
    ren.AddActor(prop)
ren.SetBackground(slate_grey)
# A hand-tuned camera giving a particular view of the office.
aCamera = vtk.vtkCamera()
aCamera.SetClippingRange(0.726079, 36.3039)
aCamera.SetFocalPoint(2.43584, 2.15046, 1.11104)
aCamera.SetPosition(-4.76183, -10.4426, 3.17203)
aCamera.SetViewUp(0.0511273, 0.132773, 0.989827)
aCamera.SetViewAngle(18.604)
aCamera.Zoom(1.2)
ren.SetActiveCamera(aCamera)
renWin.SetSize(500, 300)
# Start the event loop.
iren.Initialize()
renWin.Render()
iren.Start()
| {
"content_hash": "fe0e49240a9cb93be88356aa57322e28",
"timestamp": "",
"source": "github",
"line_count": 298,
"max_line_length": 97,
"avg_line_length": 41.02013422818792,
"alnum_prop": 0.8261616492146597,
"repo_name": "keithroe/vtkoptix",
"id": "037cc30aa9b42b1d89a279087be27525c9d7ae79",
"size": "12353",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "Examples/VisualizationAlgorithms/Python/officeTube.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "37444"
},
{
"name": "Batchfile",
"bytes": "106"
},
{
"name": "C",
"bytes": "46217717"
},
{
"name": "C++",
"bytes": "73779038"
},
{
"name": "CMake",
"bytes": "1786055"
},
{
"name": "CSS",
"bytes": "7532"
},
{
"name": "Cuda",
"bytes": "37418"
},
{
"name": "D",
"bytes": "2081"
},
{
"name": "GAP",
"bytes": "14120"
},
{
"name": "GLSL",
"bytes": "222494"
},
{
"name": "Groff",
"bytes": "65394"
},
{
"name": "HTML",
"bytes": "193016"
},
{
"name": "Java",
"bytes": "148789"
},
{
"name": "JavaScript",
"bytes": "54139"
},
{
"name": "Lex",
"bytes": "50109"
},
{
"name": "M4",
"bytes": "159710"
},
{
"name": "Makefile",
"bytes": "275672"
},
{
"name": "Objective-C",
"bytes": "22779"
},
{
"name": "Objective-C++",
"bytes": "191216"
},
{
"name": "Perl",
"bytes": "173168"
},
{
"name": "Prolog",
"bytes": "4406"
},
{
"name": "Python",
"bytes": "15765617"
},
{
"name": "Shell",
"bytes": "88087"
},
{
"name": "Slash",
"bytes": "1476"
},
{
"name": "Smarty",
"bytes": "393"
},
{
"name": "Tcl",
"bytes": "1404085"
},
{
"name": "Yacc",
"bytes": "191144"
}
],
"symlink_target": ""
} |
/*
* Programmer: rky 980813
*
* Purpose: Functions to read/write directly between app buffer and file.
*
*/
#define H5S_PACKAGE /*suppress error about including H5Spkg */
#include "H5private.h" /* Generic Functions */
#include "H5Dprivate.h" /* Datasets */
#include "H5Eprivate.h" /* Error handling */
#include "H5Fprivate.h" /* File access */
#include "H5FDprivate.h" /* File drivers */
#include "H5Iprivate.h" /* IDs */
#include "H5MMprivate.h" /* Memory management */
#include "H5Oprivate.h" /* Object headers */
#include "H5Pprivate.h" /* Property lists */
#include "H5Spkg.h" /* Dataspaces */
#include "H5VMprivate.h" /* Vector and array functions */
#ifdef H5_HAVE_PARALLEL
static herr_t H5S_mpio_all_type(const H5S_t *space, size_t elmt_size, MPI_Datatype *new_type, int *count,
hbool_t *is_derived_type);
static herr_t H5S_mpio_none_type(MPI_Datatype *new_type, int *count, hbool_t *is_derived_type);
static herr_t H5S_mpio_create_point_datatype(size_t elmt_size, hsize_t num_points, MPI_Aint *disp,
MPI_Datatype *new_type);
static herr_t H5S_mpio_point_type(const H5S_t *space, size_t elmt_size, MPI_Datatype *new_type, int *count,
hbool_t *is_derived_type, hbool_t do_permute, hsize_t **permute_map,
hbool_t *is_permuted);
static herr_t H5S_mpio_permute_type(const H5S_t *space, size_t elmt_size, hsize_t **permute_map,
MPI_Datatype *new_type, int *count, hbool_t *is_derived_type);
static herr_t H5S_mpio_hyper_type(const H5S_t *space, size_t elmt_size, MPI_Datatype *new_type, int *count,
hbool_t *is_derived_type);
static herr_t H5S_mpio_span_hyper_type(const H5S_t *space, size_t elmt_size, MPI_Datatype *new_type,
int *count, hbool_t *is_derived_type);
static herr_t H5S_obtain_datatype(const hsize_t down[], H5S_hyper_span_t *span, const MPI_Datatype *elmt_type,
MPI_Datatype *span_type, size_t elmt_size);
#define H5S_MPIO_INITIAL_ALLOC_COUNT 256
/*-------------------------------------------------------------------------
 * Function:    H5S_mpio_all_type
 *
 * Purpose:     Translate an HDF5 "all" selection into an MPI type.  Since
 *              an "all" selection covers the entire extent, it is described
 *              as a single contiguous run of MPI_BYTEs.
 *
 * Return:      Non-negative on success, negative on failure.
 *
 * Outputs:     *new_type        the MPI type corresponding to the selection
 *              *count           how many objects of the new_type in selection
 *                               (useful if this is the buffer type for xfer)
 *              *is_derived_type 0 if MPI primitive type, 1 if derived
 *
 * Programmer:  rky 980813
 *
 *-------------------------------------------------------------------------
 */
static herr_t
H5S_mpio_all_type(const H5S_t *space, size_t elmt_size, MPI_Datatype *new_type, int *count,
                  hbool_t *is_derived_type)
{
    hsize_t  byte_count;            /* Total number of bytes in the extent */
    hssize_t signed_npoints;        /* Total number of elements (signed) */
    hsize_t  npoints;               /* Total number of elements */
    herr_t   ret_value = SUCCEED;   /* Return value */

    FUNC_ENTER_NOAPI_NOINIT

    /* Check args */
    HDassert(space);

    /* Treat the whole extent as one block of bytes */
    if ((signed_npoints = (hssize_t)H5S_GET_EXTENT_NPOINTS(space)) < 0)
        HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "src dataspace has invalid selection")
    H5_CHECKED_ASSIGN(npoints, hsize_t, signed_npoints, hssize_t);
    byte_count = (hsize_t)elmt_size * npoints;

    /* Fill in the return values: one MPI_BYTE per byte of the extent */
    *new_type = MPI_BYTE;
    H5_CHECKED_ASSIGN(*count, int, byte_count, hsize_t);
    *is_derived_type = FALSE;

done:
    FUNC_LEAVE_NOAPI(ret_value)
} /* H5S_mpio_all_type() */
/*-------------------------------------------------------------------------
 * Function:    H5S_mpio_none_type
 *
 * Purpose:     Translate an HDF5 "none" selection into an MPI type:
 *              zero instances of the MPI_BYTE primitive type.
 *
 * Return:      Non-negative on success, negative on failure.
 *
 * Outputs:     *new_type        the MPI type corresponding to the selection
 *              *count           how many objects of the new_type in selection
 *                               (useful if this is the buffer type for xfer)
 *              *is_derived_type 0 if MPI primitive type, 1 if derived
 *
 * Programmer:  Quincey Koziol, October 29, 2002
 *
 *-------------------------------------------------------------------------
 */
static herr_t
H5S_mpio_none_type(MPI_Datatype *new_type, int *count, hbool_t *is_derived_type)
{
    FUNC_ENTER_NOAPI_NOINIT_NOERR

    /* Nothing selected: a count of zero primitive bytes */
    *is_derived_type = FALSE;
    *count = 0;
    *new_type = MPI_BYTE;

    FUNC_LEAVE_NOAPI(SUCCEED)
} /* H5S_mpio_none_type() */
/*-------------------------------------------------------------------------
 * Function:    H5S_mpio_create_point_datatype
 *
 * Purpose:     Create a committed MPI derived datatype for a point
 *              selection, given the byte displacement of each point.
 *
 * Return:      Non-negative on success, negative on failure.
 *
 * Outputs:     *new_type   the (committed) MPI type corresponding to the
 *                          selection; caller is responsible for freeing it
 *
 * Note:        num_points is narrowed to int for the MPI call, so it must
 *              fit in an int (MPI-imposed limit).
 *
 * Programmer:  Mohamad Chaarawi
 *
 *-------------------------------------------------------------------------
 */
static herr_t
H5S_mpio_create_point_datatype(size_t elmt_size, hsize_t num_points, MPI_Aint *disp, MPI_Datatype *new_type)
{
    MPI_Datatype elmt_type;                 /* MPI datatype for individual element */
    hbool_t      elmt_type_created = FALSE; /* Whether the element MPI datatype was created */
    int          mpi_code;                  /* MPI error code */
    int *        blocks = NULL;             /* Array of block sizes for MPI hindexed create call */
    hsize_t      u;                         /* Local index variable */
    herr_t       ret_value = SUCCEED;       /* Return value */

    FUNC_ENTER_NOAPI_NOINIT

    /* Create an MPI datatype for an element */
    if (MPI_SUCCESS != (mpi_code = MPI_Type_contiguous((int)elmt_size, MPI_BYTE, &elmt_type)))
        HMPI_GOTO_ERROR(FAIL, "MPI_Type_contiguous failed", mpi_code)
    elmt_type_created = TRUE;

    /* Allocate block sizes for MPI datatype call */
    if (NULL == (blocks = (int *)H5MM_malloc(sizeof(int) * num_points)))
        HGOTO_ERROR(H5E_DATASPACE, H5E_CANTALLOC, FAIL, "can't allocate array of blocks")

    /* MPI_Type_create_hindexed needs a block-length array; every block here
     * holds exactly one element.  (MPI_Type_create_hindexed_block would avoid
     * this all-ones array, but is only available from MPI-3 on.) */
    for (u = 0; u < num_points; u++)
        blocks[u] = 1;

    /* Create an MPI datatype for the whole point selection */
    if (MPI_SUCCESS !=
        (mpi_code = MPI_Type_create_hindexed((int)num_points, blocks, disp, elmt_type, new_type)))
        /* Fixed: the message previously named MPI_Type_create_indexed_block,
         * which is not the routine being called */
        HMPI_GOTO_ERROR(FAIL, "MPI_Type_create_hindexed failed", mpi_code)

    /* Commit MPI datatype for later use */
    if (MPI_SUCCESS != (mpi_code = MPI_Type_commit(new_type)))
        HMPI_GOTO_ERROR(FAIL, "MPI_Type_commit failed", mpi_code)

done:
    /* Release the element type (the committed type keeps its own copy)
     * and the temporary block-length array */
    if (elmt_type_created)
        MPI_Type_free(&elmt_type);
    if (blocks)
        H5MM_free(blocks);

    FUNC_LEAVE_NOAPI(ret_value)
} /* H5S_mpio_create_point_datatype() */
/*-------------------------------------------------------------------------
 * Function:    H5S_mpio_point_type
 *
 * Purpose:     Translate an HDF5 "point" selection into an MPI type.
 *              Create a permutation array to handle out-of-order point
 *              selections.
 *
 * Return:      Non-negative on success, negative on failure.
 *
 * Outputs:     *new_type        the MPI type corresponding to the selection
 *              *count           how many objects of the new_type in selection
 *                               (useful if this is the buffer type for xfer)
 *              *is_derived_type 0 if MPI primitive type, 1 if derived
 *              *permute_map     the permutation of the displacements used to
 *                               create the MPI_Datatype (only filled in when
 *                               do_permute is set)
 *              *is_permuted     set to 1 if the displacements had to be
 *                               permuted into non-decreasing order, left
 *                               unchanged otherwise
 *
 * Programmer:  Mohamad Chaarawi
 *
 *-------------------------------------------------------------------------
 */
static herr_t
H5S_mpio_point_type(const H5S_t *space, size_t elmt_size, MPI_Datatype *new_type, int *count,
                    hbool_t *is_derived_type, hbool_t do_permute, hsize_t **permute, hbool_t *is_permuted)
{
    MPI_Aint *      disp = NULL; /* Datatype displacement (bytes) for each point */
    H5S_pnt_node_t *curr = NULL; /* Current point being operated on from the selection */
    hssize_t        snum_points; /* Signed number of elements in selection */
    hsize_t         num_points;  /* Number of points in the selection */
    hsize_t         u;           /* Local index variable */
    herr_t          ret_value = SUCCEED; /* Return value */

    FUNC_ENTER_NOAPI_NOINIT

    /* Check args */
    HDassert(space);

    /* Get the total number of points selected */
    if ((snum_points = (hssize_t)H5S_GET_SELECT_NPOINTS(space)) < 0)
        HGOTO_ERROR(H5E_DATASPACE, H5E_CANTCOUNT, FAIL, "can't get number of elements selected")
    num_points = (hsize_t)snum_points;

    /* Allocate array for element displacements */
    if (NULL == (disp = (MPI_Aint *)H5MM_malloc(sizeof(MPI_Aint) * num_points)))
        HGOTO_ERROR(H5E_DATASPACE, H5E_CANTALLOC, FAIL, "can't allocate array of displacements")

    /* Allocate array for element permutation - returned to caller */
    if (do_permute)
        if (NULL == (*permute = (hsize_t *)H5MM_malloc(sizeof(hsize_t) * num_points)))
            HGOTO_ERROR(H5E_DATASPACE, H5E_CANTALLOC, FAIL, "can't allocate permutation array")

    /* Iterate through the linked list of selected points */
    curr = space->select.sel_info.pnt_lst->head;
    for (u = 0; u < num_points; u++) {
        /* Calculate the byte displacement of the current point from the
         * start of the (row-major linearized) extent */
        disp[u] = H5VM_array_offset(space->extent.rank, space->extent.size, curr->pnt);
        disp[u] *= elmt_size;

        /* This is a file space used to set the file view, so adjust the
         * displacements to make them monotonically non-decreasing.
         * Generate the permutation array by recording, for each point being
         * selected, the position it is shifted to in the new displacement
         * order.  Example:
         *   Suppose 4 points with the following displacements are selected:
         *     Pt 1: disp=6 ; Pt 2: disp=3 ; Pt 3: disp=0 ; Pt 4: disp=4
         *   The permute map to sort the displacements in order will be:
         *     point 1: map[0] = 4 (== num_points), indicating that this point
         *              is not moved (1st point selected)
         *     point 2: map[1] = 0, indicating that this point is moved to the
         *              first position, since disp_pt1(6) > disp_pt2(3)
         *     point 3: map[2] = 0, move to position 0, because it has the
         *              lowest disp among the points selected so far
         *     point 4: map[3] = 2, move to the 2nd position since point 1 has
         *              a higher disp, but points 2 and 3 have lower
         *              displacements
         */
        if (do_permute) {
            if (u > 0 && disp[u] < disp[u - 1]) {
                unsigned s = 0, l = u, m = u / 2;

                *is_permuted = TRUE;
                /* Binary search for the insertion position of disp[u] in the
                 * already-sorted prefix disp[0..u-1] */
                do {
                    if (disp[u] > disp[m])
                        s = m + 1;
                    else if (disp[u] < disp[m])
                        l = m;
                    else
                        break;
                    m = s + ((l - s) / 2);
                } while (s < l);

                /* Shift the larger displacements up and insert disp[u] */
                if (m < u) {
                    MPI_Aint temp;

                    temp = disp[u];
                    HDmemmove(disp + m + 1, disp + m, (u - m) * sizeof(MPI_Aint));
                    disp[m] = temp;
                } /* end if */
                (*permute)[u] = m;
            } /* end if */
            else
                /* num_points is the sentinel meaning "point not moved" */
                (*permute)[u] = num_points;
        } /* end if */
        /* This is a memory space, and no permutation is necessary to create
           the derived datatype */
        else {
            ; /* do nothing */
        } /* end else */

        /* Get the next point */
        curr = curr->next;
    } /* end for */

    /* Create the MPI datatype for the set of element displacements */
    if (H5S_mpio_create_point_datatype(elmt_size, num_points, disp, new_type) < 0)
        HGOTO_ERROR(H5E_DATASPACE, H5E_BADTYPE, FAIL, "couldn't create an MPI Datatype from point selection")

    /* Set values about MPI datatype created */
    *count = 1;
    *is_derived_type = TRUE;

done:
    if (NULL != disp)
        H5MM_free(disp);

    /* Release the permutation buffer, if it wasn't used */
    if (!(*is_permuted) && (*permute)) {
        H5MM_free(*permute);
        *permute = NULL;
    } /* end if */

    FUNC_LEAVE_NOAPI(ret_value)
} /* H5S_mpio_point_type() */
/*-------------------------------------------------------------------------
 * Function:    H5S_mpio_permute_type
 *
 * Purpose:     Translate an HDF5 "all/hyper/point" selection into an MPI
 *              type, while applying the permutation map.  This function is
 *              called if the file space selection is permuted due to
 *              out-of-order point selection, so the memory datatype has to
 *              be permuted using the permutation map created by the file
 *              selection.
 *
 * Note:        This routine is called from H5S_mpio_space_type(), which is
 *              called first for the file dataspace; that first call creates
 *              the permutation map, which this routine (invoked for the
 *              memory dataspace) consumes and then releases.
 *
 * Return:      Non-negative on success, negative on failure.
 *
 * Outputs:     *new_type        the MPI type corresponding to the selection
 *              *count           how many objects of the new_type in selection
 *                               (useful if this is the buffer type for xfer)
 *              *is_derived_type 0 if MPI primitive type, 1 if derived
 *
 * Programmer:  Mohamad Chaarawi
 *
 *-------------------------------------------------------------------------
 */
static herr_t
H5S_mpio_permute_type(const H5S_t *space, size_t elmt_size, hsize_t **permute, MPI_Datatype *new_type,
                      int *count, hbool_t *is_derived_type)
{
    MPI_Aint *     disp = NULL;           /* Datatype displacement for each point */
    H5S_sel_iter_t sel_iter;              /* Selection iteration info */
    hbool_t        sel_iter_init = FALSE; /* Selection iteration info has been initialized */
    hsize_t        off[H5D_IO_VECTOR_SIZE]; /* Array to store sequence offsets */
    size_t         len[H5D_IO_VECTOR_SIZE]; /* Array to store sequence lengths */
    hssize_t       snum_points;           /* Signed number of elements in selection */
    hsize_t        num_points;            /* Number of points in the selection */
    size_t         max_elem;              /* Maximum number of elements allowed in sequences */
    hsize_t        u;                     /* Local index variable */
    herr_t         ret_value = SUCCEED;   /* Return value */

    FUNC_ENTER_NOAPI_NOINIT

    /* Check args */
    HDassert(space);

    /* Get the total number of points selected */
    if ((snum_points = (hssize_t)H5S_GET_SELECT_NPOINTS(space)) < 0)
        HGOTO_ERROR(H5E_DATASPACE, H5E_CANTCOUNT, FAIL, "can't get number of elements selected")
    num_points = (hsize_t)snum_points;

    /* Allocate array to store point displacements */
    if (NULL == (disp = (MPI_Aint *)H5MM_malloc(sizeof(MPI_Aint) * num_points)))
        HGOTO_ERROR(H5E_DATASPACE, H5E_CANTALLOC, FAIL, "can't allocate array of displacements")

    /* Initialize selection iterator */
    if (H5S_select_iter_init(&sel_iter, space, elmt_size) < 0)
        HGOTO_ERROR(H5E_DATASPACE, H5E_CANTINIT, FAIL, "unable to initialize selection iterator")
    sel_iter_init = TRUE; /* Selection iteration info has been initialized */

    /* Set the number of elements to iterate over */
    H5_CHECKED_ASSIGN(max_elem, size_t, num_points, hsize_t);

    /* Loop, while elements left in selection */
    u = 0;
    while (max_elem > 0) {
        size_t nelem;    /* Number of elements used in sequences */
        size_t nseq;     /* Number of sequences generated */
        size_t curr_seq; /* Current sequence being worked on */

        /* Get the sequences of bytes (at most H5D_IO_VECTOR_SIZE at a time) */
        if (H5S_SELECT_GET_SEQ_LIST(space, 0, &sel_iter, (size_t)H5D_IO_VECTOR_SIZE, max_elem, &nseq, &nelem,
                                    off, len) < 0)
            HGOTO_ERROR(H5E_DATASPACE, H5E_UNSUPPORTED, FAIL, "sequence length generation failed")

        /* Loop, while sequences left to process */
        for (curr_seq = 0; curr_seq < nseq; curr_seq++) {
            hsize_t curr_off; /* Current offset within sequence */
            size_t  curr_len; /* Length of bytes left to process in sequence */

            /* Get the current offset */
            curr_off = off[curr_seq];

            /* Get the number of bytes in sequence */
            curr_len = len[curr_seq];

            /* Loop, while bytes left in sequence, one element at a time */
            while (curr_len > 0) {
                /* Set the displacement of the current point */
                disp[u] = curr_off;

                /* This is a memory displacement, so for each point selected,
                 * apply the map that was generated by the file selection.
                 * (A map entry equal to num_points means "not moved".) */
                if ((*permute)[u] != num_points) {
                    MPI_Aint temp = disp[u];

                    HDmemmove(disp + (*permute)[u] + 1, disp + (*permute)[u],
                              (u - (*permute)[u]) * sizeof(MPI_Aint));
                    disp[(*permute)[u]] = temp;
                } /* end if */

                /* Advance to next element */
                u++;

                /* Increment offset in dataspace */
                curr_off += elmt_size;

                /* Decrement number of bytes left in sequence */
                curr_len -= elmt_size;
            } /* end while */
        } /* end for */

        /* Decrement number of elements left to process */
        max_elem -= nelem;
    } /* end while */

    /* Create the MPI datatype for the set of element displacements */
    if (H5S_mpio_create_point_datatype(elmt_size, num_points, disp, new_type) < 0)
        HGOTO_ERROR(H5E_DATASPACE, H5E_BADTYPE, FAIL, "couldn't create an MPI Datatype from point selection")

    /* Set values about MPI datatype created */
    *count = 1;
    *is_derived_type = TRUE;

done:
    /* Release selection iterator */
    if (sel_iter_init)
        if (H5S_SELECT_ITER_RELEASE(&sel_iter) < 0)
            HDONE_ERROR(H5E_DATASPACE, H5E_CANTRELEASE, FAIL, "unable to release selection iterator")

    /* Free memory (the permutation map is consumed and released here) */
    if (disp)
        H5MM_free(disp);
    if (*permute) {
        H5MM_free(*permute);
        *permute = NULL;
    } /* end if */

    FUNC_LEAVE_NOAPI(ret_value)
} /* H5S_mpio_permute_type() */
/*-------------------------------------------------------------------------
* Function: H5S_mpio_hyper_type
*
* Purpose: Translate a regular HDF5 hyperslab selection into an MPI type.
*
* Return: Non-negative on success, negative on failure.
*
* Outputs: *new_type the MPI type corresponding to the selection
* *count how many objects of the new_type in selection
* (useful if this is the buffer type for xfer)
* *is_derived_type 0 if MPI primitive type, 1 if derived
*
* Programmer: rky 980813
*
*-------------------------------------------------------------------------
*/
static herr_t
H5S_mpio_hyper_type(const H5S_t *space, size_t elmt_size, MPI_Datatype *new_type, int *count,
hbool_t *is_derived_type)
{
H5S_sel_iter_t sel_iter; /* Selection iteration info */
hbool_t sel_iter_init = FALSE; /* Selection iteration info has been initialized */
struct dim { /* less hassle than malloc/free & ilk */
hssize_t start;
hsize_t strid;
hsize_t block;
hsize_t xtent;
hsize_t count;
} d[H5S_MAX_RANK];
hsize_t offset[H5S_MAX_RANK];
hsize_t max_xtent[H5S_MAX_RANK];
H5S_hyper_dim_t *diminfo; /* [rank] */
unsigned rank;
MPI_Datatype inner_type, outer_type;
MPI_Aint extent_len, start_disp, new_extent;
MPI_Aint lb; /* Needed as an argument for MPI_Type_get_extent */
unsigned u; /* Local index variable */
int i; /* Local index variable */
int mpi_code; /* MPI return code */
herr_t ret_value = SUCCEED;
FUNC_ENTER_NOAPI_NOINIT
/* Check args */
HDassert(space);
HDassert(sizeof(MPI_Aint) >= sizeof(elmt_size));
/* Initialize selection iterator */
if (H5S_select_iter_init(&sel_iter, space, elmt_size) < 0)
HGOTO_ERROR(H5E_DATASPACE, H5E_CANTINIT, FAIL, "unable to initialize selection iterator")
sel_iter_init = TRUE; /* Selection iteration info has been initialized */
/* Abbreviate args */
diminfo = sel_iter.u.hyp.diminfo;
HDassert(diminfo);
/* Make a local copy of the dimension info so we can operate with them */
/* Check if this is a "flattened" regular hyperslab selection */
if (sel_iter.u.hyp.iter_rank != 0 && sel_iter.u.hyp.iter_rank < space->extent.rank) {
/* Flattened selection */
rank = sel_iter.u.hyp.iter_rank;
HDassert(rank <= H5S_MAX_RANK); /* within array bounds */
#ifdef H5S_DEBUG
if (H5DEBUG(S))
HDfprintf(H5DEBUG(S), "%s: Flattened selection\n", FUNC);
#endif
for (u = 0; u < rank; ++u) {
H5_CHECK_OVERFLOW(diminfo[u].start, hsize_t, hssize_t)
d[u].start = (hssize_t)diminfo[u].start + sel_iter.u.hyp.sel_off[u];
d[u].strid = diminfo[u].stride;
d[u].block = diminfo[u].block;
d[u].count = diminfo[u].count;
d[u].xtent = sel_iter.u.hyp.size[u];
#ifdef H5S_DEBUG
if (H5DEBUG(S)) {
HDfprintf(H5DEBUG(S), "%s: start=%Hd stride=%Hu count=%Hu block=%Hu xtent=%Hu", FUNC,
d[u].start, d[u].strid, d[u].count, d[u].block, d[u].xtent);
if (u == 0)
HDfprintf(H5DEBUG(S), " rank=%u\n", rank);
else
HDfprintf(H5DEBUG(S), "\n");
}
#endif
if (0 == d[u].block)
goto empty;
if (0 == d[u].count)
goto empty;
if (0 == d[u].xtent)
goto empty;
} /* end for */
} /* end if */
else {
/* Non-flattened selection */
rank = space->extent.rank;
HDassert(rank <= H5S_MAX_RANK); /* within array bounds */
if (0 == rank)
goto empty;
#ifdef H5S_DEBUG
if (H5DEBUG(S))
HDfprintf(H5DEBUG(S), "%s: Non-flattened selection\n", FUNC);
#endif
for (u = 0; u < rank; ++u) {
H5_CHECK_OVERFLOW(diminfo[u].start, hsize_t, hssize_t)
d[u].start = (hssize_t)diminfo[u].start + space->select.offset[u];
d[u].strid = diminfo[u].stride;
d[u].block = diminfo[u].block;
d[u].count = diminfo[u].count;
d[u].xtent = space->extent.size[u];
#ifdef H5S_DEBUG
if (H5DEBUG(S)) {
HDfprintf(H5DEBUG(S), "%s: start=%Hd stride=%Hu count=%Hu block=%Hu xtent=%Hu", FUNC,
d[u].start, d[u].strid, d[u].count, d[u].block, d[u].xtent);
if (u == 0)
HDfprintf(H5DEBUG(S), " rank=%u\n", rank);
else
HDfprintf(H5DEBUG(S), "\n");
}
#endif
if (0 == d[u].block)
goto empty;
if (0 == d[u].count)
goto empty;
if (0 == d[u].xtent)
goto empty;
} /* end for */
} /* end else */
/**********************************************************************
Compute array "offset[rank]" which gives the offsets for a multi-
dimensional array with dimensions "d[i].xtent" (i=0,1,...,rank-1).
**********************************************************************/
offset[rank - 1] = 1;
max_xtent[rank - 1] = d[rank - 1].xtent;
#ifdef H5S_DEBUG
if (H5DEBUG(S)) {
i = ((int)rank) - 1;
HDfprintf(H5DEBUG(S), " offset[%2d]=%Hu; max_xtent[%2d]=%Hu\n", i, offset[i], i, max_xtent[i]);
}
#endif
for (i = ((int)rank) - 2; i >= 0; --i) {
offset[i] = offset[i + 1] * d[i + 1].xtent;
max_xtent[i] = max_xtent[i + 1] * d[i].xtent;
#ifdef H5S_DEBUG
if (H5DEBUG(S))
HDfprintf(H5DEBUG(S), " offset[%2d]=%Hu; max_xtent[%2d]=%Hu\n", i, offset[i], i, max_xtent[i]);
#endif
} /* end for */
/* Create a type covering the selected hyperslab.
* Multidimensional dataspaces are stored in row-major order.
* The type is built from the inside out, going from the
* fastest-changing (i.e., inner) dimension * to the slowest (outer).
*/
/*******************************************************
* Construct contig type for inner contig dims:
*******************************************************/
#ifdef H5S_DEBUG
if (H5DEBUG(S)) {
HDfprintf(H5DEBUG(S), "%s: Making contig type %Zu MPI_BYTEs\n", FUNC, elmt_size);
for (i = ((int)rank) - 1; i >= 0; --i)
HDfprintf(H5DEBUG(S), "d[%d].xtent=%Hu \n", i, d[i].xtent);
}
#endif
if (MPI_SUCCESS != (mpi_code = MPI_Type_contiguous((int)elmt_size, MPI_BYTE, &inner_type)))
HMPI_GOTO_ERROR(FAIL, "MPI_Type_contiguous failed", mpi_code)
/*******************************************************
* Construct the type by walking the hyperslab dims
* from the inside out:
*******************************************************/
for (i = ((int)rank) - 1; i >= 0; --i) {
#ifdef H5S_DEBUG
if (H5DEBUG(S))
HDfprintf(H5DEBUG(S),
"%s: Dimension i=%d \n"
"start=%Hd count=%Hu block=%Hu stride=%Hu, xtent=%Hu max_xtent=%d\n",
FUNC, i, d[i].start, d[i].count, d[i].block, d[i].strid, d[i].xtent, max_xtent[i]);
#endif
#ifdef H5S_DEBUG
if (H5DEBUG(S))
HDfprintf(H5DEBUG(S), "%s: i=%d Making vector-type \n", FUNC, i);
#endif
/****************************************
* Build vector type of the selection.
****************************************/
mpi_code = MPI_Type_vector((int)(d[i].count), /* count */
(int)(d[i].block), /* blocklength */
(int)(d[i].strid), /* stride */
inner_type, /* old type */
&outer_type); /* new type */
MPI_Type_free(&inner_type);
if (mpi_code != MPI_SUCCESS)
HMPI_GOTO_ERROR(FAIL, "couldn't create MPI vector type", mpi_code)
/****************************************
* Then build the dimension type as (start, vector type, xtent).
****************************************/
/* calculate start and extent values of this dimension */
start_disp = d[i].start * offset[i] * elmt_size;
new_extent = (MPI_Aint)elmt_size * max_xtent[i];
if (MPI_SUCCESS != (mpi_code = MPI_Type_get_extent(outer_type, &lb, &extent_len)))
HMPI_GOTO_ERROR(FAIL, "MPI_Type_get_extent failed", mpi_code)
/*************************************************
* Restructure this datatype ("outer_type")
* so that it still starts at 0, but its extent
* is the full extent in this dimension.
*************************************************/
if (start_disp > 0 || extent_len < new_extent) {
MPI_Datatype interm_type;
int block_len = 1;
HDassert(0 == lb);
mpi_code = MPI_Type_create_hindexed(1, &block_len, &start_disp, outer_type, &interm_type);
MPI_Type_free(&outer_type);
if (mpi_code != MPI_SUCCESS)
HMPI_GOTO_ERROR(FAIL, "MPI_Type_create_hindexed failed", mpi_code)
mpi_code = MPI_Type_create_resized(interm_type, lb, new_extent, &inner_type);
MPI_Type_free(&interm_type);
if (mpi_code != MPI_SUCCESS)
HMPI_GOTO_ERROR(FAIL, "couldn't resize MPI vector type", mpi_code)
} /* end if */
else
inner_type = outer_type;
} /* end for */
/******************************************
* End of loop, walking through dimensions.
*******************************************/
/* At this point inner_type is actually the outermost type, even for 0-trip loop */
*new_type = inner_type;
if (MPI_SUCCESS != (mpi_code = MPI_Type_commit(new_type)))
HMPI_GOTO_ERROR(FAIL, "MPI_Type_commit failed", mpi_code)
/* fill in the remaining return values */
*count = 1; /* only have to move one of these suckers! */
*is_derived_type = TRUE;
HGOTO_DONE(SUCCEED);
empty:
/* special case: empty hyperslab */
*new_type = MPI_BYTE;
*count = 0;
*is_derived_type = FALSE;
done:
/* Release selection iterator */
if (sel_iter_init)
if (H5S_SELECT_ITER_RELEASE(&sel_iter) < 0)
HDONE_ERROR(H5E_DATASPACE, H5E_CANTRELEASE, FAIL, "unable to release selection iterator")
#ifdef H5S_DEBUG
if (H5DEBUG(S))
HDfprintf(H5DEBUG(S), "Leave %s, count=%ld is_derived_type=%t\n", FUNC, *count, *is_derived_type);
#endif
FUNC_LEAVE_NOAPI(ret_value)
} /* end H5S_mpio_hyper_type() */
/*-------------------------------------------------------------------------
 * Function:    H5S_mpio_span_hyper_type
 *
 * Purpose:     Translate an HDF5 irregular hyperslab selection into an
 *              MPI type, built from the selection's span tree.
 *
 * Return:      Non-negative on success, negative on failure.
 *
 * Outputs:     *new_type         the MPI type corresponding to the selection
 *              *count            how many objects of the new_type in selection
 *                                (useful if this is the buffer type for xfer)
 *              *is_derived_type  0 if MPI primitive type, 1 if derived
 *
 * Programmer:  kyang
 *
 *-------------------------------------------------------------------------
 */
static herr_t
H5S_mpio_span_hyper_type(const H5S_t *space, size_t elmt_size, MPI_Datatype *new_type, int *count,
                         hbool_t *is_derived_type)
{
    MPI_Datatype elem_type;                 /* MPI datatype covering a single element */
    hbool_t      elem_type_created = FALSE; /* TRUE once elem_type exists and must be freed */
    MPI_Datatype tree_type;                 /* MPI datatype for the whole span tree */
    hsize_t      down_sizes[H5S_MAX_RANK];  /* 'down' sizes for each dimension */
    int          mpi_ret;                   /* MPI return code */
    herr_t       ret_value = SUCCEED;       /* Return value */

    FUNC_ENTER_NOAPI_NOINIT

    /* Sanity-check the arguments */
    HDassert(space);
    HDassert(space->extent.size);
    HDassert(space->select.sel_info.hslab->span_lst);
    HDassert(space->select.sel_info.hslab->span_lst->head);

    /* Build the base MPI type describing one element of elmt_size bytes */
    if (MPI_SUCCESS != (mpi_ret = MPI_Type_contiguous((int)elmt_size, MPI_BYTE, &elem_type)))
        HMPI_GOTO_ERROR(FAIL, "MPI_Type_contiguous failed", mpi_ret)
    elem_type_created = TRUE;

    /* Compute the accumulated lower-dimension ('down') sizes for each dimension */
    if (H5VM_array_down(space->extent.rank, space->extent.size, down_sizes) < 0)
        HGOTO_ERROR(H5E_DATASPACE, H5E_CANTGETSIZE, FAIL, "couldn't compute 'down' dimension sizes")

    /* Recursively walk the span tree, deriving the datatype for the selection */
    if (H5S_obtain_datatype(down_sizes, space->select.sel_info.hslab->span_lst->head, &elem_type,
                            &tree_type, elmt_size) < 0)
        HGOTO_ERROR(H5E_DATASPACE, H5E_BADTYPE, FAIL, "couldn't obtain MPI derived data type")
    if (MPI_SUCCESS != (mpi_ret = MPI_Type_commit(&tree_type)))
        HMPI_GOTO_ERROR(FAIL, "MPI_Type_commit failed", mpi_ret)
    *new_type = tree_type;

    /* Fill in the remaining return values */
    *count           = 1;
    *is_derived_type = TRUE;

done:
    /* Release the element base type, if it was created */
    if (elem_type_created)
        if (MPI_SUCCESS != (mpi_ret = MPI_Type_free(&elem_type)))
            HMPI_DONE_ERROR(FAIL, "MPI_Type_free failed", mpi_ret)

    FUNC_LEAVE_NOAPI(ret_value)
} /* end H5S_mpio_span_hyper_type() */
/*-------------------------------------------------------------------------
 * Function:    H5S_obtain_datatype
 *
 * Purpose:     Obtain an MPI derived datatype based on span-tree
 *              implementation.  Recurses down the span tree: a leaf level
 *              (spans with no 'down' children) becomes one hindexed type
 *              of element runs; an interior level becomes a struct of
 *              hvector types, each built from the next dimension down.
 *
 * Return:      Non-negative on success, negative on failure.
 *
 * Outputs:     *span_type       the MPI type corresponding to the selection
 *
 * Programmer:  kyang
 *
 *-------------------------------------------------------------------------
 */
static herr_t
H5S_obtain_datatype(const hsize_t *down, H5S_hyper_span_t *span, const MPI_Datatype *elmt_type,
                    MPI_Datatype *span_type, size_t elmt_size)
{
    size_t            alloc_count = 0;           /* Number of span tree nodes allocated at this level */
    size_t            outercount = 0;            /* Number of span tree nodes at this level */
    MPI_Datatype *    inner_type = NULL;         /* Per-node child datatypes (interior levels only) */
    hbool_t           inner_types_freed = FALSE; /* Whether the inner_type MPI datatypes have been freed */
    hbool_t           span_type_valid = FALSE;   /* Whether the span_type MPI datatypes is valid */
    int *             blocklen = NULL;           /* Block length per span node */
    MPI_Aint *        disp = NULL;               /* Byte displacement per span node */
    H5S_hyper_span_t *tspan;                     /* Temporary pointer to span tree node */
    int               mpi_code;                  /* MPI return status code */
    herr_t            ret_value = SUCCEED;       /* Return value */
    FUNC_ENTER_NOAPI_NOINIT
    /* Sanity check */
    HDassert(span);
    /* Allocate the initial displacement & block length buffers */
    alloc_count = H5S_MPIO_INITIAL_ALLOC_COUNT;
    if (NULL == (disp = (MPI_Aint *)H5MM_malloc(alloc_count * sizeof(MPI_Aint))))
        HGOTO_ERROR(H5E_DATASPACE, H5E_CANTALLOC, FAIL, "can't allocate array of displacements")
    if (NULL == (blocklen = (int *)H5MM_malloc(alloc_count * sizeof(int))))
        HGOTO_ERROR(H5E_DATASPACE, H5E_CANTALLOC, FAIL, "can't allocate array of block lengths")
    /* if this is the fastest changing dimension, it is the base case for derived datatype. */
    if (NULL == span->down) {
        /* Leaf level: each span in the sibling list contributes one
         * (displacement, block-length) run of elements; all runs are
         * collected into a single hindexed type. */
        tspan      = span;
        outercount = 0;
        while (tspan) {
            /* Check if we need to increase the size of the buffers */
            if (outercount >= alloc_count) {
                MPI_Aint *tmp_disp;     /* Temporary pointer to new displacement buffer */
                int *     tmp_blocklen; /* Temporary pointer to new block length buffer */
                /* Double the allocation count */
                alloc_count *= 2;
                /* Re-allocate the buffers */
                if (NULL == (tmp_disp = (MPI_Aint *)H5MM_realloc(disp, alloc_count * sizeof(MPI_Aint))))
                    HGOTO_ERROR(H5E_DATASPACE, H5E_CANTALLOC, FAIL, "can't allocate array of displacements")
                disp = tmp_disp;
                if (NULL == (tmp_blocklen = (int *)H5MM_realloc(blocklen, alloc_count * sizeof(int))))
                    HGOTO_ERROR(H5E_DATASPACE, H5E_CANTALLOC, FAIL, "can't allocate array of block lengths")
                blocklen = tmp_blocklen;
            } /* end if */
            /* Store displacement & block length */
            disp[outercount] = (MPI_Aint)elmt_size * tspan->low;
            H5_CHECK_OVERFLOW(tspan->nelem, hsize_t, int)
            blocklen[outercount] = (int)tspan->nelem;
            tspan = tspan->next;
            outercount++;
        } /* end while */
        if (MPI_SUCCESS !=
            (mpi_code = MPI_Type_create_hindexed((int)outercount, blocklen, disp, *elmt_type, span_type)))
            HMPI_GOTO_ERROR(FAIL, "MPI_Type_create_hindexed failed", mpi_code)
        span_type_valid = TRUE;
    } /* end if */
    else {
        /* Interior level: for each span node, recurse to build the child
         * ("down") datatype, wrap it in an hvector spanning tspan->nelem
         * repetitions, then combine all nodes with a struct type. */
        size_t u; /* Local index variable */
        if (NULL == (inner_type = (MPI_Datatype *)H5MM_malloc(alloc_count * sizeof(MPI_Datatype))))
            HGOTO_ERROR(H5E_DATASPACE, H5E_CANTALLOC, FAIL, "can't allocate array of inner MPI datatypes")
        tspan      = span;
        outercount = 0;
        while (tspan) {
            MPI_Datatype down_type; /* Temporary MPI datatype for a span tree node's children */
            MPI_Aint     stride;    /* Distance between inner MPI datatypes */
            /* Check if we need to increase the size of the buffers */
            if (outercount >= alloc_count) {
                MPI_Aint *    tmp_disp;       /* Temporary pointer to new displacement buffer */
                int *         tmp_blocklen;   /* Temporary pointer to new block length buffer */
                MPI_Datatype *tmp_inner_type; /* Temporary pointer to inner MPI datatype buffer */
                /* Double the allocation count */
                alloc_count *= 2;
                /* Re-allocate the buffers */
                if (NULL == (tmp_disp = (MPI_Aint *)H5MM_realloc(disp, alloc_count * sizeof(MPI_Aint))))
                    HGOTO_ERROR(H5E_DATASPACE, H5E_CANTALLOC, FAIL, "can't allocate array of displacements")
                disp = tmp_disp;
                if (NULL == (tmp_blocklen = (int *)H5MM_realloc(blocklen, alloc_count * sizeof(int))))
                    HGOTO_ERROR(H5E_DATASPACE, H5E_CANTALLOC, FAIL, "can't allocate array of block lengths")
                blocklen = tmp_blocklen;
                if (NULL == (tmp_inner_type = (MPI_Datatype *)H5MM_realloc(
                                 inner_type, alloc_count * sizeof(MPI_Datatype))))
                    HGOTO_ERROR(H5E_DATASPACE, H5E_CANTALLOC, FAIL,
                                "can't allocate array of inner MPI datatypes")
                inner_type = tmp_inner_type;
            } /* end if */
            /* Displacement should be in byte and should have dimension information */
            /* First using MPI Type vector to build derived data type for this span only */
            /* Need to calculate the disp in byte for this dimension. */
            /* Calculate the total bytes of the lower dimension */
            disp[outercount]     = tspan->low * (*down) * elmt_size;
            blocklen[outercount] = 1;
            /* Generate MPI datatype for next dimension down */
            if (H5S_obtain_datatype(down + 1, tspan->down->head, elmt_type, &down_type, elmt_size) < 0)
                HGOTO_ERROR(H5E_DATASPACE, H5E_BADTYPE, FAIL, "couldn't obtain MPI derived data type")
            /* Build the MPI datatype for this node */
            stride = (*down) * elmt_size;
            H5_CHECK_OVERFLOW(tspan->nelem, hsize_t, int)
            if (MPI_SUCCESS != (mpi_code = MPI_Type_create_hvector((int)tspan->nelem, 1, stride, down_type,
                                                                   &inner_type[outercount]))) {
                /* Free the child type here; the normal free below is skipped
                 * because the error macro jumps to 'done' */
                MPI_Type_free(&down_type);
                HMPI_GOTO_ERROR(FAIL, "MPI_Type_create_hvector failed", mpi_code)
            } /* end if */
            /* Release MPI datatype for next dimension down */
            if (MPI_SUCCESS != (mpi_code = MPI_Type_free(&down_type)))
                HMPI_GOTO_ERROR(FAIL, "MPI_Type_free failed", mpi_code)
            tspan = tspan->next;
            outercount++;
        } /* end while */
        /* building the whole vector datatype */
        H5_CHECK_OVERFLOW(outercount, size_t, int)
        if (MPI_SUCCESS !=
            (mpi_code = MPI_Type_create_struct((int)outercount, blocklen, disp, inner_type, span_type)))
            HMPI_GOTO_ERROR(FAIL, "MPI_Type_create_struct failed", mpi_code)
        span_type_valid = TRUE;
        /* Release inner node types */
        for (u = 0; u < outercount; u++)
            if (MPI_SUCCESS != (mpi_code = MPI_Type_free(&inner_type[u])))
                HMPI_GOTO_ERROR(FAIL, "MPI_Type_free failed", mpi_code)
        inner_types_freed = TRUE;
    } /* end else */
done:
    /* General cleanup */
    if (inner_type != NULL) {
        /* On an error path the per-node child types may not have been freed yet */
        if (!inner_types_freed) {
            size_t u; /* Local index variable */
            for (u = 0; u < outercount; u++)
                if (MPI_SUCCESS != (mpi_code = MPI_Type_free(&inner_type[u])))
                    HMPI_DONE_ERROR(FAIL, "MPI_Type_free failed", mpi_code)
        } /* end if */
        H5MM_free(inner_type);
    } /* end if */
    if (blocklen != NULL)
        H5MM_free(blocklen);
    if (disp != NULL)
        H5MM_free(disp);
    /* Error cleanup */
    if (ret_value < 0) {
        /* Don't hand a partially-built type back to the caller */
        if (span_type_valid)
            if (MPI_SUCCESS != (mpi_code = MPI_Type_free(span_type)))
                HMPI_DONE_ERROR(FAIL, "MPI_Type_free failed", mpi_code)
    } /* end if */
    FUNC_LEAVE_NOAPI(ret_value)
} /* end H5S_obtain_datatype() */
/*-------------------------------------------------------------------------
 * Function:    H5S_mpio_space_type
 *
 * Purpose:     Translate an HDF5 dataspace selection into an MPI type.
 *              Currently handle only hyperslab and "all" selections.
 *              Dispatches on the selection type; if the file space was
 *              previously permuted by an out-of-order point selection,
 *              the memory selection is permuted to match instead.
 *
 * Return:      Non-negative on success, negative on failure.
 *
 * Outputs:     *new_type         the MPI type corresponding to the selection
 *              *count            how many objects of the new_type in selection
 *                                (useful if this is the buffer type for xfer)
 *              *is_derived_type  0 if MPI primitive type, 1 if derived
 *
 * Programmer:  rky 980813
 *
 *-------------------------------------------------------------------------
 */
herr_t
H5S_mpio_space_type(const H5S_t *space, size_t elmt_size, MPI_Datatype *new_type, int *count,
                    hbool_t *is_derived_type, hbool_t do_permute, hsize_t **permute_map, hbool_t *is_permuted)
{
    herr_t ret_value = SUCCEED; /* Return value */
    FUNC_ENTER_NOAPI_NOINIT
    /* Check args */
    HDassert(space);
    HDassert(elmt_size);
    /* Create MPI type based on the kind of selection */
    switch (H5S_GET_EXTENT_TYPE(space)) {
        case H5S_NULL:
        case H5S_SCALAR:
        case H5S_SIMPLE:
            /* If the file space has been permuted previously due to
             * out-of-order point selection, then permute this selection which
             * should be a memory selection to match the file space permutation.
             */
            if (TRUE == *is_permuted) {
                switch (H5S_GET_SELECT_TYPE(space)) {
                    case H5S_SEL_NONE:
                        if (H5S_mpio_none_type(new_type, count, is_derived_type) < 0)
                            HGOTO_ERROR(H5E_DATASPACE, H5E_BADTYPE, FAIL,
                                        "couldn't convert 'none' selection to MPI type")
                        break;
                    /* All non-empty selection types are handled by the permute
                     * routine (NOTE(review): error text says 'all' although
                     * this branch also covers point/hyperslab selections) */
                    case H5S_SEL_ALL:
                    case H5S_SEL_POINTS:
                    case H5S_SEL_HYPERSLABS:
                        /* Sanity check */
                        HDassert(!do_permute);
                        if (H5S_mpio_permute_type(space, elmt_size, permute_map, new_type, count,
                                                  is_derived_type) < 0)
                            HGOTO_ERROR(H5E_DATASPACE, H5E_BADTYPE, FAIL,
                                        "couldn't convert 'all' selection to MPI type")
                        break;
                    case H5S_SEL_ERROR:
                    case H5S_SEL_N:
                    default:
                        HDassert("unknown selection type" && 0);
                        break;
                } /* end switch */
            }     /* end if */
            /* the file space is not permuted, so do a regular selection */
            else {
                switch (H5S_GET_SELECT_TYPE(space)) {
                    case H5S_SEL_NONE:
                        if (H5S_mpio_none_type(new_type, count, is_derived_type) < 0)
                            HGOTO_ERROR(H5E_DATASPACE, H5E_BADTYPE, FAIL,
                                        "couldn't convert 'none' selection to MPI type")
                        break;
                    case H5S_SEL_ALL:
                        if (H5S_mpio_all_type(space, elmt_size, new_type, count, is_derived_type) < 0)
                            HGOTO_ERROR(H5E_DATASPACE, H5E_BADTYPE, FAIL,
                                        "couldn't convert 'all' selection to MPI type")
                        break;
                    /* Point selections may set *is_permuted for subsequent calls */
                    case H5S_SEL_POINTS:
                        if (H5S_mpio_point_type(space, elmt_size, new_type, count, is_derived_type,
                                                do_permute, permute_map, is_permuted) < 0)
                            HGOTO_ERROR(H5E_DATASPACE, H5E_BADTYPE, FAIL,
                                        "couldn't convert 'point' selection to MPI type")
                        break;
                    /* Regular hyperslabs use the fast vector-based path;
                     * irregular ones go through the span-tree path */
                    case H5S_SEL_HYPERSLABS:
                        if ((H5S_SELECT_IS_REGULAR(space) == TRUE)) {
                            if (H5S_mpio_hyper_type(space, elmt_size, new_type, count, is_derived_type) < 0)
                                HGOTO_ERROR(H5E_DATASPACE, H5E_BADTYPE, FAIL,
                                            "couldn't convert regular 'hyperslab' selection to MPI type")
                        } /* end if */
                        else {
                            if (H5S_mpio_span_hyper_type(space, elmt_size, new_type, count, is_derived_type) <
                                0)
                                HGOTO_ERROR(H5E_DATASPACE, H5E_BADTYPE, FAIL,
                                            "couldn't convert irregular 'hyperslab' selection to MPI type")
                        } /* end else */
                        break;
                    case H5S_SEL_ERROR:
                    case H5S_SEL_N:
                    default:
                        HDassert("unknown selection type" && 0);
                        break;
                } /* end switch */
            }     /* end else */
            break;
        case H5S_NO_CLASS:
        default:
            HDassert("unknown data space type" && 0);
            break;
    } /* end switch */
done:
    FUNC_LEAVE_NOAPI(ret_value)
} /* end H5S_mpio_space_type() */
#endif /* H5_HAVE_PARALLEL */
| {
"content_hash": "b2554fddf94e74bba2a222e44eac7c5d",
"timestamp": "",
"source": "github",
"line_count": 1080,
"max_line_length": 110,
"avg_line_length": 42.730555555555554,
"alnum_prop": 0.5296972848815792,
"repo_name": "modelica-3rdparty/ExternData",
"id": "db46c15432d7d62f556bc717297a138dbb0c4f87",
"size": "47097",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ExternData/Resources/C-Sources/hdf5/src/H5Smpio.c",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "20267171"
},
{
"name": "C++",
"bytes": "54724"
},
{
"name": "M4",
"bytes": "1031"
},
{
"name": "Makefile",
"bytes": "4657"
},
{
"name": "Modelica",
"bytes": "180087"
},
{
"name": "Python",
"bytes": "12588"
},
{
"name": "Shell",
"bytes": "3995"
}
],
"symlink_target": ""
} |
package org.fusesource.scalate.filter.less
import org.fusesource.scalate.{ TemplateEngineAddOn, RenderContext, TemplateEngine }
import com.asual.lesscss.{ LessEngine, LessOptions }
import com.asual.lesscss.loader.ResourceLoader
import org.fusesource.scalate.filter.{ Filter, NoLayoutFilter }
import org.fusesource.scalate.util.IOUtil
import java.io.IOException
/**
 * Renders Less syntax inside templates.
 *
 * @author <a href="mailto:stuart.roebuck@gmail.com">Stuart Roebuck</a>
 */
class LessFilter(lessEngine: LessEngine) extends Filter {

  /** Compiles the Less `content` to CSS and wraps it in a `<style>` element. */
  def filter(context: RenderContext, content: String) = {
    // Synchronize on the shared LessEngine rather than on this filter instance:
    // the same engine is also driven by LessPipeline, and locking `this` would
    // not serialize those concurrent compile calls against one another.
    val css = lessEngine.synchronized {
      lessEngine.compile(content, context.currentTemplate).stripLineEnd
    }
    """<style type="text/css">%n%s%n</style>""".format(css)
  }
}
/**
 * Renders standalone less files
 *
 * @author <a href="mailto:rafal.krzewski@caltha.pl>Rafał Krzewski</a>
 */
class LessPipeline(private val lessEngine: LessEngine) extends Filter {

  /** Compiles the standalone Less source `content` to CSS. */
  def filter(context: RenderContext, content: String) =
    // The original body was wrapped in two nested `synchronized` blocks on the
    // same monitor — one suffices.  Lock on the shared LessEngine so access is
    // serialized with LessFilter, which wraps the same engine instance.
    lessEngine.synchronized {
      lessEngine.compile(content, context.currentTemplate)
    }
}
/**
 * Engine add-on for processing lesscss.
 *
 * @author <a href="mailto:rafal.krzewski@caltha.pl>Rafał Krzewski</a>
 */
object LessAddOn extends TemplateEngineAddOn {

  /** Registers the Less filter, the standalone pipeline and the "less"
   *  template extension for CSS on the given engine. */
  def apply(te: TemplateEngine) {
    val engine = new LessEngine(new LessOptions, new ScalateResourceLoader(te))
    te.filters += "less" -> new LessFilter(engine)
    te.pipelines += "less" -> List(NoLayoutFilter(new LessPipeline(engine), "text/css"))
    te.templateExtensionsFor("css") += "less"
  }

  /**
   * Bridge between Scalate and less resource loading.
   */
  class ScalateResourceLoader(private val engine: TemplateEngine) extends ResourceLoader {

    def exists(path: String): Boolean =
      engine.resourceLoader.resource(path).isDefined

    def load(path: String, charset: String): String =
      engine.resourceLoader.resource(path) match {
        case Some(resource) => IOUtil.loadText(resource.inputStream, charset)
        case None           => throw new IOException("No such file: " + path)
      }
  }
} | {
"content_hash": "018a1eb724a1d0dadeecf21e751ccb98",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 98,
"avg_line_length": 32.608695652173914,
"alnum_prop": 0.7071111111111111,
"repo_name": "maslovalex/scalate",
"id": "cbd15f48ef5597d48c9f4a9ee69ee1a8585ebd5c",
"size": "2989",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scalate-less/src/main/scala/org/fusesource/scalate/filter/less/LessFilter.scala",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "16530"
},
{
"name": "CoffeeScript",
"bytes": "22"
},
{
"name": "HTML",
"bytes": "33540"
},
{
"name": "Java",
"bytes": "7903"
},
{
"name": "JavaScript",
"bytes": "5226"
},
{
"name": "Ruby",
"bytes": "561628"
},
{
"name": "Scala",
"bytes": "963872"
},
{
"name": "Shell",
"bytes": "313"
}
],
"symlink_target": ""
} |
package com.google.errorprone.bugpatterns;
import static com.google.errorprone.BugPattern.SeverityLevel.WARNING;
import static com.google.errorprone.util.ASTHelpers.getAnnotationWithSimpleName;
import static com.google.errorprone.util.ASTHelpers.isConsideredFinal;
import com.google.errorprone.BugPattern;
import com.google.errorprone.VisitorState;
import com.google.errorprone.annotations.Var;
import com.google.errorprone.bugpatterns.BugChecker.VariableTreeMatcher;
import com.google.errorprone.fixes.Fix;
import com.google.errorprone.fixes.SuggestedFix;
import com.google.errorprone.fixes.SuggestedFixes;
import com.google.errorprone.matchers.Description;
import com.google.errorprone.util.ASTHelpers;
import com.sun.source.tree.ForLoopTree;
import com.sun.source.tree.Tree;
import com.sun.source.tree.VariableTree;
import com.sun.source.util.TreePath;
import com.sun.tools.javac.code.Flags;
import com.sun.tools.javac.code.Source;
import com.sun.tools.javac.code.Symbol;
import com.sun.tools.javac.tree.JCTree;
import com.sun.tools.javac.tree.TreeInfo;
import java.util.Collections;
import java.util.EnumSet;
import java.util.Optional;
import javax.lang.model.element.Modifier;
/** A {@link BugChecker}; see the associated {@link BugPattern} annotation for details. */
@BugPattern(
    name = "Var",
    summary = "Non-constant variable missing @Var annotation",
    severity = WARNING)
public class VarChecker extends BugChecker implements VariableTreeMatcher {

  private static final String UNNECESSARY_FINAL = "Unnecessary 'final' modifier.";

  @Override
  public Description matchVariable(VariableTree tree, VisitorState state) {
    Symbol varSymbol = ASTHelpers.getSymbol(tree);
    if (ASTHelpers.hasAnnotation(varSymbol, Var.class, state)) {
      // Already annotated: flag the annotation itself if the variable is in
      // fact never reassigned; otherwise the annotation is legitimate.
      if ((varSymbol.flags() & Flags.EFFECTIVELY_FINAL) == 0) {
        return Description.NO_MATCH;
      }
      return buildDescription(tree)
          .setMessage("@Var variable is never modified")
          .addFix(
              SuggestedFix.delete(
                  getAnnotationWithSimpleName(tree.getModifiers().getAnnotations(), "Var")))
          .build();
    }
    // Skip generated code, synthetic receiver parameters, and for-loop index
    // variables (the latter are implicitly @Var).
    // TODO(cushon): consider requiring @Var if the index is modified in the body of the loop
    if (!ASTHelpers.getGeneratedBy(state).isEmpty()
        || TreeInfo.isReceiverParam((JCTree) tree)
        || forLoopVariable(tree, state.getPath())) {
      return Description.NO_MATCH;
    }
    switch (varSymbol.getKind()) {
      case PARAMETER:
      case LOCAL_VARIABLE:
      case EXCEPTION_PARAMETER:
      case RESOURCE_VARIABLE:
        return handleLocalOrParam(tree, state, varSymbol);
      default:
        return Description.NO_MATCH;
    }
  }

  /** Returns true if {@code tree} is declared in the initializer of a for loop. */
  boolean forLoopVariable(VariableTree tree, TreePath path) {
    Tree enclosing = path.getParentPath().getLeaf();
    return enclosing instanceof ForLoopTree
        && ((ForLoopTree) enclosing).getInitializer().contains(tree);
  }

  /** Handles locals, parameters and similar variables that may need @Var. */
  private Description handleLocalOrParam(VariableTree tree, VisitorState state, Symbol sym) {
    if (sym.getModifiers().contains(Modifier.FINAL)) {
      boolean atLeastJava8 = Source.instance(state.context).compareTo(Source.lookup("1.8")) >= 0;
      if (atLeastJava8) {
        // In Java 8, the final modifier is never necessary on locals/parameters because
        // effectively final variables can be used anywhere a final variable is required.
        Optional<SuggestedFix> fix = SuggestedFixes.removeModifiers(tree, state, Modifier.FINAL);
        // The fix may not be present for TWR variables that were not explicitly final
        if (fix.isPresent()) {
          return buildDescription(tree).setMessage(UNNECESSARY_FINAL).addFix(fix.get()).build();
        }
      }
      return Description.NO_MATCH;
    }
    // Flow information isn't collected for body-less (abstract/native) methods.
    boolean ownerHasNoBody =
        !Collections.disjoint(
            sym.owner.getModifiers(), EnumSet.of(Modifier.ABSTRACT, Modifier.NATIVE));
    if (ownerHasNoBody || isConsideredFinal(sym)) {
      return Description.NO_MATCH;
    }
    return describeMatch(tree, addVarAnnotation(tree));
  }

  /** Builds a fix that prefixes the declaration with @Var and imports the annotation. */
  private static Fix addVarAnnotation(VariableTree tree) {
    return SuggestedFix.builder().prefixWith(tree, "@Var ").addImport(Var.class.getName()).build();
  }
}
| {
"content_hash": "25b4ecee6d46dc271e930e1482ad4816",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 99,
"avg_line_length": 38.89380530973451,
"alnum_prop": 0.7167235494880546,
"repo_name": "google/error-prone",
"id": "d0e8b625834cb268f324c6ef93b31568657f0ed6",
"size": "5002",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "core/src/main/java/com/google/errorprone/bugpatterns/VarChecker.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "9329704"
},
{
"name": "Mustache",
"bytes": "1939"
},
{
"name": "Shell",
"bytes": "1915"
},
{
"name": "Starlark",
"bytes": "959"
}
],
"symlink_target": ""
} |
<?php
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class AddDatesToTypesTable extends Migration
{
    /**
     * Run the migrations: add the soft-delete timestamp column
     * (`deleted_at`) to the `types` table.
     *
     * @return void
     */
    public function up()
    {
        Schema::table('types', function (Blueprint $blueprint) {
            $blueprint->softDeletes();
        });
    }

    /**
     * Reverse the migrations: remove the soft-delete column again.
     *
     * @return void
     */
    public function down()
    {
        Schema::table('types', function (Blueprint $blueprint) {
            $blueprint->dropColumn('deleted_at');
        });
    }
}
| {
"content_hash": "905cdc6d678fbda77b0663874bd1f041",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 60,
"avg_line_length": 19.129032258064516,
"alnum_prop": 0.5497470489038786,
"repo_name": "Meepnix/libreBudgetSnapShot",
"id": "86bb4146378668bd97b5699ba4389070a83a5b7d",
"size": "593",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "database/migrations/2017_11_26_182429_add_dates_to_types_table.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "64454"
},
{
"name": "JavaScript",
"bytes": "623"
},
{
"name": "PHP",
"bytes": "96680"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<!--[if lt IE 7 ]><html class="ie ie6" lang="en"> <![endif]-->
<!--[if IE 7 ]><html class="ie ie7" lang="en"> <![endif]-->
<!--[if IE 8 ]><html class="ie ie8" lang="en"> <![endif]-->
<!--[if (gte IE 9)|!(IE)]><!-->
<html lang="en" xmlns="http://www.w3.org/1999/html"> <!--<![endif]-->
<head>
<!-- Basic Page Needs
================================================== -->
<meta charset="utf-8" />
<title>icon-angle-right: Font Awesome Icons</title>
<meta name="description" content="Font Awesome, the iconic font designed for Bootstrap">
<meta name="author" content="Dave Gandy">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<!--<meta name="viewport" content="initial-scale=1; maximum-scale=1">-->
<!--[if lt IE 9]>
<script src="http://html5shim.googlecode.com/svn/trunk/html5.js"></script>
<![endif]-->
<!-- CSS
================================================== -->
<link rel="stylesheet" href="../../assets/css/site.css">
<link rel="stylesheet" href="../../assets/css/pygments.css">
<link rel="stylesheet" href="../../assets/font-awesome/css/font-awesome.css">
<!--[if IE 7]>
<link rel="stylesheet" href="../../assets/font-awesome/css/font-awesome-ie7.css">
<![endif]-->
<!-- Le fav and touch icons -->
<link rel="shortcut icon" href="../../assets/ico/favicon.ico">
<script type="text/javascript" src="//use.typekit.net/wnc7ioh.js"></script>
<script type="text/javascript">try{Typekit.load();}catch(e){}</script>
<script type="text/javascript">
var _gaq = _gaq || [];
_gaq.push(['_setAccount', 'UA-30136587-1']);
_gaq.push(['_trackPageview']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
})();
</script>
</head>
<body data-spy="scroll" data-target=".navbar">
<div class="wrapper"> <!-- necessary for sticky footer. wrap all content except footer -->
<div class="navbar navbar-inverse navbar-static-top hidden-print">
<div class="navbar-inner">
<div class="container">
<a class="btn btn-navbar" data-toggle="collapse" data-target=".nav-collapse">
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</a>
<a class="brand" href="../../"><i class="icon-flag"></i> Font Awesome</a>
<div class="nav-collapse collapse">
<ul class="nav">
<li class="hidden-tablet "><a href="../../">Home</a></li>
<li><a href="../../get-started/">Get Started</a></li>
<li class="dropdown-split-left"><a href="../../icons/">Icons</a></li>
<li class="dropdown dropdown-split-right hidden-phone">
<a href="#" class="dropdown-toggle" data-toggle="dropdown">
<i class="icon-caret-down"></i>
</a>
<ul class="dropdown-menu pull-right">
<li><a href="../../icons/"><i class="icon-flag icon-fixed-width"></i> Icons</a></li>
<li class="divider"></li>
<li><a href="../../icons/#new"><i class="icon-shield icon-fixed-width"></i> New Icons in 3.2.1</a></li>
<li><a href="../../icons/#web-application"><i class="icon-camera-retro icon-fixed-width"></i> Web Application Icons</a></li>
<li><a href="../../icons/#currency"><i class="icon-won icon-fixed-width"></i> Currency Icons</a></li>
<li><a href="../../icons/#text-editor"><i class="icon-file-text-alt icon-fixed-width"></i> Text Editor Icons</a></li>
<li><a href="../../icons/#directional"><i class="icon-hand-right icon-fixed-width"></i> Directional Icons</a></li>
<li><a href="../../icons/#video-player"><i class="icon-play-sign icon-fixed-width"></i> Video Player Icons</a></li>
<li><a href="../../icons/#brand"><i class="icon-github icon-fixed-width"></i> Brand Icons</a></li>
<li><a href="../../icons/#medical"><i class="icon-medkit icon-fixed-width"></i> Medical Icons</a></li>
</ul>
</li>
<li class="dropdown-split-left"><a href="../../examples/">Examples</a></li>
<li class="dropdown dropdown-split-right hidden-phone">
<a href="#" class="dropdown-toggle" data-toggle="dropdown">
<i class="icon-caret-down"></i>
</a>
<ul class="dropdown-menu pull-right">
<li><a href="../../examples/">Examples</a></li>
<li class="divider"></li>
<li><a href="../../examples/#new-styles">New Styles</a></li>
<li><a href="../../examples/#inline-icons">Inline Icons</a></li>
<li><a href="../../examples/#larger-icons">Larger Icons</a></li>
<li><a href="../../examples/#bordered-pulled">Bordered & Pulled</a></li>
<li><a href="../../examples/#buttons">Buttons</a></li>
<li><a href="../../examples/#button-groups">Button Groups</a></li>
<li><a href="../../examples/#button-dropdowns">Button Dropdowns</a></li>
<li><a href="../../examples/#bulleted-lists">Bulleted Lists</a></li>
<li><a href="../../examples/#navigation">Navigation</a></li>
<li><a href="../../examples/#form-inputs">Form Inputs</a></li>
<li><a href="../../examples/#animated-spinner">Animated Spinner</a></li>
<li><a href="../../examples/#rotated-flipped">Rotated & Flipped</a></li>
<li><a href="../../examples/#stacked">Stacked</a></li>
<li><a href="../../examples/#custom">Custom CSS</a></li>
</ul>
</li>
<li><a href="../../whats-new/">
<span class="hidden-tablet">What's </span>New</a>
</li>
<li><a href="../../community/">Community</a></li>
<li><a href="../../license/">License</a></li>
</ul>
<ul class="nav pull-right">
<li><a href="http://blog.fontawesome.io">Blog</a></li>
</ul>
</div>
</div>
</div>
</div>
<div class="jumbotron jumbotron-icon">
<div class="container">
<div class="info-icons">
<i class="icon-angle-right icon-6"></i>
<span class="hidden-phone">
<i class="icon-angle-right icon-5"></i>
<span class="hidden-tablet"><i class="icon-angle-right icon-4"></i> </span>
<i class="icon-angle-right icon-3"></i>
<i class="icon-angle-right icon-2"></i>
</span>
<i class="icon-angle-right icon-1"></i>
</div>
<h1 class="info-class">
icon-angle-right
<small>
<i class="icon-angle-right"></i> ·
Unicode: <span class="upper">f105</span> ·
Created: v3.0 ·
Categories:
Directional Icons
</small>
</h1>
</div>
</div>
<div class="container">
<section>
<div class="row-fluid">
<div class="span9">
<p>After you get <a href="../../integration/">up and running</a>, you can place Font Awesome icons just about anywhere with the <code><i></code> tag:</p>
<div class="well well-transparent">
<div style="font-size: 24px; line-height: 1.5em;">
<i class="icon-angle-right"></i> icon-angle-right
</div>
</div>
<div class="highlight"><pre><code class="html"><span class="nt"><i</span> <span class="na">class=</span><span class="s">"icon-angle-right"</span><span class="nt">></i></span> icon-angle-right
</code></pre></div>
<br>
<div class="lead"><i class="icon-info-sign"></i> Looking for more? Check out the <a href="../../examples/">examples</a>.</div>
</div>
<div class="span3">
<div class="info-ad"><div id="carbonads-container"><div class="carbonad"><div id="azcarbon"></div><script type="text/javascript">var z = document.createElement("script"); z.type = "text/javascript"; z.async = true; z.src = "http://engine.carbonads.com/z/32291/azcarbon_2_1_0_VERT"; var s = document.getElementsByTagName("script")[0]; s.parentNode.insertBefore(z, s);</script></div></div>
</div>
</div>
</div>
</section>
</div>
<div class="push"><!-- necessary for sticky footer --></div>
</div>
<footer class="footer hidden-print">
<div class="container text-center">
<div>
<i class="icon-flag"></i> Font Awesome 3.2.1
<span class="hidden-phone">·</span><br class="visible-phone">
Created and Maintained by <a href="http://twitter.com/davegandy">Dave Gandy</a>
</div>
<div>
Font Awesome licensed under <a href="http://scripts.sil.org/OFL">SIL OFL 1.1</a>
<span class="hidden-phone">·</span><br class="visible-phone">
Code licensed under <a href="http://opensource.org/licenses/mit-license.html">MIT License</a>
<span class="hidden-phone hidden-tablet">·</span><br class="visible-phone visible-tablet">
Documentation licensed under <a href="http://creativecommons.org/licenses/by/3.0/">CC BY 3.0</a>
</div>
<div>
Thanks to <a href="http://maxcdn.com"><i class="icon-maxcdn"></i> MaxCDN</a> for providing the excellent <a href="http://www.bootstrapcdn.com/#tab_fontawesome">BootstrapCDN for Font Awesome</a>
</div>
<div class="project">
<a href="https://github.com/FortAwesome/Font-Awesome">GitHub Project</a> ·
<a href="https://github.com/FortAwesome/Font-Awesome/issues">Issues</a>
</div>
</div>
</footer>
<script src="http://platform.twitter.com/widgets.js"></script>
<script src="../../assets/js/jquery-1.7.1.min.js"></script>
<script src="../../assets/js/ZeroClipboard-1.1.7.min.js"></script>
<script src="../../assets/js/bootstrap-2.3.1.min.js"></script>
<script src="../../assets/js/site.js"></script>
</body>
</html>
| {
"content_hash": "2aaf5bfed59254f80e00aefd30f40f15",
"timestamp": "",
"source": "github",
"line_count": 208,
"max_line_length": 395,
"avg_line_length": 48.48557692307692,
"alnum_prop": 0.5709469509172038,
"repo_name": "vmetayer/co-brand",
"id": "1fd15f51738620847823fb62d744e8ba957b0d8c",
"size": "10085",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bower_components/font-awesome/src/3.2.1/icon/angle-right/index.html",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "43558"
},
{
"name": "JavaScript",
"bytes": "315611"
},
{
"name": "PHP",
"bytes": "17"
}
],
"symlink_target": ""
} |
<?php
declare(strict_types=1);
/**
* This code was auto-generated by {this script}[https://github.com/cucumber/common/blob/main/messages/jsonschema/scripts/codegen.rb]
*/
namespace Cucumber\Messages;
use JsonSerializable;
use Cucumber\Messages\DecodingException\SchemaViolationException;
/**
* Represents the PickleTableRow message in Cucumber's message protocol
* @see https://github.com/cucumber/common/tree/main/messages#readme
*
*/
final class PickleTableRow implements JsonSerializable
{
    use JsonEncodingTrait;

    /**
     * Construct the PickleTableRow with all properties
     *
     * @param list<PickleTableCell> $cells
     */
    public function __construct(
        public readonly array $cells = [],
    ) {
    }

    /**
     * Build a PickleTableRow from a decoded JSON array.
     *
     * @throws SchemaViolationException when 'cells' is missing or not an array
     *
     * @internal
     */
    public static function fromArray(array $arr): self
    {
        self::ensureCells($arr);

        return new self(
            array_values(array_map(fn (array $member) => PickleTableCell::fromArray($member), $arr['cells'])),
        );
    }

    /**
     * Validate that the 'cells' property is present and is an array.
     *
     * @psalm-assert array{cells: array} $arr
     */
    private static function ensureCells(array $arr): void
    {
        if (!array_key_exists('cells', $arr)) {
            throw new SchemaViolationException('Property \'cells\' is required but was not found');
        }
        // The key is guaranteed to exist here (the check above throws otherwise),
        // so only the type needs verifying.
        if (!is_array($arr['cells'])) {
            throw new SchemaViolationException('Property \'cells\' was not array');
        }
    }
}
| {
"content_hash": "d6667e71574b37c4962a5be72e9308f7",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 133,
"avg_line_length": 26.016949152542374,
"alnum_prop": 0.6351791530944625,
"repo_name": "cucumber/cucumber",
"id": "8dea7b8fb04c77c1bdf809b797e9142d0355ca46",
"size": "1535",
"binary": false,
"copies": "1",
"ref": "refs/heads/retain-step-keyword",
"path": "messages/php/src-generated/PickleTableRow.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "127"
},
{
"name": "C",
"bytes": "480225"
},
{
"name": "C#",
"bytes": "289774"
},
{
"name": "C++",
"bytes": "14849"
},
{
"name": "CMake",
"bytes": "3944"
},
{
"name": "CSS",
"bytes": "3355"
},
{
"name": "Dockerfile",
"bytes": "3553"
},
{
"name": "Gherkin",
"bytes": "8970"
},
{
"name": "Go",
"bytes": "425168"
},
{
"name": "HTML",
"bytes": "71036"
},
{
"name": "JSONiq",
"bytes": "2997"
},
{
"name": "Java",
"bytes": "695608"
},
{
"name": "JavaScript",
"bytes": "2853"
},
{
"name": "Makefile",
"bytes": "168761"
},
{
"name": "Objective-C",
"bytes": "242920"
},
{
"name": "Perl",
"bytes": "140453"
},
{
"name": "Python",
"bytes": "269889"
},
{
"name": "Ruby",
"bytes": "310834"
},
{
"name": "Shell",
"bytes": "47340"
},
{
"name": "TypeScript",
"bytes": "405056"
}
],
"symlink_target": ""
} |
# Disassembled inner class of TextDirectionHeuristicsCompat that implements the
# "first strong" text-direction algorithm: direction is decided by the first
# strongly-directional character encountered in the examined range.
.class Landroid/support/v4/text/TextDirectionHeuristicsCompat$FirstStrong;
.super Ljava/lang/Object;
.source "TextDirectionHeuristicsCompat.java"
# interfaces
.implements Landroid/support/v4/text/TextDirectionHeuristicsCompat$TextDirectionAlgorithm;
# annotations
.annotation system Ldalvik/annotation/EnclosingClass;
    value = Landroid/support/v4/text/TextDirectionHeuristicsCompat;
.end annotation
.annotation system Ldalvik/annotation/InnerClass;
    accessFlags = 0xa
    name = "FirstStrong"
.end annotation
# static fields
# Shared singleton instance; the algorithm keeps no per-instance state.
.field public static final INSTANCE:Landroid/support/v4/text/TextDirectionHeuristicsCompat$FirstStrong;
# direct methods
# Class initializer: allocates the singleton and stores it in INSTANCE.
.method static constructor <clinit>()V
    .locals 1
    .prologue
    .line 193
    new-instance v0, Landroid/support/v4/text/TextDirectionHeuristicsCompat$FirstStrong;
    invoke-direct {v0}, Landroid/support/v4/text/TextDirectionHeuristicsCompat$FirstStrong;-><init>()V
    sput-object v0, Landroid/support/v4/text/TextDirectionHeuristicsCompat$FirstStrong;->INSTANCE:Landroid/support/v4/text/TextDirectionHeuristicsCompat$FirstStrong;
    return-void
.end method
# Private constructor: instances are only created via the INSTANCE singleton.
.method private constructor <init>()V
    .locals 0
    .prologue
    .line 190
    invoke-direct {p0}, Ljava/lang/Object;-><init>()V
    .line 191
    return-void
.end method
# virtual methods
# Scans cs[start, start+count) and returns the RTL decision produced by the
# outer class's isRtlTextOrFormat() for the first strongly-directional
# character. The constant 0x2 is used as the "still undecided" sentinel --
# NOTE(review): presumably the outer class's STATE_UNKNOWN; confirm against
# TextDirectionHeuristicsCompat's constants.
.method public checkRtl(Ljava/lang/CharSequence;II)I
    .locals 4
    .parameter "cs"
    .parameter "start"
    .parameter "count"
    .prologue
    .line 183
    # v2 = result, initialised to the undecided sentinel (0x2).
    const/4 v2, 0x2
    .line 184
    .local v2, result:I
    # v1 = loop index i (starts at p2 = start); v0 = end index e = start + count.
    move v1, p2
    .local v1, i:I
    add-int v0, p2, p3
    .local v0, e:I
    # Loop while i < e AND result is still the undecided sentinel.
    :goto_0
    if-ge v1, v0, :cond_0
    const/4 v3, 0x2
    if-ne v2, v3, :cond_0
    .line 185
    # result = isRtlTextOrFormat(Character.getDirectionality(cs.charAt(i)))
    invoke-interface {p1, v1}, Ljava/lang/CharSequence;->charAt(I)C
    move-result v3
    invoke-static {v3}, Ljava/lang/Character;->getDirectionality(C)B
    move-result v3
    #calls: Landroid/support/v4/text/TextDirectionHeuristicsCompat;->isRtlTextOrFormat(I)I
    invoke-static {v3}, Landroid/support/v4/text/TextDirectionHeuristicsCompat;->access$100(I)I
    move-result v2
    .line 184
    add-int/lit8 v1, v1, 0x1
    goto :goto_0
    .line 187
    :cond_0
    return v2
.end method
| {
"content_hash": "fd52afbaaca9c7111abb0f2994c37843",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 165,
"avg_line_length": 22.545454545454547,
"alnum_prop": 0.7195340501792115,
"repo_name": "baidurom/devices-Coolpad8720L",
"id": "0f9534dd8a91e225305a26f5660bc908cc7580e2",
"size": "2232",
"binary": false,
"copies": "1",
"ref": "refs/heads/coron-4.3",
"path": "CP_Gallery3D/smali/android/support/v4/text/TextDirectionHeuristicsCompat$FirstStrong.smali",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "13619"
},
{
"name": "Shell",
"bytes": "1917"
}
],
"symlink_target": ""
} |
(function() {
    //'use strict';

    // Modules this block depends on.
    var dependencies = [
        'blocks.logger'
    ];

    // Register the exception-handling block module with Angular.
    angular.module('blocks.exception', dependencies);
})();
| {
"content_hash": "db14077864eae54295f94f2d86871eda",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 40,
"avg_line_length": 16.285714285714285,
"alnum_prop": 0.5,
"repo_name": "gmnordlogic/agenda2",
"id": "1799d2f5d464e05d2d5bec4640fec02726427ba6",
"size": "114",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "presentation/client/app/blocks/exception/exception.module.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "186"
},
{
"name": "CSS",
"bytes": "202362"
},
{
"name": "HTML",
"bytes": "13825"
},
{
"name": "JavaScript",
"bytes": "176715"
},
{
"name": "PHP",
"bytes": "199754"
},
{
"name": "Shell",
"bytes": "3702"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
Interim Register of Marine and Nonmarine Genera
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "bbce6a416bf5ffb5b6f4978a8769b223",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 47,
"avg_line_length": 10.923076923076923,
"alnum_prop": 0.7183098591549296,
"repo_name": "mdoering/backbone",
"id": "23bf345115bbdad83cb6b45ef2e06b3f13ccba8d",
"size": "194",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Chromista/Haptophyta/Prymnesiophyceae/Arkhangelskiellaceae/Psedolithoderma/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
# Spec fixtures: minimal Wukong processors and a dataflow used by the tests.
# Emits each input record unchanged.
Wukong.processor(:simple) do
  def process(record)
    yield record
  end
end
# Consumes records without emitting anything.
Wukong.processor(:skipped) do
  def process(record)
    # skip records
  end
end
# Emits each input record three times.
Wukong.processor(:multi) do
  def process(record)
    3.times{ yield record }
  end
end
# Emits a message built from the record's 'foo' field
# (assumes the record responds to ['foo'], e.g. a parsed JSON hash).
Wukong.processor(:test_example) do
  def process(record)
    yield "I raised the #{record['foo']}"
  end
end
# Dataflow: parse each JSON input line, then run it through :test_example.
Wukong.dataflow(:flow) do
  from_json | test_example
end
| {
"content_hash": "38569f4a5709b732fbf4748ef902c2ed",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 41,
"avg_line_length": 15.74074074074074,
"alnum_prop": 0.6823529411764706,
"repo_name": "infochimps-labs/wukong-storm",
"id": "280a7cc47d857cf5db9d1f932daa9cd217506021",
"size": "425",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/support/examples.rb",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "49668"
},
{
"name": "Ruby",
"bytes": "47209"
}
],
"symlink_target": ""
} |
define(['okta'], function (Okta) {
  // Appends the inline TOTP entry UI to the form: a divider, a passcode
  // input, and a "Verify" button. Called once, when the user clicks the
  // "or enter code" link created in initialize() below.
  function addInlineTotp(form) {
    form.addDivider();
    // Passcode input; the visible label is suppressed because the
    // placeholder text carries the prompt.
    form.addInput({
      label: false,
      'label-top': true,
      placeholder: Okta.loc('mfa.challenge.enterCode.placeholder', 'login'),
      className: 'o-form-fieldset o-form-label-top inline-input auth-passcode',
      name: 'answer',
      type: 'text'
    });
    form.add(Okta.createButton({
      attributes: { 'data-se': 'inline-totp-verify' },
      className: 'button inline-totp-verify',
      title: Okta.loc('mfa.challenge.verify', 'login'),
      click: function () {
        form.model.manageTransaction(function (transaction, setTransaction) {
          // This is the case where we enter the TOTP code and verify while there is an
          // active Push request (or polling) running. We need to invoke previous() on authClient
          // and then call model.save(). If not, we would still be in MFA_CHALLENGE state and
          // verify would result in a wrong request (push verify instead of a TOTP verify).
          if (transaction.status === 'MFA_CHALLENGE' && transaction.previous) {
            transaction.previous().then(function (trans) {
              setTransaction(trans);
              form.model.save();
            });
          } else {
            // Push is not active and we enter the code to verify.
            form.model.save();
          }
        });
      }
    }));
    // Focus the element at index 1 -- presumably the passcode input added
    // after the divider; TODO confirm against Okta.Form.at() semantics.
    form.at(1).focus();
  }
  // Form shown for the inline TOTP factor. Initially renders only a link;
  // clicking the link swaps in the passcode input + verify button.
  return Okta.Form.extend({
    autoSave: true,
    noButtonBar: true,
    scrollOnError: false,
    layout: 'o-form-theme',
    className: 'mfa-verify-totp-inline',
    attributes: { 'data-se': 'factor-inline-totp' },
    initialize: function () {
      var form = this;
      // Clear stale validation errors whenever the model reports an error.
      this.listenTo(this.model, 'error', function () {
        this.clearErrors();
      });
      // "Or enter code" link: removes itself and reveals the TOTP input.
      this.add(Okta.createButton({
        className: 'link',
        attributes: { 'data-se': 'inline-totp-add' },
        title: Okta.loc('mfa.challenge.orEnterCode', 'login'),
        click: function () {
          this.remove();
          addInlineTotp(form);
        }
      }));
    }
  });
});
| {
"content_hash": "a16441aa1aab6c4551f2da26e593c2a4",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 97,
"avg_line_length": 31.55223880597015,
"alnum_prop": 0.576631977294229,
"repo_name": "gregorydandrea-okta/okta-signin-widget",
"id": "b28a7b317f8b61eb5e9c0b3c5412a12e2bf33880",
"size": "2736",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/views/mfa-verify/InlineTOTPForm.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "181635"
},
{
"name": "JavaScript",
"bytes": "1968098"
},
{
"name": "Ruby",
"bytes": "1057"
},
{
"name": "Shell",
"bytes": "2806"
},
{
"name": "Smarty",
"bytes": "784"
}
],
"symlink_target": ""
} |
Bookkeeper Symfony 2 Demo Application
========================
A simple demo application in Symfony 2, using Doctrine.
* Run Composer
* Enjoy
| {
"content_hash": "546c41932c2055db2cd68ee17d9573b5",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 56,
"avg_line_length": 20.857142857142858,
"alnum_prop": 0.6438356164383562,
"repo_name": "bart88/bookkeeper",
"id": "351ceee485989ec879edfc5e4263a14625b1642c",
"size": "146",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "3073"
},
{
"name": "PHP",
"bytes": "62469"
}
],
"symlink_target": ""
} |
import json
import logging
from datetime import datetime
from importlib import import_module
from django.conf import settings
from django.core.mail import mail_admins
from django.utils.dateparse import parse_date
from django.utils.timezone import localtime, make_aware, now
from cspreports.models import CSPReport
logger = logging.getLogger(getattr(settings, "CSP_REPORTS_LOGGER_NAME", "CSP Reports"))
def process_report(request):
    """Dispatch an incoming CSP violation report to every enabled sink.

    Depending on configuration the report is emailed to the admins, written
    to the log, saved to the database, and/or passed to any additional
    handlers. Reports rejected by the optional filter function are dropped
    before any sink runs.
    """
    if not should_process_report(request):
        return
    # Each flag is read from config lazily, just before its sink runs,
    # preserving the original one-flag-at-a-time evaluation order.
    for flag_name, sink in (
            ('EMAIL_ADMINS', email_admins),
            ('LOG', log_report),
            ('SAVE', save_report),
            ('ADDITIONAL_HANDLERS', run_additional_handlers)):
        if getattr(config, flag_name):
            sink(request)
def format_report(jsn):
    """Return the JSON report as a nicely formatted (indented) string.

    The payload comes straight from the browser/user, so it is handled
    defensively:
      * ``bytes`` input is decoded as UTF-8 with ``errors='replace'`` so a
        malformed byte sequence cannot raise ``UnicodeDecodeError`` (the
        original code would crash on non-UTF-8 bodies);
      * invalid JSON is returned as a raw dump with an explanatory prefix
        rather than tripping up on a ``ValueError``.

    :param jsn: the raw report payload, ``str`` or ``bytes``.
    :return: pretty-printed JSON, or the raw dump when parsing fails.
    """
    if isinstance(jsn, bytes):
        # errors='replace' keeps undecodable bytes displayable instead of
        # raising, so the raw-dump fallback below still works.
        jsn = jsn.decode('utf-8', errors='replace')
    try:
        return json.dumps(json.loads(jsn), indent=4, sort_keys=True, separators=(',', ': '))
    except ValueError:
        return "Invalid JSON. Raw dump is below.\n\n" + jsn
def email_admins(request):
    """Email the formatted report (plus the client's user agent) to the
    addresses in settings.ADMINS via django's mail_admins()."""
    formatted = format_report(request.body)
    agent = request.META.get('HTTP_USER_AGENT', '')
    mail_admins("CSP Violation Report", "User agent:\n%s\n\nReport:\n%s" % (agent, formatted))
def log_report(request):
    """Emit the violation report through this app's logger.

    The logger method is picked dynamically from the configured LOG_LEVEL
    (e.g. ``'warning'`` -> ``logger.warning``).
    """
    emit = getattr(logger, config.LOG_LEVEL)
    emit("Content Security Policy violation: %s", format_report(request.body))
def save_report(request):
    """Persist the report to the database as a CSPReport row, recording the
    client's user agent alongside the message body."""
    body = request.body
    if isinstance(body, bytes):
        # Decode using the request's declared encoding, falling back to the
        # site-wide default charset.
        body = body.decode(request.encoding or settings.DEFAULT_CHARSET)
    saved_report = CSPReport.from_message(body)
    saved_report.user_agent = request.META.get('HTTP_USER_AGENT', '')
    saved_report.save()
def run_additional_handlers(request):
    """Invoke every configured additional handler with the raw request."""
    for extra_handler in get_additional_handlers():
        extra_handler(request)
class Config:
    """ Configuration with defaults, each of which is overrideable in django settings. """
    # Defaults, these are overridden using "CSP_REPORTS_"-prefixed versions in settings.py
    EMAIL_ADMINS = True  # mail each report to settings.ADMINS
    LOG = True  # write each report to the logger
    LOG_LEVEL = 'warning'  # logger method name used by log_report()
    SAVE = True  # persist each report as a CSPReport row
    ADDITIONAL_HANDLERS = []  # dotted paths of extra callables run per report
    FILTER_FUNCTION = None  # optional dotted path of a predicate gating processing

    def __getattribute__(self, name):
        # Every attribute read first looks for a "CSP_REPORTS_"-prefixed
        # override in django settings; only when that setting is absent does
        # the lookup fall back to the class-level default above.
        try:
            return getattr(settings, "%s%s" % ("CSP_REPORTS_", name))
        except AttributeError:
            return super().__getattribute__(name)
# Module-wide configuration accessor consulted by the functions above.
config = Config()
# Lazily-populated caches for resolved dotted-path callables; filled in by
# get_additional_handlers() and should_process_report() respectively.
_additional_handlers = None
_filter_function = None
def get_additional_handlers():
    """ Returns the actual functions from the dotted paths specified in ADDITIONAL_HANDLERS.

    The resolved callables are memoised in the module-level
    ``_additional_handlers`` cache after the first call.
    """
    global _additional_handlers
    if not isinstance(_additional_handlers, list):
        _additional_handlers = [
            import_from_dotted_path(dotted_name)
            for dotted_name in config.ADDITIONAL_HANDLERS
        ]
    return _additional_handlers
def parse_date_input(value):
    """Return datetime based on the user's input.

    @param value: User's input
    @type value: str
    @raise ValueError: If the input is not valid.
    @return: Datetime of the beginning of the user's date.
    """
    # django's parse_date returns None for unrecognised input and raises
    # ValueError for well-formed but invalid dates; both become our error.
    try:
        parsed = parse_date(value)
    except ValueError:
        parsed = None
    if parsed is None:
        raise ValueError("'{}' is not a valid date.".format(value))
    midnight = datetime(parsed.year, parsed.month, parsed.day)
    return make_aware(midnight) if settings.USE_TZ else midnight
def get_midnight():
    """Return last midnight in localtime as datetime.

    @return: Midnight datetime
    """
    moment = now()
    if settings.USE_TZ:
        moment = localtime(moment)
    return moment.replace(hour=0, minute=0, second=0, microsecond=0)
def import_from_dotted_path(name):
    """Resolve a dotted path such as ``"package.module.attribute"``.

    Everything before the last dot is imported as a module; the final
    component is looked up as an attribute on it.
    """
    module_name, attribute_name = name.rsplit('.', 1)
    target_module = import_module(module_name)
    return getattr(target_module, attribute_name)
def should_process_report(request):
    """Return True unless the configured FILTER_FUNCTION rejects the request.

    The filter's dotted path is resolved lazily on first use and cached in
    the module-level ``_filter_function``.
    """
    global _filter_function
    filter_path = config.FILTER_FUNCTION
    if not filter_path:
        return True
    if _filter_function is None:
        _filter_function = import_from_dotted_path(filter_path)
    return _filter_function(request)
| {
"content_hash": "266a373187b4bc724a567ba17eb01b3a",
"timestamp": "",
"source": "github",
"line_count": 148,
"max_line_length": 96,
"avg_line_length": 30.7027027027027,
"alnum_prop": 0.6756161971830986,
"repo_name": "adamalton/django-csp-reports",
"id": "1e067a64bbb639a498c34442a1162927acad809a",
"size": "4544",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cspreports/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "370"
},
{
"name": "Python",
"bytes": "58487"
}
],
"symlink_target": ""
} |
<!--
Copyright 2014-2016 CyberVision, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.kaaproject.kaa.server</groupId>
<version>0.8.0</version>
<artifactId>transports</artifactId>
</parent>
<groupId>org.kaaproject.kaa.server.transports</groupId>
<artifactId>http</artifactId>
<packaging>pom</packaging>
<name>Kaa HTTP Transport</name>
<url>http://kaaproject.org</url>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<main.dir>${basedir}/../../..</main.dir>
</properties>
<modules>
<module>config</module>
<module>transport</module>
</modules>
</project>
| {
"content_hash": "eb0c176f7a0c982a433252f063d842f8",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 108,
"avg_line_length": 35.142857142857146,
"alnum_prop": 0.6869918699186992,
"repo_name": "liuhu/Kaa",
"id": "322c5ab4826f1fcc8eaa18537fe98359666812a6",
"size": "1476",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server/transports/http/pom.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4762"
},
{
"name": "C",
"bytes": "1397470"
},
{
"name": "C++",
"bytes": "1227671"
},
{
"name": "CMake",
"bytes": "71884"
},
{
"name": "CSS",
"bytes": "10373"
},
{
"name": "HTML",
"bytes": "6884"
},
{
"name": "Java",
"bytes": "9328357"
},
{
"name": "Makefile",
"bytes": "5541"
},
{
"name": "Objective-C",
"bytes": "1172379"
},
{
"name": "Python",
"bytes": "128276"
},
{
"name": "Ruby",
"bytes": "247"
},
{
"name": "Shell",
"bytes": "90772"
},
{
"name": "Thrift",
"bytes": "10264"
},
{
"name": "XSLT",
"bytes": "4062"
}
],
"symlink_target": ""
} |
<query id="updateMenuItems" action="update">
<tables>
<table name="menu_item" />
</tables>
<columns>
<column name="menu_srl" var="menu_srl" />
<column name="parent_srl" var="parent_srl" />
<column name="name" var="name" />
<column name="url" var="url" />
<column name="is_shortcut" var="is_shortcut" />
<column name="open_window" var="open_window" />
<column name="expand" var="expand" />
<column name="normal_btn" var="normal_btn" />
<column name="hover_btn" var="hover_btn" />
<column name="active_btn" var="active_btn" />
<column name="group_srls" var="group_srls" />
</columns>
<conditions>
<condition operation="equal" column="menu_srl" var="current_menu_srl" filter="number" notnull="notnull" />
</conditions>
</query>
| {
"content_hash": "1d6b2e4d4e0e1fe81f49239c11180490",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 114,
"avg_line_length": 40.61904761904762,
"alnum_prop": 0.5767878077373975,
"repo_name": "talkwithraon/XE4KAIST",
"id": "82ea01b659664e59d70e07677f198457dc06ba88",
"size": "853",
"binary": false,
"copies": "40",
"ref": "refs/heads/master",
"path": "xe4kaist/modules/menu/queries/updateMenuItems.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "2358"
},
{
"name": "CSS",
"bytes": "728319"
},
{
"name": "HTML",
"bytes": "1286170"
},
{
"name": "JavaScript",
"bytes": "2354460"
},
{
"name": "PHP",
"bytes": "4750065"
}
],
"symlink_target": ""
} |
..
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
:mod:`poppy.transport` drivers
==============================
.. automodule:: poppy.transport
:members:
:undoc-members:
:show-inheritance:
.. automodule:: poppy.transport.base
:members:
:undoc-members:
:show-inheritance:
:mod:`poppy.transport.pecan` driver
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. automodule:: poppy.transport.pecan.controllers.base
:members:
:undoc-members:
:show-inheritance:
.. automodule:: poppy.transport.pecan.controllers.root
:members:
:undoc-members:
:show-inheritance:
.. automodule:: poppy.transport.pecan.driver
:members:
:undoc-members:
:show-inheritance:
.. automodule:: poppy.transport.pecan.controllers.services
:members:
:undoc-members:
:show-inheritance:
.. automodule:: poppy.transport.pecan.controllers.v1
:members:
:undoc-members:
:show-inheritance:
| {
"content_hash": "00de85853010f4f1d48f4df4058c497a",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 79,
"avg_line_length": 27.037037037037038,
"alnum_prop": 0.665068493150685,
"repo_name": "obulpathi/poppy",
"id": "6dc38d9dc7d79ea84e17745b52c5ed2e47e4b78c",
"size": "1460",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "doc/source/poppy.transport.rst",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "1281"
},
{
"name": "PigLatin",
"bytes": "808"
},
{
"name": "Python",
"bytes": "1265113"
},
{
"name": "Shell",
"bytes": "12042"
}
],
"symlink_target": ""
} |
package org.apache.camel.dataformat.xmlsecurity.springboot;
import java.util.HashMap;
import java.util.Map;
import org.apache.camel.CamelContext;
import org.apache.camel.CamelContextAware;
import org.apache.camel.dataformat.xmlsecurity.XMLSecurityDataFormat;
import org.apache.camel.util.IntrospectionSupport;
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
* Generated by camel-package-maven-plugin - do not edit this file!
*/
@Configuration
@ConditionalOnBean(type = "org.apache.camel.spring.boot.CamelAutoConfiguration")
@AutoConfigureAfter(name = "org.apache.camel.spring.boot.CamelAutoConfiguration")
@EnableConfigurationProperties(XMLSecurityDataFormatConfiguration.class)
public class XMLSecurityDataFormatAutoConfiguration {
    /**
     * Registers the {@code secureXML} data format as a Spring bean, unless the
     * application has already defined its own {@link XMLSecurityDataFormat}.
     *
     * @param camelContext the Camel context the data format is bound to
     * @param configuration Spring Boot configuration properties to copy onto the data format
     * @return the configured data format bean
     * @throws Exception if property introspection or assignment fails
     */
    @Bean(name = "secureXML-dataformat")
    @ConditionalOnClass(CamelContext.class)
    @ConditionalOnMissingBean(XMLSecurityDataFormat.class)
    public XMLSecurityDataFormat configureXMLSecurityDataFormat(
            CamelContext camelContext,
            XMLSecurityDataFormatConfiguration configuration) throws Exception {
        XMLSecurityDataFormat dataformat = new XMLSecurityDataFormat();
        // Hand the context to the data format before properties are applied,
        // if it is context-aware.
        if (dataformat instanceof CamelContextAware) {
            ((CamelContextAware) dataformat).setCamelContext(camelContext);
        }
        // Copy all properties from the configuration holder onto the data
        // format, using the context's type converter for value coercion.
        Map<String, Object> parameters = new HashMap<>();
        IntrospectionSupport.getProperties(configuration, parameters, null,
                false);
        IntrospectionSupport.setProperties(camelContext,
                camelContext.getTypeConverter(), dataformat, parameters);
        return dataformat;
    }
}
"content_hash": "a8c66860adb6845eac3af20b600bfe60",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 81,
"avg_line_length": 46.68181818181818,
"alnum_prop": 0.7955209347614411,
"repo_name": "bhaveshdt/camel",
"id": "7f1633a6849503fae4c0ab950afa76e689b5b283",
"size": "2857",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "components-starter/camel-xmlsecurity-starter/src/main/java/org/apache/camel/dataformat/xmlsecurity/springboot/XMLSecurityDataFormatAutoConfiguration.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "106"
},
{
"name": "CSS",
"bytes": "30373"
},
{
"name": "Elm",
"bytes": "10852"
},
{
"name": "FreeMarker",
"bytes": "11410"
},
{
"name": "Groovy",
"bytes": "53445"
},
{
"name": "HTML",
"bytes": "178754"
},
{
"name": "Java",
"bytes": "56161990"
},
{
"name": "JavaScript",
"bytes": "90232"
},
{
"name": "Protocol Buffer",
"bytes": "578"
},
{
"name": "Python",
"bytes": "36"
},
{
"name": "Ruby",
"bytes": "4802"
},
{
"name": "Scala",
"bytes": "323343"
},
{
"name": "Shell",
"bytes": "16236"
},
{
"name": "Tcl",
"bytes": "4974"
},
{
"name": "XQuery",
"bytes": "546"
},
{
"name": "XSLT",
"bytes": "284394"
}
],
"symlink_target": ""
} |
/**
*
*/
package gov.nih.nci.cagrid.portal.portlet.discovery.dir;
import gov.nih.nci.cagrid.portal.dao.PointOfContactDao;
import gov.nih.nci.cagrid.portal.portlet.discovery.DiscoveryType;
import java.util.List;
/**
 * Discovery directory backed by the point-of-contact DAO.
 *
 * @author <a href="mailto:joshua.phillips@semanticbits.com">Joshua Phillips</a>
 */
public class PointOfContactDirectory extends DiscoveryDirectory {

    private PointOfContactDao pointOfContactDao;

    /**
     * Builds a directory whose discovery type is {@link DiscoveryType#POC}.
     */
    public PointOfContactDirectory() {
        setType(DiscoveryType.POC);
    }

    /**
     * Supplies the directory contents: every point-of-contact person
     * returned by the DAO.
     *
     * @see gov.nih.nci.cagrid.portal.portlet.discovery.dir.DiscoveryDirectory#getObjects()
     */
    @Override
    public List getObjects() {
        return getPointOfContactDao().getAllPointOfContactPersons();
    }

    public PointOfContactDao getPointOfContactDao() {
        return pointOfContactDao;
    }

    public void setPointOfContactDao(PointOfContactDao pointOfContactDao) {
        this.pointOfContactDao = pointOfContactDao;
    }
}
| {
"content_hash": "e9a8ca9c849abd8b6ebffc5473e29e3c",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 88,
"avg_line_length": 22.261904761904763,
"alnum_prop": 0.760427807486631,
"repo_name": "NCIP/cagrid",
"id": "5a54174cfaf70a4fb907b3273f07d356d305d5eb",
"size": "935",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cagrid/Software/portal/cagrid-portal/portlets/src/java/gov/nih/nci/cagrid/portal/portlet/discovery/dir/PointOfContactDirectory.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "438360"
},
{
"name": "Java",
"bytes": "25536538"
},
{
"name": "JavaScript",
"bytes": "265984"
},
{
"name": "Perl",
"bytes": "115674"
},
{
"name": "Scala",
"bytes": "405"
},
{
"name": "Shell",
"bytes": "85928"
},
{
"name": "XSLT",
"bytes": "75865"
}
],
"symlink_target": ""
} |
/**
* Created by xavier on 10/21/16.
*/
/**
 * Barrel module: re-exports the library's public building blocks so
 * consumers can import everything from a single entry point.
 */
export {dispruxDecomposer, RealTimeManager, dispruxMiddlewareFactory} from "./dispruxDecomposer";
export {RealTimeAction} from './RealTimeAction';
export {HandlerOf} from './HandlerOf';
export {ReducerOf} from './ReducerOf';
export {Path} from './Path';
export {ReducerByName} from './ReducerByName';
export {firebaseManagerFactory} from './firebaseManagerFactory';
| {
"content_hash": "1d654cd3e40fff68fb3a79a5a8f25f48",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 97,
"avg_line_length": 24.04,
"alnum_prop": 0.7337770382695508,
"repo_name": "xaviercobain88/disprux",
"id": "896af9e5ad019a0fccbd9ae66b1e619314e1ca2c",
"size": "601",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/index.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "318"
},
{
"name": "TypeScript",
"bytes": "33196"
}
],
"symlink_target": ""
} |
'use strict';
const DEFAULT_IMG = "https://bitstorm.org/edwin/jquery-presentatie/pix/jquery_logo_color_onwhite.png";
const DEFAULT_ORDER_TYPE = 'createdAt';
module.exports = (Model) => {
return {
getAllMaterials(filter) {
filter = filter || '';
// {
// title: { $in: [filter] },
// description: { $in: [filter] },
// user: { username: { $in: [filter] } }
// }
return Model
.find()
.sort(DEFAULT_ORDER_TYPE)
.exec();
},
createMaterial(title, description, img, user) {
img = img || DEFAULT_IMG;
return Model
.create({ title, description, img, user });
},
getMaterialById(id) {
return Model
.findById(id)
.exec();
},
addCommentToMaterial(materialId, commentText, user) {
let comment = {
text: commentText,
user
};
return Model
.findByIdAndUpdate(materialId, { $push: { comments: comment } }, { new: true })
.exec();
}
}
} | {
"content_hash": "dc70b001a4cd860dfe19c3fb55fb9fa2",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 102,
"avg_line_length": 27.818181818181817,
"alnum_prop": 0.4452614379084967,
"repo_name": "pollx/spa-forum-demo",
"id": "0dbd6ee2f54d69f6ef069f1805764a2a5164fc59",
"size": "1224",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/repository/materials.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2250"
},
{
"name": "HTML",
"bytes": "16132"
},
{
"name": "JavaScript",
"bytes": "41178"
}
],
"symlink_target": ""
} |
# Chocolatey install script for the Kaspersky Virus Removal Tool (KVRT).
$packageName  = 'kvrt'
$url          = 'http://devbuilds.kaspersky-labs.com/devbuilds/KVRT/latest/full/KVRT.exe'
$checksum     = '86881cc2439949710ff20c777736dba2195429d4395c96dfd6100853c8408666'
$checksumType = 'sha256'
$toolsPath    = Split-Path -Parent $MyInvocation.MyCommand.Definition
$installFile  = Join-Path $toolsPath 'kvrt.exe'
try {
  # Download the executable into the package tools directory, verifying the hash.
  $webFileArgs = @{
    PackageName  = "$packageName"
    FileFullPath = "$installFile"
    Url          = "$url"
    Checksum     = "$checksum"
    ChecksumType = "$checksumType"
  }
  Get-ChocolateyWebFile @webFileArgs
  # Empty .ignore sidecar keeps shimgen from shimming the exe directly,
  # so only the batch wrapper below gets a shim.
  Set-Content -Path ("$installFile.ignore") -Value $null
  # Batch wrapper launches the tool with the EULA pre-accepted.
  $batchStart = Join-Path $toolsPath 'kvrt.bat'
  'start %~dp0\kvrt.exe -accepteula' | Out-File -FilePath $batchStart -Encoding ASCII
  Install-BinFile 'kvrt' "$batchStart"
} catch {
  throw $_.Exception
} | {
"content_hash": "4b7b5b515c5729e8fa5f89acac69aead",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 85,
"avg_line_length": 39.916666666666664,
"alnum_prop": 0.6565762004175365,
"repo_name": "dtgm/chocolatey-packages",
"id": "0be5048f937dc05bf7dfc2ec07e16d364c420510",
"size": "960",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "automatic/_output/kvrt/2016.06.24.1153/tools/chocolateyInstall.ps1",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AutoHotkey",
"bytes": "347616"
},
{
"name": "AutoIt",
"bytes": "13530"
},
{
"name": "Batchfile",
"bytes": "1404"
},
{
"name": "C#",
"bytes": "8134"
},
{
"name": "HTML",
"bytes": "80818"
},
{
"name": "PowerShell",
"bytes": "13124493"
}
],
"symlink_target": ""
} |
/**
 * Collo — a tiny observable collection wrapper.
 *
 * Wraps an array of objects and exposes CRUD-style helpers.  Every successful
 * operation notifies the registered `next` observer with the whole collection;
 * every failure notifies the `error` observer with an `{Error: message}`
 * object.  With `promisify()` enabled, each operation returns a Promise
 * instead of a plain value.
 *
 * Fixes in this revision (behavior bugs, interfaces unchanged):
 *  - `_getTheIndexOf` used `false` as its "not found" sentinel, which is
 *    indistinguishable from index 0; items at position 0 were invisible to
 *    exists/upsert/removeWhere/getTheIndexOf (upsert inserted duplicates).
 *    It now returns -1 when absent.
 *  - `_onFail` invoked the error observer and Promise rejection with `null`,
 *    discarding the error object; it now forwards the error itself.
 *  - `updateWhere` validated `val` twice and never validated `item`.
 *
 * @param collection initial array of objects (defaults to [])
 * @param opts       options bag; `promisify` toggles Promise returns
 * @returns {Collo}
 */
function Collo(collection, opts={}) {
    this._collection = collection || [];
    this.nextFn = () => {};   // noop until next()/subscribe() installs a handler
    this.errorFn = () => {};  // noop until error()/subscribe() installs a handler

    /** Registers a success observer. Chainable. */
    this.next = (cb) => {
        this.nextFn = (data) => { cb(data); };
        return this;
    };
    /** Registers an error observer. Chainable. */
    this.error = (cb) => {
        this.errorFn = (data) => { cb(data); };
        return this;
    };
    /** Registers both observers at once (Rx-style). Chainable. */
    this.subscribe = (next, err) => {
        this.nextFn = (data) => { next(data); };
        this.errorFn = (data) => { err(data); };
        return this;
    };

    const self = this;
    const InvalidInputType = "Invalid input type. Input must be an object";
    const NotFound = "Item not found in the collection";
    let options = opts || {};

    /** Enable / disable Promise-returning mode. Chainable. */
    self.promisify = () => { options['promisify'] = true; return this; };
    self.unPromisify = () => { options['promisify'] = false; return this; };

    /**
     * Truthy when `item` is a non-null object with at least one own key.
     * @private
     */
    self._isObject = (item) => item && typeof item === 'object' && Object.keys(item).length;

    /**
     * Internal lookup matching on the FIRST key of `val`.
     * Returns the index of the last matching item, or -1 when absent.
     * (Previously returned `false`, which collided with index 0.)
     * @private
     */
    self._getTheIndexOf = (val) => {
        const key = Object.keys(val)[0];
        let index = -1;
        self._collection.forEach((item, i) => {
            if (item[key] === val[key]) { index = i; }
        });
        return index;
    };

    /**
     * Success path: notify the next-observer with the collection, then
     * return `input` (or a Promise resolving to it when promisified).
     */
    self._onSuccess = (input, options) => {
        self.nextFn(self._collection);
        if (options.promisify) {
            return new Promise(function (resolve, reject) {
                resolve(input);
            });
        }
        return input;
    };
    /**
     * Failure path: forward the error object itself to the observer /
     * rejection (it used to forward `null`, throwing away the reason),
     * then return null (or a rejected Promise when promisified).
     */
    self._onFail = (input, options) => {
        self.errorFn(input);
        if (options.promisify) {
            return new Promise(function (resolve, reject) {
                reject(input);
            });
        }
        return null;
    };

    /**
     * Public index lookup; succeeds with the index (0-based) or fails
     * with NotFound / InvalidInputType.
     */
    self.getTheIndexOf = (val) => {
        if (self._isObject(val)) {
            const index = self._getTheIndexOf(val);
            if (index !== -1) {
                return self._onSuccess(index, options);
            }
            return self._onFail({ Error: NotFound }, options);
        }
        return self._onFail({ Error: InvalidInputType }, options);
    };

    /** Returns the current collection. */
    self.list = () => self._onSuccess(self._collection, options);

    /**
     * First item whose first-key value matches `val`; succeeds with the
     * item, or with null when nothing matches.
     */
    self.findWhere = (val) => {
        if (self._isObject(val)) {
            const key = Object.keys(val)[0];
            const match = self._collection
                .filter((item) => item[key] && item[key] === val[key])[0];
            if (match) {
                return self._onSuccess(match, options);
            }
            return self._onSuccess(null, options);
        }
        return self._onFail({ Error: InvalidInputType }, options);
    };

    /** True when an item matching `val` is in the collection (index 0 included). */
    self.exists = (val) => self._isObject(val)
        ? self._onSuccess(self._getTheIndexOf(val) !== -1, options)
        : self._onFail({ Error: InvalidInputType }, options);

    /** Appends `item` at the end of the collection. */
    self.insert = (item) => {
        if (self._isObject(item)) {
            self._collection.push(item);
            return self._onSuccess(self._collection, options);
        }
        return self._onFail({ Error: InvalidInputType }, options);
    };

    /** Splices `item` in at an exact position. */
    self.insertAtIndex = (item, index) => {
        if (self._isObject(item)) {
            self._collection.splice(index, 0, item);
            return self._onSuccess(self._collection, options);
        }
        return self._onFail({ Error: InvalidInputType }, options);
    };

    /**
     * Updates items matching `val` by merging `item` over them; inserts
     * `item` when no match exists (including a match at index 0, which the
     * old falsy check wrongly treated as absent and duplicated).
     */
    self.upsert = (val, item) => {
        if (self._isObject(val) && self._isObject(item)) {
            if (self._getTheIndexOf(val) === -1) {
                self.insert(item);
            } else {
                self._collection = self._collection.map((it) => {
                    const key = Object.keys(val)[0];
                    if (it[key] === val[key]) {
                        return Object.assign({}, it, item);
                    }
                    return it;
                });
            }
            return self._onSuccess(self._collection, options);
        }
        return self._onFail({ Error: InvalidInputType }, options);
    };

    /** Removes the item matching `val`; fails with NotFound when absent. */
    self.removeWhere = (val) => {
        if (self._isObject(val)) {
            const index = self._getTheIndexOf(val);
            if (index !== -1) {
                self._collection.splice(index, 1);
                return self._onSuccess(self._collection, options);
            }
            return self._onFail({ Error: NotFound }, options);
        }
        return self._onFail({ Error: InvalidInputType }, options);
    };

    /**
     * Merges `item` over every item whose first-key value matches `val`.
     * (Fixed: the original validated `val` twice and never checked `item`.)
     */
    self.updateWhere = (val, item) => {
        if (self._isObject(val) && self._isObject(item)) {
            self._collection = self._collection.map((it) => {
                const key = Object.keys(val)[0];
                if (it[key] === val[key]) {
                    return Object.assign({}, it, item);
                }
                return it;
            });
            return self._onSuccess(self._collection, options);
        }
        return self._onFail({ Error: InvalidInputType }, options);
    };

    return this;
}
export default Collo;
| {
"content_hash": "760c8a6728c9c31efa9894fe5991b1c3",
"timestamp": "",
"source": "github",
"line_count": 271,
"max_line_length": 87,
"avg_line_length": 19.58671586715867,
"alnum_prop": 0.6049359457422758,
"repo_name": "adamgedney/collo",
"id": "1b78326af3ff8427dfff3e5b0145a790c04ef950",
"size": "5308",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/index.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "18808"
}
],
"symlink_target": ""
} |
<?php
use Illuminate\Database\Migrations\Migration;
use Illuminate\Database\Schema\Blueprint;
class CreateBiddingsTable extends Migration {
	/**
	 * Run the migrations.
	 *
	 * Creates the `biddings` table: one monetary bid placed by a user on a
	 * product.  The (user_id, product_id) pair is unique, so a user holds at
	 * most one bid per product; re-bidding means updating the existing row.
	 *
	 * @return void
	 */
	public function up()
	{
		Schema::create('biddings', function(Blueprint $table)
		{
			$table->increments('id');
			$table->integer('user_id')->unsigned();
			$table->integer('product_id')->unsigned();
			// NOTE(review): float is lossy for monetary amounts — presumably
			// acceptable here, but a decimal column would avoid rounding; confirm.
			$table->float('bid');
			// Deleting a user or a product also deletes their bids.
			$table->foreign('user_id')->references('id')->on('users')->onDelete('cascade');
			$table->foreign('product_id')->references('id')->on('products')->onDelete('cascade');
			// One bid per user per product.
			$table->unique(array('user_id', 'product_id'));
		});
	}
	/**
	 * Reverse the migrations.
	 *
	 * @return void
	 */
	public function down()
	{
		Schema::drop('biddings');
	}
}
| {
"content_hash": "3a87294098c12294e435b778db5690ab",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 88,
"avg_line_length": 20.025641025641026,
"alnum_prop": 0.6338028169014085,
"repo_name": "SingaporeSling/SingaporeAuction",
"id": "367e348b1f498b56f2cbe0a7ec6fc5a68f66f3ad",
"size": "781",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/database/migrations/2015_08_30_131914_create_biddings_table.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "356"
},
{
"name": "CSS",
"bytes": "4718"
},
{
"name": "HTML",
"bytes": "714"
},
{
"name": "JavaScript",
"bytes": "55516"
},
{
"name": "PHP",
"bytes": "101711"
}
],
"symlink_target": ""
} |
package org.spongepowered.api.util.command.dispatcher;
import org.spongepowered.api.util.command.CommandMapping;
import org.spongepowered.api.util.command.CommandSource;
import java.util.List;
import java.util.Optional;
/**
 * Strategy for choosing which of several {@link CommandMapping}s should handle
 * a command when more than one mapping is registered under the same alias.
 */
public interface Disambiguator {
    /**
     * Disambiguate an alias in cases where there are multiple command mappings registered for a given alias.
     *
     * @param source The CommandSource executing the command
     * @param aliasUsed The alias input by the user
     * @param availableOptions The commands registered to this alias
     * @return The specific command to use; an empty Optional presumably means
     *         no mapping could be selected — NOTE(review): confirm how the
     *         dispatcher treats an empty result
     */
    Optional<CommandMapping> disambiguate(CommandSource source, String aliasUsed, List<CommandMapping> availableOptions);
}
| {
"content_hash": "57abd239a25a1c9dc519319abbbc81c2",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 121,
"avg_line_length": 33.68181818181818,
"alnum_prop": 0.7638326585695007,
"repo_name": "Kiskae/SpongeAPI",
"id": "521fd43e95c58c4388686df35bce37113e610a65",
"size": "1991",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/main/java/org/spongepowered/api/util/command/dispatcher/Disambiguator.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "4405050"
},
{
"name": "Shell",
"bytes": "81"
}
],
"symlink_target": ""
} |
/* List of contributors:
*
* Initial Name/description
* -------------------------------------------------------------------
* MF Mario Fortier
*
*
* Change history:
*
* MMDDYY BY Description
* -------------------------------------------------------------------
* 112400 MF Template creation.
* 052603 MF Adapt code to compile with .NET Managed C++
*
*/
/**** START GENCODE SECTION 1 - DO NOT DELETE THIS LINE ****/
/* All code within this section is automatically
* generated by gen_code. Any modification will be lost
* next time gen_code is run.
*/
/* Generated */
/* Generated */ #if defined( _MANAGED )
/* Generated */ #include "TA-Lib-Core.h"
/* Generated */ #define TA_INTERNAL_ERROR(Id) (RetCode::InternalError)
/* Generated */ namespace TicTacTec { namespace TA { namespace Library {
/* Generated */ #elif defined( _JAVA )
/* Generated */ #include "ta_defs.h"
/* Generated */ #include "ta_java_defs.h"
/* Generated */ #define TA_INTERNAL_ERROR(Id) (RetCode.InternalError)
/* Generated */ #else
/* Generated */ #include <string.h>
/* Generated */ #include <math.h>
/* Generated */ #include "ta_func.h"
/* Generated */ #endif
/* Generated */
/* Generated */ #ifndef TA_UTILITY_H
/* Generated */ #include "ta_utility.h"
/* Generated */ #endif
/* Generated */
/* Generated */ #ifndef TA_MEMORY_H
/* Generated */ #include "ta_memory.h"
/* Generated */ #endif
/* Generated */
/* Generated */ #define TA_PREFIX(x) TA_##x
/* Generated */ #define INPUT_TYPE double
/* Generated */
/* Generated */ #if defined( _MANAGED )
/* Generated */ int Core::TemaLookback( int optInTimePeriod ) /* From 2 to 100000 */
/* Generated */
/* Generated */ #elif defined( _JAVA )
/* Generated */ public int temaLookback( int optInTimePeriod ) /* From 2 to 100000 */
/* Generated */
/* Generated */ #else
/* Generated */ int TA_TEMA_Lookback( int optInTimePeriod ) /* From 2 to 100000 */
/* Generated */
/* Generated */ #endif
/**** END GENCODE SECTION 1 - DO NOT DELETE THIS LINE ****/
{
/* insert local variable here */
int retValue;
/**** START GENCODE SECTION 2 - DO NOT DELETE THIS LINE ****/
/* Generated */ #ifndef TA_FUNC_NO_RANGE_CHECK
/* Generated */ /* min/max are checked for optInTimePeriod. */
/* Generated */ if( (int)optInTimePeriod == TA_INTEGER_DEFAULT )
/* Generated */ optInTimePeriod = 30;
/* Generated */ else if( ((int)optInTimePeriod < 2) || ((int)optInTimePeriod > 100000) )
/* Generated */ return -1;
/* Generated */
/* Generated */ #endif /* TA_FUNC_NO_RANGE_CHECK */
/**** END GENCODE SECTION 2 - DO NOT DELETE THIS LINE ****/
/* insert lookback code here. */
/* Get lookack for one EMA. */
retValue = LOOKBACK_CALL(EMA)( optInTimePeriod );
return retValue * 3;
}
/**** START GENCODE SECTION 3 - DO NOT DELETE THIS LINE ****/
/*
 * TA_TEMA - Triple Exponential Moving Average
 *
 * Input  = double
 * Output = double
 *
 * Optional Parameters
 * -------------------
 * optInTimePeriod:(From 2 to 100000)
 *    Number of period
 *
 *
 */
/* Generated */ 
/* Generated */ #if defined( _MANAGED ) && defined( USE_SUBARRAY )
/* Generated */ enum class Core::RetCode Core::Tema( int    startIdx,
/* Generated */                                      int    endIdx,
/* Generated */                                      SubArray<double>^ inReal,
/* Generated */                                      int           optInTimePeriod, /* From 2 to 100000 */
/* Generated */                                      [Out]int%    outBegIdx,
/* Generated */                                      [Out]int%    outNBElement,
/* Generated */                                      SubArray<double>^  outReal )
/* Generated */ #elif defined( _MANAGED )
/* Generated */ enum class Core::RetCode Core::Tema( int    startIdx,
/* Generated */                                      int    endIdx,
/* Generated */                                      cli::array<double>^ inReal,
/* Generated */                                      int           optInTimePeriod, /* From 2 to 100000 */
/* Generated */                                      [Out]int%    outBegIdx,
/* Generated */                                      [Out]int%    outNBElement,
/* Generated */                                      cli::array<double>^ outReal )
/* Generated */ #elif defined( _JAVA )
/* Generated */ public RetCode tema( int    startIdx,
/* Generated */                      int    endIdx,
/* Generated */                      double       inReal[],
/* Generated */                      int           optInTimePeriod, /* From 2 to 100000 */
/* Generated */                      MInteger     outBegIdx,
/* Generated */                      MInteger     outNBElement,
/* Generated */                      double        outReal[] )
/* Generated */ #else
/* Generated */ TA_RetCode TA_TEMA( int    startIdx,
/* Generated */                     int    endIdx,
/* Generated */                     const double inReal[],
/* Generated */                     int           optInTimePeriod, /* From 2 to 100000 */
/* Generated */                     int          *outBegIdx,
/* Generated */                     int          *outNBElement,
/* Generated */                     double        outReal[] )
/* Generated */ #endif
/**** END GENCODE SECTION 3 - DO NOT DELETE THIS LINE ****/
{
   /* Insert local variables here. */
   /* Temporary work buffers: firstEMA holds EMA(t), secondEMA holds
    * EMA(EMA(t)).  The third EMA is written directly into outReal and
    * then adjusted in place (see the loop at the end).
    */
   ARRAY_REF(firstEMA);
   ARRAY_REF(secondEMA);
   /* EMA smoothing constant derived once from the period (PER_TO_K). */
   double k;
   VALUE_HANDLE_INT(firstEMABegIdx);
   VALUE_HANDLE_INT(firstEMANbElement);
   VALUE_HANDLE_INT(secondEMABegIdx);
   VALUE_HANDLE_INT(secondEMANbElement);
   VALUE_HANDLE_INT(thirdEMABegIdx);
   VALUE_HANDLE_INT(thirdEMANbElement);
   int tempInt, outIdx, lookbackTotal, lookbackEMA;
   int firstEMAIdx, secondEMAIdx;
   ENUM_DECLARATION(RetCode) retCode;
/**** START GENCODE SECTION 4 - DO NOT DELETE THIS LINE ****/
/* Generated */ 
/* Generated */ #ifndef TA_FUNC_NO_RANGE_CHECK
/* Generated */ 
/* Generated */    /* Validate the requested output range. */
/* Generated */    if( startIdx < 0 )
/* Generated */       return ENUM_VALUE(RetCode,TA_OUT_OF_RANGE_START_INDEX,OutOfRangeStartIndex);
/* Generated */    if( (endIdx < 0) || (endIdx < startIdx))
/* Generated */       return ENUM_VALUE(RetCode,TA_OUT_OF_RANGE_END_INDEX,OutOfRangeEndIndex);
/* Generated */ 
/* Generated */    #if !defined(_JAVA)
/* Generated */    if( !inReal ) return ENUM_VALUE(RetCode,TA_BAD_PARAM,BadParam);
/* Generated */    #endif /* !defined(_JAVA)*/
/* Generated */    /* min/max are checked for optInTimePeriod. */
/* Generated */    if( (int)optInTimePeriod == TA_INTEGER_DEFAULT )
/* Generated */       optInTimePeriod = 30;
/* Generated */    else if( ((int)optInTimePeriod < 2) || ((int)optInTimePeriod > 100000) )
/* Generated */       return ENUM_VALUE(RetCode,TA_BAD_PARAM,BadParam);
/* Generated */ 
/* Generated */    #if !defined(_JAVA)
/* Generated */    if( !outReal )
/* Generated */       return ENUM_VALUE(RetCode,TA_BAD_PARAM,BadParam);
/* Generated */ 
/* Generated */    #endif /* !defined(_JAVA) */
/* Generated */ #endif /* TA_FUNC_NO_RANGE_CHECK */
/* Generated */ 
/**** END GENCODE SECTION 4 - DO NOT DELETE THIS LINE ****/
   /* Insert TA function code here. */
   /* For an explanation of this function, please read:
    *
    * Stocks & Commodities V. 12:1 (11-19):
    *   Smoothing Data With Faster Moving Averages
    * Stocks & Commodities V. 12:2 (72-80):
    *   Smoothing Data With Less Lag
    *
    * Both magazine articles written by Patrick G. Mulloy
    *
    * Essentially, a TEMA of time serie 't' is:
    *   EMA1 = EMA(t,period)
    *   EMA2 = EMA(EMA(t,period),period)
    *   EMA3 = EMA(EMA(EMA(t,period),period))
    *   TEMA = 3*EMA1 - 3*EMA2 + EMA3
    *
    * TEMA offers a moving average with less lags then the
    * traditional EMA.
    *
    * Do not confuse a TEMA with EMA3. Both are called "Triple EMA"
    * in the litterature.
    *
    * DEMA is very similar (and from the same author).
    */
   /* Will change only on success. */
   VALUE_HANDLE_DEREF_TO_ZERO(outNBElement);
   VALUE_HANDLE_DEREF_TO_ZERO(outBegIdx);
   /* Adjust startIdx to account for the lookback period.
    * Three chained EMAs => three EMA lookbacks of warm-up data.
    */
   lookbackEMA = LOOKBACK_CALL(EMA)( optInTimePeriod );
   lookbackTotal = lookbackEMA * 3;
   if( startIdx < lookbackTotal )
      startIdx = lookbackTotal;
   /* Make sure there is still something to evaluate. */
   if( startIdx > endIdx )
      return ENUM_VALUE(RetCode,TA_SUCCESS,Success);
   /* Allocate a temporary buffer for the firstEMA. */
   tempInt = lookbackTotal+(endIdx-startIdx)+1;
   ARRAY_ALLOC(firstEMA,tempInt);
   #if !defined( _JAVA )
      if( !firstEMA )
         return ENUM_VALUE(RetCode,TA_ALLOC_ERR,AllocErr);
   #endif
   /* Calculate the first EMA.
    * It is started 2*lookbackEMA bars before the requested output range so
    * that the second and third EMA below each have their own warm-up data.
    */
   k = PER_TO_K(optInTimePeriod);
   retCode = FUNCTION_CALL(INT_EMA)( startIdx-(lookbackEMA*2), endIdx, inReal,
                                     optInTimePeriod, k,
                                     VALUE_HANDLE_OUT(firstEMABegIdx), VALUE_HANDLE_OUT(firstEMANbElement),
                                     firstEMA );
   /* Verify for failure or if not enough data after
    * calculating the first EMA.
    */
   if( (retCode != ENUM_VALUE(RetCode,TA_SUCCESS,Success) ) || (VALUE_HANDLE_GET(firstEMANbElement) == 0) )
   {
      ARRAY_FREE( firstEMA );
      return retCode;
   }
   /* Allocate a temporary buffer for storing the EMA2 */
   ARRAY_ALLOC(secondEMA,VALUE_HANDLE_GET(firstEMANbElement));
   #if !defined( _JAVA ) && !defined( USE_SUBARRAY )
   if( !secondEMA )
   {
      ARRAY_FREE( firstEMA );
      return ENUM_VALUE(RetCode,TA_ALLOC_ERR,AllocErr);
   }
   #endif
   retCode = FUNCTION_CALL_DOUBLE(INT_EMA)( 0, VALUE_HANDLE_GET(firstEMANbElement)-1, firstEMA,
                                            optInTimePeriod, k,
                                            VALUE_HANDLE_OUT(secondEMABegIdx), VALUE_HANDLE_OUT(secondEMANbElement),
                                            secondEMA );
   /* Return empty output on failure or if not enough data after
    * calculating the second EMA.
    * NOTE: both temporary buffers are freed on every exit path.
    */
   if( (retCode != ENUM_VALUE(RetCode,TA_SUCCESS,Success) ) || (VALUE_HANDLE_GET(secondEMANbElement) == 0) )
   {
      ARRAY_FREE( firstEMA );
      ARRAY_FREE( secondEMA );
      return retCode;
   }
   /* Calculate the EMA3 into the caller provided output. */
   retCode = FUNCTION_CALL_DOUBLE(INT_EMA)( 0, VALUE_HANDLE_GET(secondEMANbElement)-1, secondEMA,
                                            optInTimePeriod, k,
                                            VALUE_HANDLE_OUT(thirdEMABegIdx), VALUE_HANDLE_OUT(thirdEMANbElement),
                                            outReal );
   /* Return empty output on failure or if not enough data after
    * calculating the third EMA.
    */
   if( (retCode != ENUM_VALUE(RetCode,TA_SUCCESS,Success) ) || (VALUE_HANDLE_GET(thirdEMANbElement) == 0) )
   {
      ARRAY_FREE( firstEMA );
      ARRAY_FREE( secondEMA );
      return retCode;
   }
   /* Indicate where the output starts relative to
    * the caller input.
    */
   firstEMAIdx  = VALUE_HANDLE_GET(thirdEMABegIdx) + VALUE_HANDLE_GET(secondEMABegIdx);
   secondEMAIdx = VALUE_HANDLE_GET(thirdEMABegIdx);
   VALUE_HANDLE_DEREF(outBegIdx) = firstEMAIdx + VALUE_HANDLE_GET(firstEMABegIdx);
   /* Do the TEMA:
    *  Iterate through the EMA3 (output buffer) and adjust
    *  the value by using the EMA2 and EMA1.
    *  outReal already holds EMA3, so adding 3*EMA1 - 3*EMA2 in place
    *  completes TEMA = 3*EMA1 - 3*EMA2 + EMA3.
    */
   outIdx = 0;
   while( outIdx < VALUE_HANDLE_GET(thirdEMANbElement) )
   {
      outReal[outIdx] += (3.0*firstEMA[firstEMAIdx++]) - (3.0*secondEMA[secondEMAIdx++]);
      outIdx++;
   }
   ARRAY_FREE( firstEMA );
   ARRAY_FREE( secondEMA );
   /* Indicates to the caller the number of output
    * successfully calculated.
    */
   VALUE_HANDLE_DEREF(outNBElement) = outIdx;
   return ENUM_VALUE(RetCode,TA_SUCCESS,Success);
}
/* NOTE(review): GENCODE section 5 below is the float (single-precision input)
 * variant of TA_TEMA, produced automatically by gen_code from the double
 * version above (see the file header: "Any modification will be lost next
 * time gen_code is run").  Do not hand-edit; change the template instead.
 */
/**** START GENCODE SECTION 5 - DO NOT DELETE THIS LINE ****/
/* Generated */ 
/* Generated */ #define  USE_SINGLE_PRECISION_INPUT
/* Generated */ #undef  TA_LIB_PRO
/* Generated */ #if !defined( _MANAGED ) && !defined( _JAVA )
/* Generated */    #undef   TA_PREFIX
/* Generated */    #define  TA_PREFIX(x) TA_S_##x
/* Generated */ #endif
/* Generated */ #undef   INPUT_TYPE
/* Generated */ #define  INPUT_TYPE float
/* Generated */ #if defined( _MANAGED ) && defined( USE_SUBARRAY )
/* Generated */ enum class Core::RetCode Core::Tema( int    startIdx,
/* Generated */                                      int    endIdx,
/* Generated */                                      SubArray<float>^ inReal,
/* Generated */                                      int           optInTimePeriod, /* From 2 to 100000 */
/* Generated */                                      [Out]int%    outBegIdx,
/* Generated */                                      [Out]int%    outNBElement,
/* Generated */                                      SubArray<double>^  outReal )
/* Generated */ #elif defined( _MANAGED )
/* Generated */ enum class Core::RetCode Core::Tema( int    startIdx,
/* Generated */                                      int    endIdx,
/* Generated */                                      cli::array<float>^ inReal,
/* Generated */                                      int           optInTimePeriod, /* From 2 to 100000 */
/* Generated */                                      [Out]int%    outBegIdx,
/* Generated */                                      [Out]int%    outNBElement,
/* Generated */                                      cli::array<double>^ outReal )
/* Generated */ #elif defined( _JAVA )
/* Generated */ public RetCode tema( int    startIdx,
/* Generated */                      int    endIdx,
/* Generated */                      float        inReal[],
/* Generated */                      int           optInTimePeriod, /* From 2 to 100000 */
/* Generated */                      MInteger     outBegIdx,
/* Generated */                      MInteger     outNBElement,
/* Generated */                      double        outReal[] )
/* Generated */ #else
/* Generated */ TA_RetCode TA_S_TEMA( int    startIdx,
/* Generated */                       int    endIdx,
/* Generated */                       const float  inReal[],
/* Generated */                       int           optInTimePeriod, /* From 2 to 100000 */
/* Generated */                       int          *outBegIdx,
/* Generated */                       int          *outNBElement,
/* Generated */                       double        outReal[] )
/* Generated */ #endif
/* Generated */ {
/* Generated */    ARRAY_REF(firstEMA);
/* Generated */    ARRAY_REF(secondEMA);
/* Generated */    double k;
/* Generated */    VALUE_HANDLE_INT(firstEMABegIdx);
/* Generated */    VALUE_HANDLE_INT(firstEMANbElement);
/* Generated */    VALUE_HANDLE_INT(secondEMABegIdx);
/* Generated */    VALUE_HANDLE_INT(secondEMANbElement);
/* Generated */    VALUE_HANDLE_INT(thirdEMABegIdx);
/* Generated */    VALUE_HANDLE_INT(thirdEMANbElement);
/* Generated */    int tempInt, outIdx, lookbackTotal, lookbackEMA;
/* Generated */    int firstEMAIdx, secondEMAIdx;
/* Generated */    ENUM_DECLARATION(RetCode) retCode;
/* Generated */ #ifndef TA_FUNC_NO_RANGE_CHECK
/* Generated */    if( startIdx < 0 )
/* Generated */       return ENUM_VALUE(RetCode,TA_OUT_OF_RANGE_START_INDEX,OutOfRangeStartIndex);
/* Generated */    if( (endIdx < 0) || (endIdx < startIdx))
/* Generated */       return ENUM_VALUE(RetCode,TA_OUT_OF_RANGE_END_INDEX,OutOfRangeEndIndex);
/* Generated */    #if !defined(_JAVA)
/* Generated */    if( !inReal ) return ENUM_VALUE(RetCode,TA_BAD_PARAM,BadParam);
/* Generated */    #endif
/* Generated */    if( (int)optInTimePeriod == TA_INTEGER_DEFAULT )
/* Generated */       optInTimePeriod = 30;
/* Generated */    else if( ((int)optInTimePeriod < 2) || ((int)optInTimePeriod > 100000) )
/* Generated */       return ENUM_VALUE(RetCode,TA_BAD_PARAM,BadParam);
/* Generated */    #if !defined(_JAVA)
/* Generated */    if( !outReal )
/* Generated */       return ENUM_VALUE(RetCode,TA_BAD_PARAM,BadParam);
/* Generated */    #endif
/* Generated */ #endif
/* Generated */    VALUE_HANDLE_DEREF_TO_ZERO(outNBElement);
/* Generated */    VALUE_HANDLE_DEREF_TO_ZERO(outBegIdx);
/* Generated */    lookbackEMA = LOOKBACK_CALL(EMA)( optInTimePeriod );
/* Generated */    lookbackTotal = lookbackEMA * 3;
/* Generated */    if( startIdx < lookbackTotal )
/* Generated */       startIdx = lookbackTotal;
/* Generated */    if( startIdx > endIdx )
/* Generated */       return ENUM_VALUE(RetCode,TA_SUCCESS,Success);
/* Generated */    tempInt = lookbackTotal+(endIdx-startIdx)+1;
/* Generated */    ARRAY_ALLOC(firstEMA,tempInt);
/* Generated */    #if !defined( _JAVA )
/* Generated */       if( !firstEMA )
/* Generated */          return ENUM_VALUE(RetCode,TA_ALLOC_ERR,AllocErr);
/* Generated */    #endif
/* Generated */    k = PER_TO_K(optInTimePeriod);
/* Generated */    retCode = FUNCTION_CALL(INT_EMA)( startIdx-(lookbackEMA*2), endIdx, inReal,
/* Generated */                                      optInTimePeriod, k,
/* Generated */                                      VALUE_HANDLE_OUT(firstEMABegIdx), VALUE_HANDLE_OUT(firstEMANbElement),
/* Generated */                                      firstEMA );
/* Generated */    if( (retCode != ENUM_VALUE(RetCode,TA_SUCCESS,Success) ) || (VALUE_HANDLE_GET(firstEMANbElement) == 0) )
/* Generated */    {
/* Generated */       ARRAY_FREE( firstEMA );
/* Generated */       return retCode;
/* Generated */    }
/* Generated */    ARRAY_ALLOC(secondEMA,VALUE_HANDLE_GET(firstEMANbElement));
/* Generated */    #if !defined( _JAVA ) && !defined( USE_SUBARRAY )
/* Generated */    if( !secondEMA )
/* Generated */    {
/* Generated */       ARRAY_FREE( firstEMA );
/* Generated */       return ENUM_VALUE(RetCode,TA_ALLOC_ERR,AllocErr);
/* Generated */    }
/* Generated */    #endif
/* Generated */    retCode = FUNCTION_CALL_DOUBLE(INT_EMA)( 0, VALUE_HANDLE_GET(firstEMANbElement)-1, firstEMA,
/* Generated */                                             optInTimePeriod, k,
/* Generated */                                             VALUE_HANDLE_OUT(secondEMABegIdx), VALUE_HANDLE_OUT(secondEMANbElement),
/* Generated */                                             secondEMA );
/* Generated */    if( (retCode != ENUM_VALUE(RetCode,TA_SUCCESS,Success) ) || (VALUE_HANDLE_GET(secondEMANbElement) == 0) )
/* Generated */    {
/* Generated */       ARRAY_FREE( firstEMA );
/* Generated */       ARRAY_FREE( secondEMA );
/* Generated */       return retCode;
/* Generated */    }
/* Generated */    retCode = FUNCTION_CALL_DOUBLE(INT_EMA)( 0, VALUE_HANDLE_GET(secondEMANbElement)-1, secondEMA,
/* Generated */                                             optInTimePeriod, k,
/* Generated */                                             VALUE_HANDLE_OUT(thirdEMABegIdx), VALUE_HANDLE_OUT(thirdEMANbElement),
/* Generated */                                             outReal );
/* Generated */    if( (retCode != ENUM_VALUE(RetCode,TA_SUCCESS,Success) ) || (VALUE_HANDLE_GET(thirdEMANbElement) == 0) )
/* Generated */    {
/* Generated */       ARRAY_FREE( firstEMA );
/* Generated */       ARRAY_FREE( secondEMA );
/* Generated */       return retCode;
/* Generated */    }
/* Generated */    firstEMAIdx  = VALUE_HANDLE_GET(thirdEMABegIdx) + VALUE_HANDLE_GET(secondEMABegIdx);
/* Generated */    secondEMAIdx = VALUE_HANDLE_GET(thirdEMABegIdx);
/* Generated */    VALUE_HANDLE_DEREF(outBegIdx) = firstEMAIdx + VALUE_HANDLE_GET(firstEMABegIdx);
/* Generated */    outIdx = 0;
/* Generated */    while( outIdx < VALUE_HANDLE_GET(thirdEMANbElement) )
/* Generated */    {
/* Generated */       outReal[outIdx] += (3.0*firstEMA[firstEMAIdx++]) - (3.0*secondEMA[secondEMAIdx++]);
/* Generated */       outIdx++;
/* Generated */    }
/* Generated */    ARRAY_FREE( firstEMA );
/* Generated */    ARRAY_FREE( secondEMA );
/* Generated */    VALUE_HANDLE_DEREF(outNBElement) = outIdx;
/* Generated */    return ENUM_VALUE(RetCode,TA_SUCCESS,Success);
/* Generated */ }
/* Generated */ 
/* Generated */ #if defined( _MANAGED )
/* Generated */ }}} // Close namespace TicTacTec.TA.Lib
/* Generated */ #endif
/**** END GENCODE SECTION 5 - DO NOT DELETE THIS LINE ****/
| {
"content_hash": "56438d123bf9542146faa10454b5c2f9",
"timestamp": "",
"source": "github",
"line_count": 459,
"max_line_length": 133,
"avg_line_length": 44.28322440087146,
"alnum_prop": 0.5681393289383057,
"repo_name": "mkmarek/forex.analytics",
"id": "9349ccd036653cf840e15da0be10a094910f4acd",
"size": "21897",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/ta-lib/src/ta_func/ta_TEMA.c",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5948645"
},
{
"name": "C++",
"bytes": "106423"
},
{
"name": "CMake",
"bytes": "1475"
},
{
"name": "Java",
"bytes": "10420"
},
{
"name": "JavaScript",
"bytes": "10200"
},
{
"name": "Makefile",
"bytes": "126078"
},
{
"name": "Python",
"bytes": "2433"
}
],
"symlink_target": ""
} |
package scala.meta
package internal
package tokenizers
import scala.annotation.switch
import java.lang.{ Character => JCharacter }
import scala.language.postfixOps
/** Contains constants and classifier methods for characters used by the tokenizer. */
object Chars {
  // Be very careful touching these.
  // Apparently trivial changes to the way you write these constants
  // will cause Scanners.scala to go from a nice efficient switch to
  // a ghastly nested if statement which will bring the type checker
  // to its knees. See ticket #1456
  // Martin: (this should be verified now that the pattern rules have been redesigned).
  final val LF = '\u000A'  // line feed
  final val FF = '\u000C'  // form feed
  final val CR = '\u000D'  // carriage return
  final val SU = '\u001A'  // substitute (Ctrl-Z), treated as end-of-input marker
  /** Convert a character digit to an Int according to given base,
   * -1 if no success
   */
  def digit2int(ch: Char, base: Int): Int = {
    val num = (
      if (ch <= '9') ch - '0'
      else if ('a' <= ch && ch <= 'z') ch - 'a' + 10
      else if ('A' <= ch && ch <= 'Z') ch - 'A' + 10
      else -1
    )
    // A digit is only valid if it falls within the requested base.
    if (0 <= num && num < base) num else -1
  }
  /** Reusable buffer for building "\\uXXXX" escape strings; slots 2-5 hold the hex digits. */
  private[this] val char2uescapeArray = Array[Char]('\\', 'u', 0, 0, 0, 0)
  /** Convert a character to a backslash-u escape */
  def char2uescape(c: Char): String = {
    @inline def hexChar(ch: Int): Char =
      ( if (ch < 10) '0' else 'A' - 10 ) + ch toChar
    // Fill in the four hex nibbles, most significant first.
    char2uescapeArray(2) = hexChar((c >> 12) )
    char2uescapeArray(3) = hexChar((c >> 8) % 16)
    char2uescapeArray(4) = hexChar((c >> 4) % 16)
    char2uescapeArray(5) = hexChar((c ) % 16)
    new String(char2uescapeArray)
  }
  /** Is character a line break? */
  def isLineBreakChar(c: Char) = (c: @switch) match {
    case LF|FF|CR|SU => true
    case _ => false
  }
  /** Is character a whitespace character (but not a new line)? */
  def isWhitespace(c: Char) =
    c == ' ' || c == '\t' || c == CR
  /** Can character form part of a doc comment variable xxx? */
  def isVarPart(c: Char) =
    '0' <= c && c <= '9' || 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z'
  /** Can character start an alphanumeric Scala identifier? */
  def isIdentifierStart(c: Char): Boolean =
    (c == '_') || (c == '$') || Character.isUnicodeIdentifierStart(c)
  /** Can character form part of an alphanumeric Scala identifier? */
  def isIdentifierPart(c: Char) =
    (c == '$') || Character.isUnicodeIdentifierPart(c)
  /** Is character a math or other symbol in Unicode? */
  def isSpecial(c: Char) = {
    val chtp = Character.getType(c)
    chtp == Character.MATH_SYMBOL.toInt || chtp == Character.OTHER_SYMBOL.toInt
  }
  private final val otherLetters = Set[Char]('\u0024', '\u005F') // '$' and '_'
  private final val letterGroups = {
    import JCharacter._
    Set[Byte](LOWERCASE_LETTER, UPPERCASE_LETTER, OTHER_LETTER, TITLECASE_LETTER, LETTER_NUMBER)
  }
  /** Is the character a letter per the Scala spec (Unicode letter categories, '$' or '_')? */
  def isScalaLetter(ch: Char) = letterGroups(JCharacter.getType(ch).toByte) || otherLetters(ch)
  /** Can character form part of a Scala operator name? */
  def isOperatorPart(c : Char) : Boolean = (c: @switch) match {
    case '~' | '!' | '@' | '#' | '%' |
         '^' | '*' | '+' | '-' | '<' |
         '>' | '?' | ':' | '=' | '&' |
         '|' | '/' | '\\' => true
    case c => isSpecial(c)
  }
  /** {{{
   *  (#x20 | #x9 | #xD | #xA)
   *  }}} */
  final def isSpace(ch: Char): Boolean = ch match {
    case '\u0009' | '\u000A' | '\u000D' | '\u0020' => true
    case _ => false
  }
  /** {{{
   *  NameChar ::= Letter | Digit | '.' | '-' | '_' | ':'
   *             | CombiningChar | Extender
   *  }}}
   *  See [4] and Appendix B of XML 1.0 specification.
   */
  def isNameChar(ch: Char) = {
    import java.lang.Character._
    // The constants represent groups Mc, Me, Mn, Lm, and Nd.
    isNameStart(ch) || (getType(ch).toByte match {
      case COMBINING_SPACING_MARK |
        ENCLOSING_MARK | NON_SPACING_MARK |
        MODIFIER_LETTER | DECIMAL_DIGIT_NUMBER => true
      case _ => ".-:" contains ch
    })
  }
  /** {{{
   *  NameStart ::= ( Letter | '_' )
   *  }}}
   *  where Letter means in one of the Unicode general
   *  categories `{ Ll, Lu, Lo, Lt, Nl }`.
   *
   *  We do not allow a name to start with ':'.
   *  See [3] and Appendix B of XML 1.0 specification
   */
  def isNameStart(ch: Char) = {
    import java.lang.Character._
    getType(ch).toByte match {
      case LOWERCASE_LETTER |
        UPPERCASE_LETTER | OTHER_LETTER |
        TITLECASE_LETTER | LETTER_NUMBER => true
      case _ => ch == '_'
    }
  }
}
| {
"content_hash": "ca0b75e445452612125c511cb4202253",
"timestamp": "",
"source": "github",
"line_count": 139,
"max_line_length": 96,
"avg_line_length": 33.66187050359712,
"alnum_prop": 0.5558880102586022,
"repo_name": "MasseGuillaume/scalameta",
"id": "0e55c44ddc659c5043ab9b0295b943b7b9adac86",
"size": "4679",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "scalameta/tokenizers/shared/src/main/scala/scala/meta/internal/tokenizers/Chars.scala",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Java",
"bytes": "3943"
},
{
"name": "Python",
"bytes": "2237"
},
{
"name": "Scala",
"bytes": "2296121"
},
{
"name": "Shell",
"bytes": "2735"
}
],
"symlink_target": ""
} |
# Production-environment settings; these override config/application.rb.
Typr::Application.configure do
  # Settings specified here will take precedence over those in config/application.rb
  # The production environment is meant for finished, "live" apps.
  # Code is not reloaded between requests
  config.cache_classes = true
  # Full error reports are disabled and caching is turned on
  config.consider_all_requests_local       = false
  config.action_controller.perform_caching = true
  # Specifies the header that your server uses for sending files
  # (X-Sendfile assumes an Apache front end with mod_xsendfile).
  config.action_dispatch.x_sendfile_header = "X-Sendfile"
  # For nginx:
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect'
  # If you have no front-end server that supports something like X-Sendfile,
  # just comment this out and Rails will serve the files
  # See everything in the log (default is :info)
  # config.log_level = :debug
  # Use a different logger for distributed setups
  # config.logger = SyslogLogger.new
  # Use a different cache store in production
  # config.cache_store = :mem_cache_store
  # Disable Rails's static asset server
  # In production, Apache or nginx will already do this
  config.serve_static_assets = false
  # Enable serving of images, stylesheets, and javascripts from an asset server
  # config.action_controller.asset_host = "http://assets.example.com"
  # Disable delivery errors, bad email addresses will be ignored
  # config.action_mailer.raise_delivery_errors = false
  # Enable threaded mode
  # config.threadsafe!
  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation can not be found)
  config.i18n.fallbacks = true
  # Send deprecation notices to registered listeners
  config.active_support.deprecation = :notify
end
| {
"content_hash": "5aa56e2ef03ea7f4918ffebb47419329",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 84,
"avg_line_length": 35.775510204081634,
"alnum_prop": 0.7501426126640046,
"repo_name": "hans/typr",
"id": "179cbb7d392400ed1c7401b051739412fa42f437",
"size": "1753",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "config/environments/production.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "70521"
},
{
"name": "Ruby",
"bytes": "40835"
}
],
"symlink_target": ""
} |
goog.provide('adapt.ops');
goog.require("vivliostyle.constants");
goog.require('adapt.task');
goog.require('adapt.geom');
goog.require('adapt.expr');
goog.require('adapt.css');
goog.require('adapt.csstok');
goog.require('adapt.cssparse');
goog.require('adapt.cssvalid');
goog.require('adapt.csscasc');
goog.require('adapt.cssstyler');
goog.require('adapt.pm');
goog.require('adapt.vtree');
goog.require('adapt.layout');
goog.require('adapt.vgen');
goog.require('adapt.xmldoc');
goog.require('adapt.font');
goog.require('vivliostyle.page');
/**
* @typedef {{properties:adapt.csscasc.ElementStyle,condition:adapt.expr.Val}}
*/
adapt.ops.FontFace;
/**
 * All style information extracted from a document's stylesheets: the compiled
 * cascade, the page-master tree, font faces, and footnote/flow/viewport
 * properties. A Style is document-independent; per-rendering state lives in
 * adapt.ops.StyleInstance.
 * @param {adapt.ops.OPSDocStore} store owner store (provides validator set and font deobfuscator)
 * @param {adapt.expr.LexicalScope} rootScope scope for document-level expressions
 * @param {adapt.expr.LexicalScope} pageScope scope for per-page expressions
 * @param {adapt.csscasc.Cascade} cascade compiled CSS cascade
 * @param {adapt.pm.RootPageBox} rootBox root of the page-master hierarchy
 * @param {Array.<adapt.ops.FontFace>} fontFaces parsed @font-face declarations
 * @param {adapt.csscasc.ElementStyle} footnoteProps style for footnote areas
 * @param {Object.<string,adapt.csscasc.ElementStyle>} flowProps per-flow properties
 * @param {Array.<adapt.csscasc.ElementStyle>} viewportProps viewport rule properties
 * @constructor
 */
adapt.ops.Style = function(store, rootScope, pageScope, cascade, rootBox,
		fontFaces, footnoteProps, flowProps, viewportProps) {
	/** @const */ this.store = store;
	/** @const */ this.rootScope = rootScope;
	/** @const */ this.pageScope = pageScope;
	/** @const */ this.cascade = cascade;
	/** @const */ this.rootBox = rootBox;
	/** @const */ this.fontFaces = fontFaces;
	/** @const */ this.fontDeobfuscator = store.fontDeobfuscator;
	/** @const */ this.footnoteProps = footnoteProps;
	/** @const */ this.flowProps = flowProps;
	/** @const */ this.viewportProps = viewportProps;
	/** @const */ this.validatorSet = store.validatorSet;
	// "has-content(flow)" built-in: evaluated with a StyleInstance as `this`
	// during page-master selection.
	this.pageScope.defineBuiltIn("has-content", function(name) {
		var styleInstance = /** @type {adapt.ops.StyleInstance} */ (this);
		return styleInstance.currentLayoutPosition.hasContent(/** @type {string} */ (name), styleInstance.lookupOffset);
	});
	// "page-number" resolves to the 1-based number of the page being laid out.
	this.pageScope.defineName("page-number", new adapt.expr.Native(this.pageScope, function() {
		var styleInstance = /** @type {adapt.ops.StyleInstance} */ (this);
		return styleInstance.currentLayoutPosition.page;
	}, "page-number"));
};
/**
 * Computes the effective viewport size and font size, taking the stylesheet's
 * viewport properties (width/height/text-zoom) into account. Falls back to the
 * given values when no applicable viewport rule yields a positive size.
 * @param {number} viewportWidth default viewport width
 * @param {number} viewportHeight default viewport height
 * @param {number} fontSize default font size
 * @return {{width:number, height:number, fontSize:number}}
 */
adapt.ops.Style.prototype.sizeViewport = function(viewportWidth, viewportHeight, fontSize) {
	if (this.viewportProps.length) {
		var ctx = new adapt.expr.Context(this.rootScope, viewportWidth,
				viewportHeight, fontSize);
		var merged = adapt.csscasc.mergeAll(ctx, this.viewportProps);
		var widthProp = merged["width"];
		var heightProp = merged["height"];
		var textZoomProp = merged["text-zoom"];
		if ((widthProp && heightProp) || textZoomProp) {
			var emSize = adapt.expr.defaultUnitSizes["em"];
			var zoom = textZoomProp ? textZoomProp.evaluate(ctx, "text-zoom") : null;
			if (zoom === adapt.css.ident.scale) {
				// "text-zoom: scale": keep the default em size and scale the
				// viewport so that text effectively keeps its size.
				var factor = emSize / fontSize;
				fontSize = emSize;
				viewportWidth *= factor;
				viewportHeight *= factor;
			}
			if (widthProp && heightProp) {
				var widthValue = adapt.css.toNumber(widthProp.evaluate(ctx, "width"), ctx);
				var heightValue = adapt.css.toNumber(heightProp.evaluate(ctx, "height"), ctx);
				if (widthValue > 0 && heightValue > 0) {
					return {width: widthValue, height: heightValue, fontSize: fontSize};
				}
			}
		}
	}
	return {width: viewportWidth, height: viewportHeight, fontSize: fontSize};
};
//-------------------------------------------------------------------------------
/**
* @param {adapt.ops.Style} style
* @param {adapt.xmldoc.XMLDocHolder} xmldoc
* @param {?string} defaultLang
* @param {adapt.vgen.Viewport} viewport
* @param {adapt.vtree.ClientLayout} clientLayout
* @param {adapt.font.Mapper} fontMapper
* @param {adapt.vgen.CustomRenderer} customRenderer
* @param {Object.<string,string>} fallbackMap
* @constructor
* @extends {adapt.expr.Context}
* @implements {adapt.cssstyler.FlowListener}
* @implements {adapt.pm.InstanceHolder}
* @implements {adapt.vgen.StylerProducer}
*/
adapt.ops.StyleInstance = function(style, xmldoc, defaultLang, viewport, clientLayout,
fontMapper, customRenderer, fallbackMap) {
adapt.expr.Context.call(this, style.rootScope, viewport.width, viewport.height, viewport.fontSize);
/** @const */ this.style = style;
/** @const */ this.xmldoc = xmldoc;
/** @const */ this.lang = xmldoc.lang || defaultLang;
/** @const */ this.viewport = viewport;
/** @const */ this.primaryFlows = /** @type {Object.<string,boolean>} */ ({ "body": true });
/** @const */ this.clientLayout = clientLayout;
/** @type {adapt.pm.RootPageBoxInstance} */ this.rootPageBoxInstance = null;
/** @type {adapt.cssstyler.Styler} */ this.styler = null;
/** @type {Object.<string,adapt.cssstyler.Styler>} */ this.stylerMap = null;
/** @type {adapt.vtree.LayoutPosition} */ this.currentLayoutPosition = null;
/** @type {number} */ this.lookupOffset = 0;
/** @const */ this.fontMapper = fontMapper;
/** @const */ this.faces = new adapt.font.DocumentFaces(this.style.fontDeobfuscator);
/** @type {Object.<string,adapt.pm.PageBoxInstance>} */ this.pageBoxInstances = {};
/** @type {vivliostyle.page.PageManager} */ this.pageManager = null;
/** @type {boolean} */ this.regionBreak = false;
/** @type {!Object.<string,boolean>} */ this.pageBreaks = {};
/** @type {?vivliostyle.constants.PageProgression} */ this.pageProgression = null;
/** @const */ this.customRenderer = customRenderer;
/** @const */ this.fallbackMap = fallbackMap;
for (var flowName in style.flowProps) {
var flowStyle = style.flowProps[flowName];
var consume = adapt.csscasc.getProp(flowStyle, "flow-consume");
if (consume) {
var consumeVal = consume.evaluate(this, "flow-consume");
if (consumeVal == adapt.css.ident.all) {
this.primaryFlows[flowName] = true;
} else {
delete this.primaryFlows[flowName];
}
}
}
};
goog.inherits(adapt.ops.StyleInstance, adapt.expr.Context);
/**
 * Initializes the instance: creates the styler for the main document,
 * instantiates and sizes the page-box hierarchy, creates the page manager,
 * and loads all applicable @font-face fonts.
 * @return {!adapt.task.Result.<boolean>} resolved (true) when fonts are loaded
 */
adapt.ops.StyleInstance.prototype.init = function() {
	var self = this;
	/** @type {!adapt.task.Frame.<boolean>} */ var frame
		= adapt.task.newFrame("StyleInstance.init");
	self.styler = new adapt.cssstyler.Styler(self.xmldoc, self.style.cascade,
		 	self.style.rootScope, self, this.primaryFlows, self.style.validatorSet);
	self.styler.resetFlowChunkStream(self);
	self.stylerMap = {};
	self.stylerMap[self.xmldoc.url] = self.styler;
	var docElementStyle = self.styler.getTopContainerStyle();
	self.pageProgression = vivliostyle.page.resolvePageProgression(docElementStyle);
	var rootBox = this.style.rootBox;
	this.rootPageBoxInstance = new adapt.pm.RootPageBoxInstance(rootBox);
	var cascadeInstance = this.style.cascade.createInstance(self, this.lang);
	this.rootPageBoxInstance.applyCascadeAndInit(cascadeInstance, docElementStyle);
	this.rootPageBoxInstance.resolveAutoSizing(self);
	this.pageManager = new vivliostyle.page.PageManager(cascadeInstance, this.style.pageScope, this.rootPageBoxInstance, self, docElementStyle);
	var srcFaces = /** @type {Array.<adapt.font.Face>} */ ([]);
	// Collect the font faces whose condition (if any) holds.
	// Fix: the loop previously read `fontFaces[i++]`, advancing the index a
	// second time per iteration and thereby silently skipping every other
	// @font-face declaration.
	for (var i = 0; i < self.style.fontFaces.length; i++) {
		var fontFace = self.style.fontFaces[i];
		if (fontFace.condition && !fontFace.condition.evaluate(self))
			continue;
		var properties = adapt.font.prepareProperties(fontFace.properties, self);
		var srcFace = new adapt.font.Face(properties);
		srcFaces.push(srcFace);
	}
	self.fontMapper.findOrLoadFonts(srcFaces, self.faces).thenFinish(frame);
	return frame.result();
};
/**
 * Returns (creating and caching on first use) the styler for the given
 * document.
 * @override
 */
adapt.ops.StyleInstance.prototype.getStylerForDoc = function(xmldoc) {
	var cached = this.stylerMap[xmldoc.url];
	if (cached) {
		return cached;
	}
	var docStyle = this.style.store.getStyleForDoc(xmldoc);
	// Give the styler its own expression context so that style variables can
	// resolve to values specific to this document.
	var exprContext = new adapt.expr.Context(docStyle.rootScope, this.pageWidth, this.pageHeight, this.fontSize);
	var newStyler = new adapt.cssstyler.Styler(xmldoc, docStyle.cascade,
		docStyle.rootScope, exprContext, this.primaryFlows, docStyle.validatorSet);
	this.stylerMap[xmldoc.url] = newStyler;
	return newStyler;
};
/**
 * Registers a page-box instance under the given key for later lookup.
 * @override
 */
adapt.ops.StyleInstance.prototype.registerInstance = function(key, instance) {
    this.pageBoxInstances[key] = instance;
};
/**
 * Returns the page-box instance previously registered under the given key
 * (undefined if none).
 * @override
 */
adapt.ops.StyleInstance.prototype.lookupInstance = function(key) {
    return this.pageBoxInstances[key];
};
/**
 * FlowListener callback: records a newly-encountered flow chunk in the
 * current layout position so it can be laid out later. No-op when no layout
 * is in progress.
 * @override
 */
adapt.ops.StyleInstance.prototype.encounteredFlowChunk = function(flowChunk) {
	var layoutPosition = this.currentLayoutPosition;
	if (!layoutPosition) {
		return;
	}
	var flowPosition = layoutPosition.flowPositions[flowChunk.flowName];
	if (!flowPosition) {
		// First chunk for this flow: create its position record.
		flowPosition = new adapt.vtree.FlowPosition();
		layoutPosition.flowPositions[flowChunk.flowName] = flowPosition;
	}
	var nodePosition = adapt.vtree.newNodePositionFromNode(flowChunk.element);
	var chunkPosition = new adapt.vtree.ChunkPosition(nodePosition);
	flowPosition.positions.push(new adapt.vtree.FlowChunkPosition(chunkPosition, flowChunk));
};
/**
 * Returns the smallest document offset among all pending chunk positions of
 * the given flow, i.e. how far the flow has been consumed. Offsets are taken
 * relative to the main document: for positions that point into another
 * document, the first step belonging to the main document is used instead.
 * @param {adapt.vtree.FlowPosition} flowPosition
 * @return {number} minimal consumed offset (POSITIVE_INFINITY if none)
 */
adapt.ops.StyleInstance.prototype.getConsumedOffset = function(flowPosition) {
	var minOffset = Number.POSITIVE_INFINITY;
	for (var index = 0; index < flowPosition.positions.length; index++) {
		var primary = flowPosition.positions[index].chunkPosition.primary;
		var node = primary.steps[0].node;
		var offsetInNode = primary.offsetInNode;
		var after = primary.after;
		// Walk up the step chain until we reach a node in the main document.
		var stepIndex = 0;
		while (node.ownerDocument != this.xmldoc.document) {
			stepIndex++;
			node = primary.steps[stepIndex].node;
			after = false;
			offsetInNode = 0;
		}
		var docOffset = this.xmldoc.getNodeOffset(node, offsetInNode, after);
		if (docOffset < minOffset) {
			minOffset = docOffset;
		}
	}
	return minOffset;
};
/**
 * Computes the current document offset for the given layout position: the
 * minimal consumed offset over all primary flows. May drive the styler
 * forward (styleUntilFlowIsReached) when a primary flow has not been
 * encountered yet.
 * @param {adapt.vtree.LayoutPosition|undefined} layoutPosition
 * @return {number} document offset of the given layoutPosition
 */
adapt.ops.StyleInstance.prototype.getPosition = function(layoutPosition) {
	if (!layoutPosition)
		return 0;
	var currentPosition = Number.POSITIVE_INFINITY;
	for (var flowName in this.primaryFlows) {
		var flowPosition = layoutPosition.flowPositions[flowName];
		if ((!flowPosition || flowPosition.positions.length == 0) && this.currentLayoutPosition) {
			// Flow not seen yet: style forward until the flow shows up, then
			// pick up its position from the live layout position.
			this.styler.styleUntilFlowIsReached(flowName);
			flowPosition = this.currentLayoutPosition.flowPositions[flowName];
			if (layoutPosition != this.currentLayoutPosition) {
				if (flowPosition) {
					// Clone so we don't share mutable state with the live position.
					flowPosition = flowPosition.clone();
					layoutPosition.flowPositions[flowName] = flowPosition;
				}
			}
		}
		if (flowPosition) {
			var consumedOffset = this.getConsumedOffset(flowPosition);
			if (consumedOffset < currentPosition)
				currentPosition = consumedOffset;
		}
	}
	return currentPosition;
};
/**
 * Debug helper: logs the current layout location — page number, current and
 * lookup offsets, and the start offset of every pending flow chunk. Only
 * called from DEBUG builds (see selectPageMaster).
 * @param {number} position current document offset
 */
adapt.ops.StyleInstance.prototype.dumpLocation = function(position) {
	adapt.base.log("Location - page " + this.currentLayoutPosition.page);
	// Fix: log label previously misspelled as "currnt".
	adapt.base.log("  current: " + position);
	adapt.base.log("  lookup: " + this.lookupOffset);
	for (var flowName in this.currentLayoutPosition.flowPositions) {
		var flowPosition = this.currentLayoutPosition.flowPositions[flowName];
		for (var i = 0; i < flowPosition.positions.length; i++) {
			var p = flowPosition.positions[i];
			adapt.base.log("  Chunk " + flowName + ": " + p.flowChunk.startOffset);
		}
	}
};
/**
 * Selects the page master to use for the next page, following the EPUB
 * Adaptive Layout "Page Layout Processing Model". Returns null when all
 * primary content has been consumed. Side effects: advances the styler and
 * updates this.lookupOffset.
 * @return {adapt.pm.PageMasterInstance}
 */
adapt.ops.StyleInstance.prototype.selectPageMaster = function() {
	var self = this;
	var cp = this.currentLayoutPosition;
	// 3.5. Page Layout Processing Model
	// 1. Determine current position in the document: Find the minimal consumed-offset for all elements
	//    not fully-consumed in each primary flow. Current position is maximum of the results among all
	//    primary flows.
	var currentPosition = this.getPosition(cp);
	if (currentPosition == Number.POSITIVE_INFINITY ) {
		// end of primary content is reached
		return null;
	}
	// If there is a page master generated for @page rules, use it.
	var pageMaster = this.pageManager.getPageRulePageMaster();
	if (pageMaster) {
		return pageMaster;
	}
	// 2. Page master selection: for each page master:
	var pageMasters = /** @type {Array.<adapt.pm.PageMasterInstance>} */ (this.rootPageBoxInstance.children);
	for (var i = 0; i < pageMasters.length; i++) {
		pageMaster = pageMasters[i];
		// Skip a page master generated for @page rules
		if (pageMaster.pageBox.pseudoName === vivliostyle.page.pageRuleMasterPseudoName)
			continue;
		var coeff = 1;
		// A. Calculate lookup position using current position and utilization
		//    (see -epubx-utilization property)
		var utilization = pageMaster.getProp(self, "utilization");
		if (utilization && utilization.isNum())
			coeff = (/** @type {adapt.css.Num} */ (utilization)).num;
		var em = self.queryUnitSize("em");
		var pageArea = self.pageWidth * self.pageHeight;
		var lookup = Math.ceil(coeff * pageArea / (em * em));
		// B. Determine element eligibility. Each element in a flow is considered eligible if
		//    it is not marked as fully consumed and it comes in the document before the lookup position.
		//    Feed lookupOffset and flow availability into the context.
		this.lookupOffset = this.styler.styleUntil(currentPosition, lookup);
		this.initLingering();
		self.clearScope(this.style.pageScope);
		// C. Determine content availability. Flow has content available if it contains eligible elements.
		// D. Determine if page master is enabled using rules in Section 3.4.7
		var enabled = pageMaster.getProp(self, "enabled");
		// E. First enabled page master is used for the next page
		if (!enabled || enabled === adapt.css.ident._true) {
			if (goog.DEBUG) {
				this.dumpLocation(currentPosition);
			}
			return pageMaster;
		}
	}
	throw new Error("No enabled page masters");
};
/**
 * Lays out as much content of the named flow as fits into the given column,
 * consuming flow-chunk positions from the current layout position as chunks
 * are fully placed.
 * @param {adapt.layout.Column} region column to fill
 * @param {string} flowName flow to lay out
 * @param {Array.<string>} regionIds active region ids (for region styling)
 * @return {adapt.task.Result.<boolean>} holding true
 */
adapt.ops.StyleInstance.prototype.layoutColumn = function(region, flowName, regionIds) {
	var flowPosition = this.currentLayoutPosition.flowPositions[flowName];
	if (!flowPosition)
		return adapt.task.newResult(true);
	if (this.primaryFlows[flowName] && region.exclusions.length > 0) {
		// In general, we force non-fitting content. Exception is only for primary flow regions
		// that have exclusions.
		region.forceNonfitting = false;
	}
	region.init();
	var self = this;
	/** @type {!adapt.task.Frame.<boolean>} */ var frame = adapt.task.newFrame("layoutColumn");
	var repeated = /** @type {Array.<adapt.vtree.FlowChunkPosition>} */ ([]);
	frame.loopWithFrame(function(loopFrame) {
		while (flowPosition.positions.length > 0) {
			// Pick the best chunk that starts before the lookup offset.
			var index = 0;
			var selected = flowPosition.positions[index];
			if (selected.flowChunk.startOffset > self.lookupOffset)
				break;
			for (var k = 1; k < flowPosition.positions.length; k++) {
				var alt = flowPosition.positions[k];
				if (alt.flowChunk.startOffset > self.lookupOffset)
					break;
				if (alt.flowChunk.isBetter(selected.flowChunk)) {
					selected = alt;
					index = k;
				}
			}
			var flowChunk = selected.flowChunk;
			// "pending" distinguishes a synchronous completion of region.layout
			// (callback ran before we return) from an asynchronous one.
			var pending = true;
			region.layout(selected.chunkPosition).then(function(newPosition) {
				// static: add back to the flow
				if (selected.flowChunk.repeated && (newPosition == null || flowChunk.exclusive))
					repeated.push(selected);
				if (flowChunk.exclusive) {
					// exclusive, only can have one, remove from the flow even if it did not fit
					flowPosition.positions.splice(index, 1);
					loopFrame.breakLoop();
					return;
				} else {
					// not exclusive, did not fit completely
					if (newPosition) {
						selected.chunkPosition = newPosition;
						loopFrame.breakLoop();
						return;
					}
					// go to the next element in the flow
					flowPosition.positions.splice(index, 1);
				}
				if (pending) {
					// Sync result
					pending = false;
				} else {
					// Async result
					loopFrame.continueLoop();
				}
			});
			if (pending) {
				// Async result
				pending = false;
				return;
			}
			// Sync result
		}
		loopFrame.breakLoop();
	}).then(function() {
		// add all repeated back
		if (repeated.length > 0)
			flowPosition.positions = repeated.concat(flowPosition.positions);
		frame.finish(true);
	});
	return frame.result();
};
/**
 * Recursively lays out a page box (partition/master) and its children into
 * the page: creates the DOM container, fills it from its flow (possibly in
 * multiple columns), registers its exclusion shape, then processes children.
 * @param {adapt.vtree.Page} page page being built
 * @param {adapt.pm.PageBoxInstance} boxInstance page box to lay out
 * @param {HTMLElement} parentContainer DOM parent for the box's container
 * @param {number} offsetX accumulated x offset of parentContainer
 * @param {number} offsetY accumulated y offset of parentContainer
 * @param {Array.<adapt.geom.Shape>} exclusions exclusion shapes (appended to)
 * @return {adapt.task.Result.<boolean>} holding true
 */
adapt.ops.StyleInstance.prototype.layoutContainer = function(page, boxInstance,
		parentContainer, offsetX, offsetY, exclusions) {
	var self = this;
	var enabled = boxInstance.getProp(self, "enabled");
	if (enabled && enabled !== adapt.css.ident._true) {
		// Disabled box: nothing to lay out.
		return adapt.task.newResult(true);
	}
	/** @type {!adapt.task.Frame.<boolean>} */ var frame
		= adapt.task.newFrame("layoutContainer");
	var wrapFlow = boxInstance.getProp(self, "wrap-flow");
	var dontExclude = wrapFlow === adapt.css.ident.auto;
	var dontApplyExclusions = boxInstance.vertical
		? boxInstance.isAutoWidth && boxInstance.isRightDependentOnAutoWidth
		: boxInstance.isAutoHeight && boxInstance.isTopDependentOnAutoHeight;
	var flowName = boxInstance.getProp(self, "flow-from");
	var boxContainer = self.viewport.document.createElement("div");
	var position = boxInstance.getProp(self, "position");
	adapt.base.setCSSProperty(boxContainer, "position", position ? position.name : "absolute");
	parentContainer.insertBefore(boxContainer, parentContainer.firstChild);
	var layoutContainer = new adapt.vtree.Container(boxContainer);
	layoutContainer.vertical = boxInstance.vertical;
	boxInstance.prepareContainer(self, layoutContainer, page);
	layoutContainer.originX = offsetX;
	layoutContainer.originY = offsetY;
	offsetX += layoutContainer.left + layoutContainer.marginLeft + layoutContainer.borderLeft;
	offsetY += layoutContainer.top + layoutContainer.marginTop + layoutContainer.borderTop;
	var cont;
	if (!flowName || !flowName.isIdent()) {
		// No flow attached: the box only carries generated "content".
		var contentVal = boxInstance.getProp(self, "content");
		if (contentVal) {
			if (adapt.vtree.nonTrivialContent(contentVal)) {
				contentVal.visit(new adapt.vtree.ContentPropertyHandler(boxContainer));
				boxInstance.transferContentProps(self, layoutContainer, page);
			}
		}
		boxInstance.finishContainer(self, layoutContainer, page, null, 1, self.clientLayout);
		cont = adapt.task.newResult(true);
	} else if (!self.pageBreaks[flowName.toString()]) {
		// Lay out the flow into one or more columns of this box.
		/** @type {!adapt.task.Frame.<boolean>} */ var innerFrame = adapt.task.newFrame("layoutContainer.inner");
		var flowNameStr = flowName.toString();
		// for now only a single column in vertical case
		var columnCount = boxInstance.getPropAsNumber(self, "column-count");
		var columnGap = boxInstance.getPropAsNumber(self, "column-gap");
		// Don't query columnWidth when it's not needed, so that width calculation can be delayed
		// for width: auto columns.
		var columnWidth = (columnCount > 1 ? boxInstance.getPropAsNumber(self, "column-width") : layoutContainer.width);
		var regionIds = boxInstance.getActiveRegions(self);
		var computedBlockSize = 0;
		var innerShapeVal = boxInstance.getProp(self, "shape-inside");
		var innerShape = adapt.cssprop.toShape(innerShapeVal, 0, 0,
				layoutContainer.width, layoutContainer.height, self);
		var layoutContext = new adapt.vgen.ViewFactory(flowNameStr, self,
				self.viewport, self.styler, regionIds, self.xmldoc, self.faces,
				self.style.footnoteProps, self, page, self.customRenderer,
				self.fallbackMap);
		var columnIndex = 0;
		var region = null;
		frame.loopWithFrame(function(loopFrame) {
			while(columnIndex < columnCount) {
				var column = columnIndex++;
				if (columnCount > 1) {
					// Multi-column: each column gets its own absolutely positioned div.
					var columnContainer = self.viewport.document.createElement("div");
					adapt.base.setCSSProperty(columnContainer, "position", "absolute");
					boxContainer.appendChild(columnContainer);
					region = new adapt.layout.Column(columnContainer, layoutContext, self.clientLayout);
					region.vertical = layoutContainer.vertical;
					region.snapHeight = layoutContainer.snapHeight;
					region.snapWidth = layoutContainer.snapWidth;
					if (layoutContainer.vertical) {
						adapt.base.setCSSProperty(columnContainer, "margin-left", layoutContainer.paddingLeft + "px");
						adapt.base.setCSSProperty(columnContainer, "margin-right", layoutContainer.paddingRight + "px");
						var columnY = column * (columnWidth + columnGap) + layoutContainer.paddingTop;
						region.setHorizontalPosition(0, layoutContainer.width);
						region.setVerticalPosition(columnY, columnWidth);
					} else {
						adapt.base.setCSSProperty(columnContainer, "margin-top", layoutContainer.paddingTop + "px");
						adapt.base.setCSSProperty(columnContainer, "margin-bottom", layoutContainer.paddingBottom + "px");
						var columnX = column * (columnWidth + columnGap) + layoutContainer.paddingLeft;
						region.setVerticalPosition(0, layoutContainer.height);
						region.setHorizontalPosition(columnX, columnWidth);
					}
					region.originX = offsetX + layoutContainer.paddingLeft;
					region.originY = offsetY + layoutContainer.paddingTop;
				} else {
					// Single column: lay out directly into the box container.
					region = new adapt.layout.Column(boxContainer, layoutContext, self.clientLayout);
					region.copyFrom(layoutContainer);
					layoutContainer = region;
				}
				region.exclusions = dontApplyExclusions ? [] : exclusions;
				region.innerShape = innerShape;
				var lr;
				if (region.width >= 0) {
					// region.element.style.outline = "1px dotted green";
					/** @type {!adapt.task.Frame.<boolean>} */ var innerFrame = adapt.task.newFrame("inner");
					self.layoutColumn(region, flowNameStr, regionIds).then(function() {
						if (region.pageBreakType) {
							if (region.pageBreakType != "column") {
								// skip remaining columns
								columnIndex = columnCount;
								if (region.pageBreakType != "region") {
									// skip remaining regions
									self.pageBreaks[flowNameStr] = true;
								}
							}
						}
						innerFrame.finish(true);
					});
					lr = innerFrame.result();
				} else {
					lr = adapt.task.newResult(true);
				}
				if (lr.isPending()) {
					lr.then(function() {
						computedBlockSize = Math.max(computedBlockSize, region.computedBlockSize);
						loopFrame.continueLoop();
					});
					return;
				} else {
					computedBlockSize = Math.max(computedBlockSize, region.computedBlockSize);
				}
			}
			loopFrame.breakLoop();
		}).then(function() {
			layoutContainer.computedBlockSize = computedBlockSize;
			boxInstance.finishContainer(self, layoutContainer, page, region,
					columnCount, self.clientLayout);
			innerFrame.finish(true);
		});
		cont = innerFrame.result();
	} else {
		// A forced page break already ended this flow on this page.
		boxInstance.finishContainer(self, layoutContainer, page, null, 1, self.clientLayout);
		cont = adapt.task.newResult(true);
	}
	cont.then(function() {
		if (!boxInstance.isAutoHeight || Math.floor(layoutContainer.computedBlockSize) > 0) {
			if (!dontExclude) {
				// Register this box's outer shape as an exclusion for siblings.
				var outerX = layoutContainer.originX + layoutContainer.left;
				var outerY = layoutContainer.originY + layoutContainer.top;
				var outerWidth = layoutContainer.getInsetLeft() + layoutContainer.width + layoutContainer.getInsetRight();
				var outerHeight = layoutContainer.getInsetTop() + layoutContainer.height + layoutContainer.getInsetBottom();
				var outerShapeProp = boxInstance.getProp(self, "shape-outside");
				var outerShape = adapt.cssprop.toShape(outerShapeProp, outerX, outerY,
						outerWidth, outerHeight, self);
				if (adapt.base.checkLShapeFloatBug(self.viewport.root)) {
					// Simplistic bug workaround: add a copy of the shape translated up.
					exclusions.push(outerShape.withOffset(0, -1.25 * self.queryUnitSize("em")));
				}
				exclusions.push(outerShape);
			}
		} else if (boxInstance.children.length == 0) {
			// Empty auto-height box with no children: drop it from the page.
			parentContainer.removeChild(boxContainer);
			frame.finish(true);
			return;
		}
		// Lay out children (in reverse order, matching insertBefore above).
		var i = boxInstance.children.length - 1;
		frame.loop(function() {
			while (i >= 0) {
				var child = boxInstance.children[i--];
				var r = self.layoutContainer(page, child, /** @type {HTMLElement} */ (boxContainer),
						offsetX, offsetY, exclusions);
				if (r.isPending()) {
					return r;
				}
			}
			return adapt.task.newResult(false);
		}).then(function() {
			frame.finish(true);
		});
	});
	return frame.result();
};
/**
 * Removes from every flow the lingering chunks whose linger span has expired:
 * a chunk started on some page "lingers" for flowChunk.linger pages, and is
 * dropped once the current page reaches its last lingering page.
 * @return {void}
 */
adapt.ops.StyleInstance.prototype.processLinger = function() {
	var currentPage = this.currentLayoutPosition.page;
	var flowPositions = this.currentLayoutPosition.flowPositions;
	for (var flowName in flowPositions) {
		var positions = flowPositions[flowName].positions;
		// Iterate backwards so splicing does not disturb the remaining indices.
		for (var j = positions.length - 1; j >= 0; j--) {
			var chunk = positions[j].flowChunk;
			if (chunk.startPage >= 0 &&
					chunk.startPage + chunk.linger - 1 <= currentPage) {
				positions.splice(j, 1);
			}
		}
	}
};
/**
 * Stamps the current page number onto every not-yet-started flow chunk that
 * lies before the lookup offset, so its linger span can be tracked.
 * @return {void}
 */
adapt.ops.StyleInstance.prototype.initLingering = function() {
	var currentPage = this.currentLayoutPosition.page;
	var flowPositions = this.currentLayoutPosition.flowPositions;
	for (var flowName in flowPositions) {
		var positions = flowPositions[flowName].positions;
		for (var j = positions.length - 1; j >= 0; j--) {
			var chunk = positions[j].flowChunk;
			// startPage < 0 means the chunk has not been assigned a page yet.
			if (chunk.startPage < 0 && chunk.startOffset < this.lookupOffset) {
				chunk.startPage = currentPage;
			}
		}
	}
};
/**
 * Returns true when every primary flow in the given layout position has no
 * pending content left, i.e. the document is fully laid out.
 * @param {adapt.vtree.LayoutPosition} cp
 * @return {boolean}
 */
adapt.ops.StyleInstance.prototype.noMorePrimaryFlows = function(cp) {
	var flowName;
	for (flowName in this.primaryFlows) {
		var pending = cp.flowPositions[flowName];
		if (pending && pending.positions.length > 0) {
			// At least one primary flow still has content to place.
			return false;
		}
	}
	return true;
};
/**
 * Lays out the next page: selects a page master, fills it with content, and
 * returns the layout position to resume from — or null when all primary
 * content has been consumed.
 * @param {adapt.vtree.Page} page empty page to fill
 * @param {adapt.vtree.LayoutPosition|undefined} cp position to resume from
 *     (undefined for the first page)
 * @return {adapt.task.Result.<adapt.vtree.LayoutPosition>}
 */
adapt.ops.StyleInstance.prototype.layoutNextPage = function(page, cp) {
	var self = this;
	self.pageBreaks = {};
	if (cp) {
		// Resume: clone the position so the caller's copy stays intact.
		self.currentLayoutPosition = cp.clone();
		self.styler.replayFlowElementsFromOffset(cp.highestSeenOffset);
	} else {
		// First page: start from the beginning of the document.
		self.currentLayoutPosition = new adapt.vtree.LayoutPosition();
		self.styler.replayFlowElementsFromOffset(-1);
	}
	if (this.lang) {
		page.container.setAttribute("lang", this.lang);
	}
	cp = self.currentLayoutPosition;
	cp.page++;
	self.clearScope(self.style.pageScope);
	var pageMaster = self.selectPageMaster();
	if (!pageMaster) {
		// end of primary content
		return adapt.task.newResult(/** @type {adapt.vtree.LayoutPosition}*/ (null));
	}
	/** @type {!adapt.task.Frame.<adapt.vtree.LayoutPosition>} */ var frame
		= adapt.task.newFrame("layoutNextPage");
	self.layoutContainer(page, pageMaster, page.container, 0, 0, []).then(function() {
		// Record on which side (left/right spread) this page falls.
		var isLeftPage = new adapt.expr.Named(pageMaster.pageBox.scope, "left-page");
		page.side = isLeftPage.evaluate(self) ? vivliostyle.constants.PageSide.LEFT : vivliostyle.constants.PageSide.RIGHT;
		self.processLinger();
		self.currentLayoutPosition = null;
		cp.highestSeenOffset = self.styler.getReachedOffset();
		var triggers = self.style.store.getTriggersForDoc(self.xmldoc);
		page.finish(triggers);
		if (self.noMorePrimaryFlows(cp)) {
			cp = null;
		}
		frame.finish(cp);
	});
	return frame.result();
};
/**
 * Cascade-level CSS parser handler. Handles at-rules (page masters, flows,
 * font faces, regions, viewport, footnotes, ...) by routing them through
 * the shared master handler, usually by pushing a specialized sub-handler.
 * @param {adapt.ops.StyleParserHandler} masterHandler dispatching handler
 *     that owns the shared parsing state and accumulators
 * @param {adapt.expr.Val} condition condition under which rules parsed by
 *     this handler apply (may be null)
 * @param {adapt.ops.BaseParserHandler} parent enclosing handler, or null
 *     for the top-level cascade handler
 * @param {?string} regionId id of the enclosing region rule, if any
 * @constructor
 * @extends {adapt.csscasc.CascadeParserHandler}
 */
adapt.ops.BaseParserHandler = function(masterHandler, condition, parent, regionId) {
    // The final argument is !parent, i.e. true only when there is no
    // enclosing handler.
    adapt.csscasc.CascadeParserHandler.call(this, masterHandler.rootScope, masterHandler,
        condition, parent, regionId, masterHandler.validatorSet, !parent);
    /** @type {adapt.ops.StyleParserHandler} */ this.masterHandler = masterHandler;
    // Set by startRegionRule; consumed (and reset) by startRuleBody.
    /** @type {boolean} */ this.insideRegion = false;
};
goog.inherits(adapt.ops.BaseParserHandler, adapt.csscasc.CascadeParserHandler);
/**
 * Accepts page-template rules silently; the base class implementation
 * would register a parse error for this rule.
 * @override
 */
adapt.ops.BaseParserHandler.prototype.startPageTemplateRule = function() {
    // override, so we don't register an error
};
/**
 * Creates a page master for the rule and delegates parsing of the rule
 * body to a dedicated page-master handler.
 * @override
 */
adapt.ops.BaseParserHandler.prototype.startPageMasterRule = function(name, pseudoName, classes) {
    var master = this.masterHandler;
    var pageMaster = new adapt.pm.PageMaster(master.pageScope, name, pseudoName, classes,
        master.rootBox, this.condition, this.owner.getBaseSpecificity());
    var handler = new adapt.pm.PageMasterParserHandler(
        pageMaster.scope, master, pageMaster, this.validatorSet);
    master.pushHandler(handler);
};
/**
 * Pushes a nested handler whose condition is the "when" rule's expression,
 * AND-combined with any condition already in effect.
 * @override
 */
adapt.ops.BaseParserHandler.prototype.startWhenRule = function(conditionVal) {
    var combined = conditionVal.expr;
    if (this.condition != null) {
        // Nested rules apply only when the outer condition holds as well.
        combined = adapt.expr.and(this.scope, this.condition, combined);
    }
    var nested = new adapt.ops.BaseParserHandler(
        this.masterHandler, combined, this, this.regionId);
    this.masterHandler.pushHandler(nested);
};
/**
 * Delegates parsing of a "define" rule body to a DefineParserHandler.
 * @override
 */
adapt.ops.BaseParserHandler.prototype.startDefineRule = function() {
    var handler = new adapt.csscasc.DefineParserHandler(this.scope, this.owner);
    this.masterHandler.pushHandler(handler);
};
/**
 * Registers a new font-face entry (an empty property set paired with the
 * currently active condition) and delegates parsing of the rule's
 * declarations into that property set.
 * @override
 */
adapt.ops.BaseParserHandler.prototype.startFontFaceRule = function() {
    var properties = /** @type {adapt.csscasc.ElementStyle} */ ({});
    this.masterHandler.fontFaces.push({properties: properties, condition: this.condition});
    var handler = new adapt.csscasc.PropSetParserHandler(
        this.scope, this.owner, null, properties, this.masterHandler.validatorSet);
    this.masterHandler.pushHandler(handler);
};
/**
 * Looks up (or lazily creates) the property set for the named flow and
 * delegates parsing of the rule's declarations into it.
 * @override
 */
adapt.ops.BaseParserHandler.prototype.startFlowRule = function(flowName) {
    var flowStyle = this.masterHandler.flowProps[flowName];
    if (!flowStyle) {
        // First rule for this flow: create its property set on demand.
        flowStyle = /** @type {adapt.csscasc.ElementStyle} */ ({});
        this.masterHandler.flowProps[flowName] = flowStyle;
    }
    var handler = new adapt.csscasc.PropSetParserHandler(
        this.scope, this.owner, null, flowStyle, this.masterHandler.validatorSet);
    this.masterHandler.pushHandler(handler);
};
/**
 * Collects viewport declarations into a new property set (recorded on the
 * master handler) and delegates parsing of the rule body, passing along
 * the currently active condition.
 * @override
 */
adapt.ops.BaseParserHandler.prototype.startViewportRule = function() {
    var props = /** @type {adapt.csscasc.ElementStyle} */ ({});
    this.masterHandler.viewportProps.push(props);
    var handler = new adapt.csscasc.PropSetParserHandler(
        this.scope, this.owner, this.condition, props, this.masterHandler.validatorSet);
    this.masterHandler.pushHandler(handler);
};
/**
 * Delegates parsing of footnote declarations into the shared footnote
 * property set, or — when a pseudoelement is given — into that
 * pseudoelement's entry of the "_pseudos" sub-map (created on first use).
 * @override
 */
adapt.ops.BaseParserHandler.prototype.startFootnoteRule = function(pseudoelement) {
    var target = this.masterHandler.footnoteProps;
    if (pseudoelement) {
        var pseudoMap = adapt.csscasc.getMutableStyleMap(target, "_pseudos");
        if (!pseudoMap[pseudoelement]) {
            pseudoMap[pseudoelement] = /** @type {adapt.csscasc.ElementStyle} */ ({});
        }
        target = pseudoMap[pseudoelement];
    }
    var handler = new adapt.csscasc.PropSetParserHandler(
        this.scope, this.owner, null, target, this.masterHandler.validatorSet);
    this.masterHandler.pushHandler(handler);
};
/**
 * Marks that the selector rule being opened is a region rule; the actual
 * region bookkeeping (id allocation, nested handler) happens later in
 * startRuleBody, which reads and resets the insideRegion flag.
 * @override
 */
adapt.ops.BaseParserHandler.prototype.startRegionRule = function() {
    this.insideRegion = true;
    this.startSelectorRule();
};
/**
 * Hands parsing of an @page rule over to a dedicated page parser handler.
 * @override
 */
adapt.ops.BaseParserHandler.prototype.startPageRule = function() {
    var handler = new vivliostyle.page.PageParserHandler(
        this.masterHandler.pageScope, this.masterHandler, this, this.validatorSet);
    this.masterHandler.pushHandler(handler);
    handler.startPageRule();
};
/**
 * Starts the rule body. When the rule was opened by startRegionRule, also
 * allocates a fresh region id, attaches it to the selector, closes the
 * selector rule, and pushes a nested handler scoped to the new region.
 * @override
 */
adapt.ops.BaseParserHandler.prototype.startRuleBody = function() {
    adapt.csscasc.CascadeParserHandler.prototype.startRuleBody.call(this);
    if (!this.insideRegion) {
        return;
    }
    this.insideRegion = false;
    // Allocate a unique region identifier ("R0", "R1", ...) and attach it.
    var newRegionId = "R" + this.masterHandler.regionCount++;
    this.special("region-id", adapt.css.getName(newRegionId));
    this.endRule();
    // Declarations inside the region rule are parsed by a nested handler
    // bound to the new region id.
    var regionHandler = new adapt.ops.BaseParserHandler(
        this.masterHandler, this.condition, this, newRegionId);
    this.masterHandler.pushHandler(regionHandler);
    regionHandler.startRuleBody();
};
/**
 * Converts the content of a viewport meta element into an equivalent
 * "@-epubx-viewport" rule string.
 * @param {Element} meta element whose "content" attribute holds
 *     comma-separated name=value pairs
 * @return {string} the generated rule, or an empty string when no usable
 *     width/height pair is present
 */
adapt.ops.processViewportMeta = function(meta) {
    var content = meta.getAttribute("content");
    if (!content) {
        return "";
    }
    var pairs = {};
    var match;
    // Repeatedly consume "name=value" tokens (optionally comma-separated)
    // from the front of the attribute value.
    while ((match = content.match(/^,?\s*([-A-Za-z_.][-A-Za-z_0-9.]*)\s*=\s*([-+A-Za-z_0-9.]*)\s*/)) != null) {
        pairs[match[1]] = match[2];
        content = content.slice(match[0].length);
    }
    // Number(...) mirrors the original "- 0" coercion: non-numeric or
    // missing values become NaN (or 0 for ""), which fails the check below.
    var width = Number(pairs["width"]);
    var height = Number(pairs["height"]);
    if (width && height) {
        return "@-epubx-viewport{width:" + width + "px;height:" + height + "px;}";
    }
    return "";
};
/**
 * Top-level dispatching handler for stylesheet parsing. Owns the lexical
 * scopes, the root page box, and the accumulators (font faces, footnote /
 * flow / viewport properties) that BaseParserHandler instances fill in
 * while parsing. Parsing starts with the cascade handler as the active
 * slave handler.
 * @param {adapt.cssvalid.ValidatorSet} validatorSet
 * @constructor
 * @extends {adapt.cssparse.DispatchParserHandler}
 */
adapt.ops.StyleParserHandler = function(validatorSet) {
    adapt.cssparse.DispatchParserHandler.call(this);
    /** @const */ this.validatorSet = validatorSet;
    /** @const */ this.rootScope = new adapt.expr.LexicalScope(null);
    // Page scope is nested inside the root scope.
    /** @const */ this.pageScope = new adapt.expr.LexicalScope(this.rootScope);
    /** @const */ this.rootBox = new adapt.pm.RootPageBox(this.rootScope);
    /** @const */ this.cascadeParserHandler =
        new adapt.ops.BaseParserHandler(this, null, null, null);
    // Counter used by startRuleBody to generate region ids ("R0", "R1", ...).
    /** @type {number} */ this.regionCount = 0;
    /** @const */ this.fontFaces = /** @type {Array.<adapt.ops.FontFace>} */ ([]);
    /** @const */ this.footnoteProps = /** @type {adapt.csscasc.ElementStyle} */ ({});
    /** @const */ this.flowProps = /** @type {Object.<string,adapt.csscasc.ElementStyle>} */ ({});
    /** @const */ this.viewportProps = /** @type {Array.<adapt.csscasc.ElementStyle>} */ ([]);
    this.slave = this.cascadeParserHandler;
};
goog.inherits(adapt.ops.StyleParserHandler, adapt.cssparse.DispatchParserHandler);
/**
 * Reports a CSS parse error. The error is only logged; parsing continues.
 * The offending token is intentionally ignored.
 * @override
 */
adapt.ops.StyleParserHandler.prototype.error = function(mnemonics, token) {
    var message = "CSS parser: " + mnemonics;
    adapt.base.log(message);
};
/**
 * Descriptor of one stylesheet to be parsed: either an external sheet
 * (url with text == null) or inline text, together with its cascade
 * flavor, the class attribute of an alternate-stylesheet link (if any),
 * and its media attribute.
 * @typedef {{
 * 	url: string,
 * 	text: ?string,
 * 	flavor: adapt.cssparse.StylesheetFlavor,
 * 	classes: ?string,
 * 	media: ?string
 * }}
 */
adapt.ops.StyleSource;
/**
 * Parser entry point used when constructing an OPSDocStore: forwards a
 * fetched response to the store's own parseOPSResource implementation.
 * @param {adapt.net.Response} response
 * @param {adapt.xmldoc.XMLDocStore} store
 * @return {!adapt.task.Result.<!adapt.xmldoc.XMLDocHolder>}
 */
adapt.ops.parseOPSResource = function(response, store) {
    var opsStore = /** @type {adapt.ops.OPSDocStore} */ (store);
    return opsStore.parseOPSResource(response);
};
/**
 * Resource store for OPS documents. Caches parsed documents and the Style
 * computed for each document; Styles are additionally shared between
 * documents whose stylesheet sources are identical (keyed by source set).
 * @param {?function(string):?function(Blob):adapt.task.Result.<Blob>} fontDeobfuscator
 *     optional factory returning a deobfuscator for a given font URL
 * @constructor
 * @extends {adapt.xmldoc.XMLDocStore}
 */
adapt.ops.OPSDocStore = function(fontDeobfuscator) {
    adapt.net.ResourceStore.call(this, adapt.ops.parseOPSResource, false);
    /** @type {?function(string):?function(Blob):adapt.task.Result.<Blob>} */ this.fontDeobfuscator = fontDeobfuscator;
    // Styles cached by stylesheet-set key (see parseOPSResource).
    /** @type {Object.<string,adapt.ops.Style>} */ this.styleByKey = {};
    // In-flight style fetchers, also keyed by stylesheet-set key, so that
    // concurrent documents with the same sources share one parse.
    /** @type {Object.<string,adapt.taskutil.Fetcher.<adapt.ops.Style>>} */ this.styleFetcherByKey = {};
    /** @type {Object.<string,adapt.ops.Style>} */ this.styleByDocURL = {};
    /** @type {Object.<string,Array.<adapt.vtree.Trigger>>} */ this.triggersByDocURL = {};
    // Populated by init().
    /** @type {adapt.cssvalid.ValidatorSet} */ this.validatorSet = null;
    /** @private @const @type {Array.<adapt.ops.StyleSource>} */ this.userStyleSheets = [];
};
goog.inherits(adapt.ops.OPSDocStore, adapt.net.ResourceStore);
/**
 * Initializes the store: loads the CSS validator set, then the UA base
 * cascade, then the bundled user-agent XML resource, in that order.
 * @return {!adapt.task.Result.<boolean>} resolves to true when done
 */
adapt.ops.OPSDocStore.prototype.init = function() {
    var userAgentXML = adapt.base.resolveURL("user-agent.xml", adapt.base.resourceBaseURL);
    var frame = adapt.task.newFrame("OPSDocStore.init");
    var self = this;
    adapt.cssvalid.loadValidatorSet().then(function(validatorSet) {
        self.validatorSet = validatorSet;
        adapt.csscasc.loadUABase().then(function() {
            self.load(userAgentXML).then(function() {
                frame.finish(true);
            });
        });
    });
    return frame.result();
};
/**
 * Returns the Style previously computed for the given document, if any.
 * @param {adapt.xmldoc.XMLDocHolder} xmldoc
 * @return {adapt.ops.Style}
 */
adapt.ops.OPSDocStore.prototype.getStyleForDoc = function(xmldoc) {
    var docURL = xmldoc.url;
    return this.styleByDocURL[docURL];
};
/**
 * Returns the EPUB triggers collected for the given document, if any.
 * @param {adapt.xmldoc.XMLDocHolder} xmldoc
 * @return {Array.<adapt.vtree.Trigger>}
 */
adapt.ops.OPSDocStore.prototype.getTriggersForDoc = function(xmldoc) {
    var docURL = xmldoc.url;
    return this.triggersByDocURL[docURL];
};
/**
 * Registers a user stylesheet (by URL or as inline text) to be applied to
 * every document parsed by this store, with USER cascade flavor.
 * @param {{url: ?string, text: ?string}} stylesheet
 */
adapt.ops.OPSDocStore.prototype.addUserStyleSheet = function(stylesheet) {
    var source = {
        url: stylesheet.url,
        text: stylesheet.text,
        flavor: adapt.cssparse.StylesheetFlavor.USER,
        classes: null,
        media: null
    };
    this.userStyleSheets.push(source);
};
/**
 * Parses an OPS resource (XHTML, FB2 or SSE document): records its EPUB
 * trigger elements, gathers every applicable stylesheet source (user
 * agent, user, and author styles referenced from the document head), and
 * resolves the document's Style, reusing a cached Style when the same set
 * of stylesheet sources has already been processed.
 * @param {adapt.net.Response} response
 * @return {!adapt.task.Result.<!adapt.xmldoc.XMLDocHolder>}
 */
adapt.ops.OPSDocStore.prototype.parseOPSResource = function(response) {
    /** @type {!adapt.task.Frame.<!adapt.xmldoc.XMLDocHolder>} */ var frame
        = adapt.task.newFrame("OPSDocStore.load");
    var self = this;
    var url = response.url;
    adapt.xmldoc.parseXMLResource(response, self).then(function(xmldoc) {
        // Collect complete epub:trigger elements (observer/event/action/ref).
        var triggers = [];
        var triggerList = xmldoc.document.getElementsByTagNameNS(adapt.base.NS.epub, "trigger");
        for (var i = 0; i < triggerList.length; i++) {
            var triggerElem = triggerList[i];
            var observer = triggerElem.getAttributeNS(adapt.base.NS.EV, "observer");
            var event = triggerElem.getAttributeNS(adapt.base.NS.EV, "event");
            var action = triggerElem.getAttribute("action");
            var ref = triggerElem.getAttribute("ref");
            if (observer && event && action && ref) {
                triggers.push({observer:observer, event:event, action:action, ref:ref});
            }
        }
        self.triggersByDocURL[url] = triggers;
        // Stylesheet sources are collected in cascade order:
        // user agent, then user stylesheets, then author styles from <head>.
        var sources = /** @type {Array.<adapt.ops.StyleSource>} */ ([]);
        var userAgentURL = adapt.base.resolveURL("user-agent-page.css", adapt.base.resourceBaseURL);
        sources.push({url: userAgentURL, text:null,
            flavor:adapt.cssparse.StylesheetFlavor.USER_AGENT, classes: null, media: null});
        for (var i = 0; i < self.userStyleSheets.length; i++) {
            sources.push(self.userStyleSheets[i]);
        }
        var head = xmldoc.head;
        if (head) {
            for (var c = head.firstChild ; c ; c = c.nextSibling) {
                if (c.nodeType != 1)
                    continue;
                var child = /** @type {Element} */ (c);
                var ns = child.namespaceURI;
                var localName = child.localName;
                if (ns == adapt.base.NS.XHTML) {
                    if (localName == "style") {
                        // Inline <style> element.
                        sources.push({url:url, text:child.textContent,
                            flavor:adapt.cssparse.StylesheetFlavor.AUTHOR, classes: null, media: null});
                    } else if (localName == "link") {
                        var rel = child.getAttribute("rel");
                        var classes = child.getAttribute("class");
                        var media = child.getAttribute("media");
                        if (rel == "stylesheet" || (rel == "alternate stylesheet" && classes)) {
                            var src = child.getAttribute("href");
                            src = adapt.base.resolveURL(src, url);
                            sources.push({url:src, text:null, classes: classes, media: media,
                                flavor:adapt.cssparse.StylesheetFlavor.AUTHOR});
                        }
                    } else if (localName == "meta" && child.getAttribute("name") == "viewport") {
                        // Bug fix: this entry previously set a stray
                        // "condition" property; adapt.ops.StyleSource defines
                        // "classes", which is passed to the stylesheet parsers.
                        sources.push({url:url, text: adapt.ops.processViewportMeta(child),
                            flavor:adapt.cssparse.StylesheetFlavor.AUTHOR, classes: null, media: null});
                    }
                } else if (ns == adapt.base.NS.FB2) {
                    if (localName == "stylesheet" && child.getAttribute("type") == "text/css") {
                        sources.push({url:url, text:child.textContent,
                            flavor:adapt.cssparse.StylesheetFlavor.AUTHOR, classes: null, media: null});
                    }
                } else if (ns == adapt.base.NS.SSE && localName === "property") {
                    // look for stylesheet specification like:
                    // <property><name>stylesheet</name><value>style.css</value></property>
                    var name = child.getElementsByTagName("name")[0];
                    if (name && name.textContent === "stylesheet") {
                        var value = child.getElementsByTagName("value")[0];
                        if (value) {
                            // Distinct variable name avoids redeclaring the
                            // "src" var used in the XHTML <link> branch above.
                            var sseHref = adapt.base.resolveURL(value.textContent, url);
                            sources.push({
                                url: sseHref, text: null, classes: null, media: null,
                                flavor: adapt.cssparse.StylesheetFlavor.AUTHOR
                            });
                        }
                    }
                }
            }
        }
        // Key identifying this exact set of stylesheet sources; lets
        // documents with identical sources share one computed Style.
        var key = "";
        for (var i = 0; i < sources.length; i++) {
            key += sources[i].url;
            key += "^";
            if (sources[i].text) {
                key += sources[i].text;
            }
            key += "^";
        }
        var style = self.styleByKey[key];
        if (style) {
            self.styleByDocURL[url] = style;
            frame.finish(xmldoc);
            return;
        }
        // Share an in-flight fetcher so concurrent loads of documents with
        // the same stylesheet set trigger only one parse.
        var fetcher = self.styleFetcherByKey[key];
        if (!fetcher) {
            fetcher = new adapt.taskutil.Fetcher(function() {
                /** @type {!adapt.task.Frame.<adapt.ops.Style>} */ var innerFrame
                    = adapt.task.newFrame("fetchStylesheet");
                var index = 0;
                var sph = new adapt.ops.StyleParserHandler(self.validatorSet);
                innerFrame.loop(function() {
                    // Parse the sources one at a time, in cascade order.
                    if (index < sources.length) {
                        var source = sources[index++];
                        sph.startStylesheet(source.flavor);
                        if (source.text) {
                            return adapt.cssparse.parseStylesheetFromText(source.text, sph, source.url, source.classes, source.media);
                        } else {
                            return adapt.cssparse.parseStylesheetFromURL(source.url, sph, source.classes, source.media);
                        }
                    }
                    return adapt.task.newResult(false);
                }).then(function() {
                    var cascade = sph.cascadeParserHandler.finish();
                    style = new adapt.ops.Style(self, sph.rootScope, sph.pageScope, cascade, sph.rootBox,
                        sph.fontFaces, sph.footnoteProps, sph.flowProps, sph.viewportProps);
                    self.styleByKey[key] = style;
                    delete self.styleFetcherByKey[key];
                    innerFrame.finish(style);
                });
                return innerFrame.result();
            }, "FetchStylesheet " + url);
            self.styleFetcherByKey[key] = fetcher;
            fetcher.start();
        }
        fetcher.get().then(function(style) {
            self.styleByDocURL[url] = style;
            frame.finish(xmldoc);
        });
    });
    return frame.result();
};
| {
"content_hash": "c4c70195a99ac4ea00df5c70a592348f",
"timestamp": "",
"source": "github",
"line_count": 1114,
"max_line_length": 144,
"avg_line_length": 40.9524236983842,
"alnum_prop": 0.6464128361938581,
"repo_name": "zopyx/vivliostyle.js",
"id": "9e590b1be0c179932728f3e3448d4f6dcc9dbc27",
"size": "45783",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/adapt/ops.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "18927"
},
{
"name": "HTML",
"bytes": "2965"
},
{
"name": "JavaScript",
"bytes": "927560"
},
{
"name": "Ruby",
"bytes": "248"
},
{
"name": "Shell",
"bytes": "1674"
}
],
"symlink_target": ""
} |
/*
 * Input used in the add-friend form: squared-off corners, with the left
 * and right borders removed so only the top and bottom edges are drawn.
 */
:local(.addFriendInput) {
  border-color: #ABAAAA;
  border-left: 0;
  border-radius: 0;
  border-right: 0;
}
| {
"content_hash": "dc3bedcd4653ec14899c4989022a43a4",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 25,
"avg_line_length": 19.333333333333332,
"alnum_prop": 0.6206896551724138,
"repo_name": "davloal/friendList",
"id": "c5e8c07a70dd74c51c704f2c3eb03e75dd3ec616",
"size": "116",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/components/AddFriendInput.css",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1124"
},
{
"name": "HTML",
"bytes": "466"
},
{
"name": "JavaScript",
"bytes": "8944"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.