text stringlengths 2 1.04M | meta dict |
|---|---|
/**
* "First, solve the problem. Then, write the code. -John Johnson"
* "Or use Vangav M"
* www.vangav.com
* */
/**
* MIT License
*
* Copyright (c) 2016 Vangav
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to
* deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
* */
/**
* Community
* Facebook Group: Vangav Open Source - Backend
* fb.com/groups/575834775932682/
* Facebook Page: Vangav
* fb.com/vangav.f
*
* Third party communities for Vangav Backend
* - play framework
* - cassandra
* - datastax
*
* Tag your question online (e.g.: stack overflow, etc ...) with
* #vangav_backend
* to easier find questions/answers online
* */
package com.vangav.backend.vangav_m.json_client;
import java.io.File;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import com.vangav.backend.exceptions.VangavException;
import com.vangav.backend.files.FileLoaderInl;
import com.vangav.backend.files.FileWriterInl;
import com.vangav.backend.networks.rest_client.RestSyncInl;
import com.vangav.backend.system.CommandLineInl;
import com.vangav.backend.vangav_m.json_client.json.VangavMSolutionJson;
/**
* @author mustapha
* fb.com/mustapha.abdallah
*/
/**
* IMPORTANT: this client is tuned to work with generated Vangav Backends
* and Workers
* for a more generic client check out www.vangav.com
* This client is compiled into a jar under
* tools_bin/assets/vangav_m_json_client.jar
* and is automatically added to newly generated Vangav Backends and Workers
* VangavMJsonClientMain handles generating Vangav M solutions through the
* following steps
* 1- load available solutions
* 2- verify loaded solutions
* 3- clear old generated solutions
* 4- generate solutions
* 5- extract generated solutions
* 6- clear old class path links in .classpath
* 7- link solutions to .classpath
* 8- clean
* */
public class VangavMJsonClientMain {
// Vangav M web API endpoint; each solution's JSON is POSTed here and the
// response body is a zip containing the generated solution
private static final String kVangavMApiLink =
"http://mapi.vangav.com/Solution";
// lines starting with this character in .mlang files are treated as comments
private static final char kCommentLinePrefix =
'#';
// directory (relative to the working directory) holding the .mlang solutions
private static final String kSolutionsDir =
"solutions";
// directory where generated solutions are downloaded and extracted
private static final String kGeneratedSolutions =
"solutions/generated_solutions";
// temporary backup of previously generated solutions; restored on failure
// by resetSolutionsGeneration
private static final String kGeneratedSolutionsTmp =
"solutions/tmp_generated_solutions";
// file extension of Vangav M solution definition files
private static final String kMlangExt =
".mlang";
// prefix used for generated solution zip/jar/directory names
private static final String kSolutionPrefix =
"vangav_m_";
private static final String kZipExt =
".zip";
// eclipse .classpath file of the enclosing project (one directory up)
private static final String kClassPathFilePath =
"../.classpath";
// bash script template for unzipping one solution; %s is the zip file name
private static final String kExtractionScriptFormat =
"#!/bin/bash\n\n"
+ "cd "
+ kGeneratedSolutions
+ "\n\n"
+ "unzip %s\n";
// the extraction script is (re)written per solution at this path
private static final String kExtractionScriptPath =
kGeneratedSolutions
+ "/solution_extraction.sh";
private static final String kJarExt =
".jar";
// project's lib directory into which solution jars are copied (one dir up)
private static final String kLibsPath =
"../lib";
// .classpath entry template; %s is the solution's name
private static final String kClassPathEntryFormat =
" <classpathentry kind=\"lib\" path=\"lib/vangav_m_%s.jar\"/>\n";
/**
* generateVangavMSolutions
* loads the .mlang solution files, verifies them, posts each one to the
* Vangav M API, extracts the returned zips, copies the solution jars into
* the project's lib directory and links them in the eclipse .classpath;
* on any failure the previous generated solutions are restored and the
* process exits
* @throws Exception
*/
private static void generateVangavMSolutions () throws Exception {
// 1/2- load and verify available solutions
// file name --> file content
Map<String, String> solutionsFiles =
FileLoaderInl.loadTextFilesWithoutComments(
kCommentLinePrefix,
kSolutionsDir,
kMlangExt);
// solution name --> solution json
Map<String, VangavMSolutionJson> solutions =
new HashMap<String, VangavMSolutionJson>();
// solution name --> solution file name
Map<String, String> solutionsFileNames =
new HashMap<String, String>();
VangavMSolutionJson currSolution;
System.out.println(
"Loading and verifying solutions ...\n");
for (String fileName : solutionsFiles.keySet() ) {
// parse solution into JSON Object
currSolution =
(VangavMSolutionJson)new VangavMSolutionJson().fromJsonString(
solutionsFiles.get(fileName) );
try {
// verify solution's json
currSolution.verify();
} catch (Exception e) {
// best effort: print the detailed reason only when the thrown exception
// is a VangavException; the inner try/catch guards the downcast
try {
VangavException vangavException = (VangavException)e;
System.err.println(
"Solution file ["
+ fileName
+ "] is invalid because of:\n"
+ vangavException.toString() );
} catch (Exception e2) {}
System.err.println(
"\nCancelling Vangav M solution's generation, fix solution file ["
+ fileName
+ "] and try again ....\n");
// NOTE(review): exits with status 0 on failure, so calling scripts
// cannot detect the error from the exit code
System.exit(0);
}
// duplicate solution name?
if (solutions.containsKey(currSolution.name) == true) {
System.err.println(
"Solution name ["
+ currSolution.name
+ "] is duplicate in solutions files ["
+ solutionsFileNames.get(currSolution.name)
+ "] and ["
+ fileName
+ "]. Solutions names must be unique per solution, fix that and "
+ "try again ....\n");
System.exit(0);
}
// solution verified, store it in memory
solutions.put(currSolution.name, currSolution);
solutionsFileNames.put(currSolution.name, fileName);
}
// 3- clear old generated solutions
// ensure both the generated and tmp directories exist, then move the
// current generated solutions into tmp as a backup (restored on failure)
if (FileLoaderInl.fileExists(kGeneratedSolutions) == false) {
FileWriterInl.mkdirs(
kGeneratedSolutions,
false);
}
if (FileLoaderInl.fileExists(kGeneratedSolutionsTmp) == false) {
FileWriterInl.mkdirs(
kGeneratedSolutionsTmp,
false);
}
CommandLineInl.executeCommand(
"rm -r -f "
+ kGeneratedSolutionsTmp);
CommandLineInl.executeCommand(
"mv "
+ kGeneratedSolutions
+ " "
+ kGeneratedSolutionsTmp);
// 4- generate solutions
// make generated solutions directory
FileWriterInl.mkdirs(
kGeneratedSolutions,
false);
URLConnection currURLConnection;
String currSolutionPath;
for (String solutionName : solutions.keySet() ) {
System.out.println(
"Generating solution ["
+ solutionName
+ "] @["
+ solutionsFileNames.get(solutionName)
+ "] ...");
// POST the solution's JSON to the Vangav M API; the response body is
// the generated solution as a zip
currURLConnection =
RestSyncInl.restCall(
kVangavMApiLink,
solutions.get(solutionName) );
if (RestSyncInl.isResponseStatusSuccess(currURLConnection) == false) {
// restore the previous generated solutions before bailing out
resetSolutionsGeneration();
System.err.println(
"Failed to generate solution [name: "
+ solutionName
+ "] from solution file ["
+ solutionsFileNames.get(solutionName)
+ "], API returned response status code ["
+ RestSyncInl.getResponseStatus(currURLConnection)
+ "]. Cancelling Vangav M solution's generation, fix problematic "
+ "solution and try again ....\n");
System.exit(0);
}
currSolutionPath =
kGeneratedSolutions
+ "/"
+ kSolutionPrefix
+ solutionName
+ kZipExt;
try {
RestSyncInl.writeResponseFile(
currSolutionPath,
currURLConnection,
false);
} catch (Exception e) {
resetSolutionsGeneration();
System.err.println(
"Failed to generate solution [name: "
+ solutionName
+ "] from solution file ["
+ solutionsFileNames.get(solutionName)
+ "], failed to write solution file ["
+ currSolutionPath
+ "]. Cancelling Vangav M solution's generation, fix problematic "
+ "solution and try again ....\n\n"
+ "Exception stack trace:\n"
+ VangavException.getExceptionStackTrace(e) );
System.exit(0);
}
}
// 5- extract generated solutions
String currSolutionZipFileName;
String currSolutionZipFilePath;
String currSolutionJarPath;
String currSolutionJarDestPath;
// delete current vangav m solutions' lib jars
File[] libFiles =
FileLoaderInl.loadFiles(
kLibsPath,
kJarExt);
if (libFiles != null) {
for (File libFile : libFiles) {
if (libFile.getName().contains(kSolutionPrefix) == true) {
libFile.delete();
}
}
}
// extract solutions and copy jars to lib directory
// a fresh extraction script is written, chmod-ed, run and deleted per
// solution zip
for (String solutionName : solutions.keySet() ) {
currSolutionZipFileName =
kSolutionPrefix
+ solutionName
+ kZipExt;
// write extraction script
FileWriterInl.writeTextFile(
String.format(
kExtractionScriptFormat,
currSolutionZipFileName),
kExtractionScriptPath,
false);
// make extraction script executable
CommandLineInl.executeCommand(
"chmod +x "
+ kExtractionScriptPath);
// execute extraction script
CommandLineInl.executeCommand(kExtractionScriptPath);
// delete script file
CommandLineInl.executeCommand(
"rm "
+ kExtractionScriptPath);
currSolutionZipFilePath =
kGeneratedSolutions
+ "/"
+ kSolutionPrefix
+ solutionName
+ kZipExt;
// delete solution's zip file
CommandLineInl.executeCommand(
"rm "
+ currSolutionZipFilePath);
// copy solution's jar to project's lib directory
currSolutionJarPath =
kGeneratedSolutions
+ "/"
+ kSolutionPrefix
+ solutionName
+ "/"
+ kSolutionPrefix
+ solutionName
+ kJarExt;
currSolutionJarDestPath =
kLibsPath
+ "/"
+ kSolutionPrefix
+ solutionName
+ kJarExt;
CommandLineInl.executeCommand(
"cp "
+ currSolutionJarPath
+ " "
+ currSolutionJarDestPath);
}
// 6- clear old class path links in .classpath
// drop every existing vangav_m_* lib entry; fresh entries are added below
ArrayList<String> classPathLines =
FileLoaderInl.loadTextFileLines(kClassPathFilePath);
ArrayList<String> classPathLinesNew = new ArrayList<String>();
for (int i = 0; i < classPathLines.size(); i ++) {
if (classPathLines.get(i).contains(
"path=\"lib/vangav_m_") == false) {
classPathLinesNew.add(classPathLines.get(i) );
}
}
// 7- link solutions to .classpath
System.out.println(
"\nLinking solutions ...");
ArrayList<String> classPathNewLinks = new ArrayList<String>();
for (String solutionName : solutions.keySet() ) {
classPathNewLinks.add(
String.format(
kClassPathEntryFormat,
solutionName) );
}
// insert the new entries right after the test-source classpath entry;
// NOTE(review): assumes that entry exists in .classpath - if absent, no
// links are added
for (int i = 0; i < classPathLinesNew.size(); i ++) {
if (classPathLinesNew.get(i).contains(
"kind=\"src\" output=\".target\" path=\"test\"/>") == true) {
classPathLinesNew.addAll(
i + 1,
classPathNewLinks);
break;
}
}
FileWriterInl.writeTextFile(
classPathLinesNew,
kClassPathFilePath,
false,
false);
// 8- clean
// success: the tmp backup of the old solutions is no longer needed
CommandLineInl.executeCommand(
"rm -r -f "
+ kGeneratedSolutionsTmp);
System.out.println(
"\nFinished, have fun!\n");
}
/**
* resetSolutionsGeneration
* used when solutions generation fails midway; deletes the partially
* generated solutions and restores the previous ones from the tmp backup
* @throws Exception
*/
private static void resetSolutionsGeneration () throws Exception {
CommandLineInl.executeCommand(
"rm -r -f "
+ kGeneratedSolutions);
CommandLineInl.executeCommand(
"mv "
+ kGeneratedSolutionsTmp
+ " "
+ kGeneratedSolutions);
}
/**
* main
* prints the Vangav M banner then runs the full generation pipeline
* @param args - no arguments needed
* @throws Exception
*/
public static void main (String[] args) throws Exception {
System.out.println(
"\n\no o o o \n"
+ "8 8 8b d8 \n"
+ "8 8 .oPYo. odYo. .oPYo. .oPYo. o o 8`b d'8 \n"
+ "`b d' .oooo8 8' `8 8 8 .oooo8 Y. .P 8 `o' 8 \n"
+ " `b d' 8 8 8 8 8 8 8 8 `b..d' 8 8 \n"
+ " `8' `YooP8 8 8 `YooP8 `YooP8 `YP' 8 8 \n"
+ ":::..::::.....:..::..:....8 :.....:::...::::..::::..\n"
+ ":::::::::::::::::::::::ooP'.::::::::::::::::::::::::\n"
+ ":::::::::::::::::::::::...::::::::::::::::::::::::::\n\n");
System.out.println(
"Vangav M Generator\n"
+ "www.vangav.com\n\n"
+ "Vangav M\n"
+ "Code. Mighty.\n\n");
generateVangavMSolutions();
}
}
| {
"content_hash": "0aa57d73c64a08d2fbcfcf793bda9dc2",
"timestamp": "",
"source": "github",
"line_count": 491,
"max_line_length": 79,
"avg_line_length": 28.513238289205702,
"alnum_prop": 0.5967142857142858,
"repo_name": "vangav/vos_backend",
"id": "f9250d0862cbf0889cf62833d9a61305ec9db371",
"size": "14000",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/com/vangav/backend/vangav_m/json_client/VangavMJsonClientMain.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "31912"
},
{
"name": "CSS",
"bytes": "68697"
},
{
"name": "CoffeeScript",
"bytes": "28149"
},
{
"name": "HTML",
"bytes": "140969"
},
{
"name": "Java",
"bytes": "2403319"
},
{
"name": "JavaScript",
"bytes": "1130298"
},
{
"name": "PowerShell",
"bytes": "37758"
},
{
"name": "Python",
"bytes": "312883"
},
{
"name": "Scala",
"bytes": "1906531"
},
{
"name": "Shell",
"bytes": "58628"
},
{
"name": "TSQL",
"bytes": "208586"
},
{
"name": "Thrift",
"bytes": "40240"
}
],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using System.Text.RegularExpressions;
using HtmlAgilityPack;
namespace InstagramDownloader
{
public partial class Form1 : Form
{
    // Page to scrape. Reassigned to the "Load more" pagination URL once one
    // is found in the loaded document (see webBrowser1_DocumentCompleted).
    public static string _URL = "https://www.instagram.com/explore/tags/cornmarket/?hl=en";
    //public static string _URL = "https://www.instagram.com/justinbieber/?hl=en";

    public Form1()
    {
        InitializeComponent();
    }

    // Starts the scrape: focuses the browser control and navigates to _URL.
    private void btnRun_Click(object sender, EventArgs e)
    {
        webBrowser1.Select();
        webBrowser1.Navigate(_URL);
    }

    // Fired each time the browser finishes loading a document. Scans the
    // page's divs for the one containing the "Load more" link, extracts the
    // link's href (up to and including "max_id=<digits>") with a regex, and
    // stores the resulting pagination URL in _URL. Once a paginated URL is
    // active, counts the images and sends {End} so the infinite-scroll page
    // loads more content.
    private void webBrowser1_DocumentCompleted(object sender, WebBrowserDocumentCompletedEventArgs e)
    {
        label1.Text = "Complete";
        string htmlElementString = "";
        HtmlElementCollection divs = webBrowser1.Document.GetElementsByTagName("div");
        foreach (HtmlElement div in divs)
        {
            //System.Diagnostics.Debug.WriteLine(div.GetAttribute("name").ToString());
            if (div.InnerHtml != null)
            {
                if (div.InnerHtml.Contains("Load more"))
                {
                    //System.Diagnostics.Debug.WriteLine(div.InnerHtml);
                    // keep the LAST matching div's HTML if several match
                    htmlElementString = div.InnerHtml;
                }
            }
        }
        // Parse the string to extract the Load more url
        //string regexPattern = "(href=\")(.*?)max_id(.*?)+\">Load more</a>";
        // lookbehind for href=" then capture everything through max_id=<digits>
        string regexPattern = "(?<=href\\s*=\\s*\")(?:(?<1>[^\"']*)max_id=\\d+)";
        Regex rx = new Regex(regexPattern);
        Match match = rx.Match(htmlElementString);
        if (match.Success)
        {
            // build an absolute URL from the site root plus the matched href
            _URL = "http://www.instagram.com";
            System.Diagnostics.Debug.WriteLine(match.Groups[0].Value);
            _URL += match.Groups[0].Value;
            //webBrowser1.Navigate(_URL);
        }
        //else
        //{
        //    System.Diagnostics.Debug.WriteLine("No match :(");
        //}
        if (_URL.Contains("max_id"))
        {
            // Loop through and count the number of images
            int numImages = webBrowser1.Document.Images.Count;
            System.Diagnostics.Debug.WriteLine("# Images: " + numImages.ToString());
            System.Diagnostics.Debug.WriteLine("URL: " + _URL);
            webBrowser1.Document.Focus();
            // scroll to the bottom to trigger the page's infinite scroll
            SendKeys.Send("{End}");
            //System.Threading.Thread.Sleep(1000);
            //System.Threading.Thread.Sleep(100);
        }
    }

    private void Form1_Load(object sender, EventArgs e)
    {
        webBrowser1.Focus();
    }
}
}
| {
"content_hash": "b9d94f18a336e3a70edd9a8160c69dbc",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 105,
"avg_line_length": 31.73404255319149,
"alnum_prop": 0.5353670801206839,
"repo_name": "techdude101/C-Sharp",
"id": "347b699a61c631f2c958861e510fe6861bec5765",
"size": "2985",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "InstagramDownloader/InstagramDownloader/Form1.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "44232"
},
{
"name": "C",
"bytes": "147"
},
{
"name": "C#",
"bytes": "515539"
},
{
"name": "C++",
"bytes": "73615"
},
{
"name": "CSS",
"bytes": "26771"
},
{
"name": "HTML",
"bytes": "31984"
},
{
"name": "JavaScript",
"bytes": "264"
},
{
"name": "PowerShell",
"bytes": "3137"
},
{
"name": "Visual Basic",
"bytes": "88527"
}
],
"symlink_target": ""
} |
import re
import traceback
from enum import IntEnum, auto
from contextlib import suppress
from dataclasses import dataclass
from datetime import datetime
from pathlib import Path
from typing import Pattern, Union, Optional, Tuple
from dateutil.parser import parse as parse_date
from dateutil.tz import UTC
import pandas as pd
from google.cloud import firestore
from tqdm.auto import tqdm
from astropy import units as u
from astropy.coordinates import SkyCoord
from astropy.time import Time
from astropy.io.fits.header import Header
from astropy.utils.data import download_file
from loguru import logger
from panoptes.utils.utils import listify
from panoptes.utils.time import current_time, flatten_time
from panoptes.utils.images import fits as fits_utils
class SequenceStatus(IntEnum):
    """Upload/processing status of an observation sequence."""
    RECEIVING = 0
    RECEIVED = 10
class ImageStatus(IntEnum):
    """Pipeline status of a single image.

    Values increase with pipeline progress, so later stages compare greater
    than earlier ones; negative values are terminal non-success states.
    """
    ERROR = -10
    MASKED = -5
    UNKNOWN = -1
    RECEIVING = 0
    RECEIVED = 5
    CALIBRATING = 10
    CALIBRATED = 15
    SOLVING = 20
    SOLVED = 25
    MATCHING = 30
    MATCHED = 35
    EXTRACTING = 40
    EXTRACTED = 45
class ObservationStatus(IntEnum):
    """Pipeline status of an observation (a sequence of images).

    Values increase with pipeline progress; negative values are terminal
    non-success states.
    """
    ERROR = -10
    NOT_ENOUGH_FRAMES = -2
    UNKNOWN = -1
    CREATED = 0
    CALIBRATED = 10
    MATCHED = 20
    PROCESSING = 30
    PROCESSED = 35
# Public storage locations: per-sequence metadata parquet files live under
# OBS_BASE_URL; OBSERVATIONS_URL is a static CSV index of all observations.
OBS_BASE_URL = 'https://storage.googleapis.com/panoptes-observations'
OBSERVATIONS_URL = 'https://storage.googleapis.com/panoptes-exp.appspot.com/observations.csv'
# Parses image storage paths of the form
#   <anything>PANnnn[/<field name>]/<camera_id>/<sequence_time>/<image_time><anything>
# into named groups; used by ObservationPathInfo below.
PATH_MATCHER: Pattern[str] = re.compile(r"""^
(?P<pre_info>.*)? # Anything before unit_id
(?P<unit_id>PAN\d{3}) # unit_id - PAN + 3 digits
/?(?P<field_name>.*)? # Legacy field name - any
/(?P<camera_id>[a-gA-G0-9]{6}) # camera_id - 6 digits
/(?P<sequence_time>[0-9]{8}T[0-9]{6}) # Observation start time
/(?P<image_time>[0-9]{8}T[0-9]{6}) # Image start time
(?P<post_info>.*)? # Anything after (file ext)
$""",
re.VERBOSE)
@dataclass
class ObservationPathInfo:
    """Parse the location path for an image.

    This is a small dataclass that offers some convenience methods for dealing
    with a path based on the image id.

    This would usually be instantiated via `path`:

    ..doctest::

    >>> from panoptes.pipeline.utils.metadata import ObservationPathInfo
    >>> bucket_path = 'gs://panoptes-images-background/PAN012/Hd189733/358d0f/20180824T035917/20180824T040118.fits'
    >>> path_info = ObservationPathInfo(path=bucket_path)
    >>> path_info.id
    'PAN012_358d0f_20180824T035917_20180824T040118'
    >>> path_info.unit_id
    'PAN012'
    >>> path_info.sequence_id
    'PAN012_358d0f_20180824T035917'
    >>> path_info.image_id
    'PAN012_358d0f_20180824T040118'
    >>> str(path_info.as_path(base='/tmp', ext='.jpg'))
    '/tmp/PAN012/358d0f/20180824T035917/20180824T040118.jpg'
    >>> ObservationPathInfo(path='foobar')
    Traceback (most recent call last):
    ...
    ValueError: Invalid path received: self.path='foobar'
    """
    unit_id: str = None
    camera_id: str = None
    field_name: str = None
    sequence_time: Union[str, datetime, Time] = None
    image_time: Union[str, datetime, Time] = None
    path: Union[str, Path] = None

    def __post_init__(self):
        """Parse the path when provided upon initialization."""
        if self.path is not None:
            # str() so a pathlib.Path value (allowed by the declared type)
            # can be matched; re requires a string.
            path_match = PATH_MATCHER.match(str(self.path))
            if path_match is None:
                raise ValueError(f'Invalid path received: {self.path=}')
            self.unit_id = path_match.group('unit_id')
            self.camera_id = path_match.group('camera_id')
            self.field_name = path_match.group('field_name')
            self.sequence_time = Time(parse_date(path_match.group('sequence_time')))
            self.image_time = Time(parse_date(path_match.group('image_time')))

    @property
    def id(self):
        """Full path info joined with underscores."""
        return self.get_full_id()

    @property
    def sequence_id(self) -> str:
        """The sequence id: '<unit_id>_<camera_id>_<sequence_time>'."""
        return f'{self.unit_id}_{self.camera_id}_{flatten_time(self.sequence_time)}'

    @property
    def image_id(self) -> str:
        """The matched image id: '<unit_id>_<camera_id>_<image_time>'."""
        return f'{self.unit_id}_{self.camera_id}_{flatten_time(self.image_time)}'

    def as_path(self, base: Union[Path, str] = None, ext: str = None) -> Path:
        """Return the info as a relative Path, optionally under `base` with `ext`.

        The extension may be given with or without a leading dot; the previous
        implementation produced a double dot (e.g. 'name..jpg') for ext='.jpg',
        contradicting the class doctest.
        """
        image_str = flatten_time(self.image_time)
        if ext is not None:
            image_str = f"{image_str}.{ext.lstrip('.')}"
        full_path = Path(self.unit_id, self.camera_id, flatten_time(self.sequence_time), image_str)
        if base is not None:
            # Path() makes a str base explicit (str base also worked via
            # PurePath.__rtruediv__, but only accidentally).
            full_path = Path(base) / full_path
        return full_path

    def get_full_id(self, sep='_') -> str:
        """Returns the full path id with the given separator."""
        return f'{sep}'.join([
            self.unit_id,
            self.camera_id,
            flatten_time(self.sequence_time),
            flatten_time(self.image_time)
        ])

    @classmethod
    def from_fits(cls, fits_file):
        """Instantiate from a FITS file by reading its header."""
        header = fits_utils.getheader(fits_file)
        return cls.from_fits_header(header)

    @classmethod
    def from_fits_header(cls, header):
        """Instantiate from a FITS header.

        Prefers the FILENAME card; falls back to splitting the SEQID and
        IMAGEID cards when FILENAME is not a parseable path.
        """
        try:
            new_instance = cls(path=header['FILENAME'])
        except ValueError:
            sequence_id = header['SEQID']
            image_id = header['IMAGEID']
            unit_id, camera_id, sequence_time = sequence_id.split('_')
            _, _, image_time = image_id.split('_')
            new_instance = cls(unit_id=unit_id,
                               camera_id=camera_id,
                               sequence_time=Time(parse_date(sequence_time)),
                               image_time=Time(parse_date(image_time)))
        return new_instance
def extract_metadata(header: Header) -> dict:
    """Get the metadata from a FITS image header.

    Args:
        header (Header): The FITS header to read.

    Returns:
        dict: A dict with 'unit', 'sequence', and 'image' metadata sub-dicts.

    Raises:
        Exception: Re-raised (after logging) if any header value is missing
            or malformed beyond the defaults handled below.
    """
    path_info = ObservationPathInfo.from_fits_header(header)
    try:
        # MEASRGGB is a space-separated string of four numbers.
        measured_rggb = [float(x) for x in header.get('MEASRGGB', '0 0 0 0').split(' ')]
        # Prefer the file creation date; fall back to the image time parsed
        # from the path.
        if 'DATE' in header:
            file_date = parse_date(header.get('DATE')).replace(tzinfo=UTC)
        else:
            file_date = path_info.image_time.to_datetime(timezone=UTC)
        # NOTE(review): if DATE-OBS is absent the default here is a Time
        # object, which parse_date cannot handle - assumed always present.
        camera_date = parse_date(header.get('DATE-OBS', path_info.image_time)).replace(tzinfo=UTC)
        unit_info = dict(
            unit_id=path_info.unit_id,
            latitude=header.get('LAT-OBS'),
            longitude=header.get('LONG-OBS'),
            elevation=float(header.get('ELEV-OBS'))
        )
        sequence_info = dict(
            unit_id=path_info.unit_id,
            sequence_id=path_info.sequence_id,
            time=path_info.sequence_time.to_datetime(timezone=UTC),
            coordinates=dict(
                mount_dec=header.get('DEC-MNT'),
                mount_ra=header.get('RA-MNT'),
            ),
            camera=dict(
                camera_id=path_info.camera_id,
                lens_serial_number=header.get('INTSN'),
                serial_number=str(header.get('CAMSN')),
            ),
            field_name=header.get('FIELD', ''),
            software_version=header.get('CREATOR', ''),
        )
        image_info = dict(
            uid=path_info.get_full_id(sep='_'),
            camera=dict(
                dateobs=camera_date,
                exptime=float(header.get('EXPTIME')),
                iso=header.get('ISO'),
                # Values like CIRCCONF/CAMTEMP carry a trailing unit token,
                # so split and take the leading number.
                circconf=float(header.get('CIRCCONF', '0.').split(' ')[0]),
                blue_balance=float(header.get('BLUEBAL')),
                red_balance=float(header.get('REDBAL')),
                colortemp=float(header.get('COLORTMP')),
                measured_rggb=measured_rggb,
                measured_ev=[float(header.get('MEASEV')), float(header.get('MEASEV2'))],
                # BUG FIX: the default must be a string ('0'); the previous
                # int default 0 raised AttributeError on .split() whenever
                # CAMTEMP was missing.
                temperature=float(header.get('CAMTEMP', '0').split(' ')[0]),
                white_lvln=header.get('WHTLVLN'),
                white_lvls=header.get('WHTLVLS'),
            ),
            environment=dict(
                moonfrac=float(header.get('MOONFRAC')),
                moonsep=float(header.get('MOONSEP')),
            ),
            coordinates=dict(
                airmass=header.get('AIRMASS'),
                mount_ha=header.get('HA-MNT'),
                ra=header.get('CRVAL1'),
                dec=header.get('CRVAL2'),
            ),
            file_creation_date=file_date,
            time=path_info.image_time.to_datetime(timezone=UTC),
        )
    except Exception as e:
        logger.error(f'Error in adding record: {e!r}')
        raise e
    logger.success(f'Metadata extracted from header')
    return dict(unit=unit_info, sequence=sequence_info, image=image_info)
def get_observation_metadata(sequence_ids, fields=None, show_progress=False):
    """Get the metadata for the given sequence_id(s).

    NOTE: This is slated for removal soon.

    This function will search for pre-processed observations that have a stored
    parquet file.

    Note that since the files are stored in parquet format, specifying the `fields`
    does in fact save on the size of the returned data. If requesting many `sequence_ids`
    it may be worth figuring out exactly what columns you need first.

    Args:
        sequence_ids (list): A list of sequence_ids as strings.
        fields (list|None): A list of fields to fetch from the database in addition
            to the 'time' and 'sequence_id' columns. If None, returns all fields.
        show_progress (bool): If True, show a progress bar, default False.

    Returns:
        `pandas.DataFrame`: DataFrame containing the observation metadata, or
            `None` if no parquet files could be read.
    """
    sequence_ids = listify(sequence_ids)
    # Normalize the requested fields once, up front. The previous version did
    # this inside the fetch loop on every iteration and mutated the caller's
    # `fields` list via insert(); build a fresh list instead. The ID columns
    # are always included.
    if fields:
        fields = list({*listify(fields), 'time', 'sequence_id'})
    observation_dfs = list()
    if show_progress:
        iterator = tqdm(sequence_ids, desc='Getting image metadata')
    else:
        iterator = sequence_ids
    logger.debug(f'Getting images metadata for {len(sequence_ids)} files')
    for sequence_id in iterator:
        df_file = f'{OBS_BASE_URL}/{sequence_id}-metadata.parquet'
        try:
            # Parquet is columnar, so passing `columns` genuinely reduces I/O.
            df = pd.read_parquet(df_file, columns=fields)
        except Exception as e:
            logger.warning(f'Problem reading {df_file}: {e!r}')
        else:
            observation_dfs.append(df)
    if len(observation_dfs) == 0:
        logger.info(f'No documents found for sequence_ids={sequence_ids}')
        return
    df = pd.concat(observation_dfs)
    # Return column names in sorted order
    df = df.reindex(sorted(df.columns), axis=1)
    # TODO(wtgee) any data cleaning or preparation for observations here.
    logger.success(f'Returning {len(df)} rows of metadata sorted by time')
    return df.sort_values(by=['time'])
def search_observations(
        coords=None,
        unit_id=None,
        start_date=None,
        end_date=None,
        ra=None,
        dec=None,
        radius=10,  # degrees
        status='matched',
        min_num_images=1,
        source_url=OBSERVATIONS_URL,
        source=None
):
    """Search PANOPTES observations.

    Either a `coords` or `ra` and `dec` must be specified for search to work.

    >>> from astropy.coordinates import SkyCoord
    >>> from panoptes.pipeline.utils.metadata import search_observations
    >>> coords = SkyCoord.from_name('Andromeda Galaxy')
    >>> start_date = '2019-01-01'
    >>> end_date = '2019-12-31'
    >>> search_results = search_observations(coords=coords, min_num_images=10, start_date=start_date, end_date=end_date)
    >>> # The result is a DataFrame you can further work with.
    >>> image_count = search_results.groupby(['unit_id', 'field_name']).num_images.sum()
    >>> image_count
    unit_id  field_name
    PAN001   Andromeda Galaxy     378
             HAT-P-19             148
             TESS_SEC17_CAM02    9949
    PAN012   Andromeda Galaxy      70
             HAT-P-16 b           268
             TESS_SEC17_CAM02    1983
    PAN018   TESS_SEC17_CAM02     244
    Name: num_images, dtype: Int64
    >>> print('Total minutes exposure:', search_results.total_minutes_exptime.sum())
    Total minutes exposure: 20376.83

    Args:
        coords (`astropy.coordinates.SkyCoord`|None): A valid coordinate instance.
        ra (float|None): The RA position in degrees of the center of search.
        dec (float|None): The Dec position in degrees of the center of the search.
        radius (float): The search radius in degrees. Searches are currently done in
            a square box, so this is half the length of the side of the box.
        start_date (str|`datetime.datetime`|None): A valid datetime instance or `None` (default).
            If `None` then the beginning of 2018 is used as a start date.
        end_date (str|`datetime.datetime`|None): A valid datetime instance or `None` (default).
            If `None` then today is used.
        unit_id (str|list|None): A str or list of strs of unit_ids to include.
            Default `None` will include all.
        status (str|list|None): A str or list of observation status to include.
            Defaults to "matched" for observations that have been fully processed. Passing
            `None` will return all status.
        min_num_images (int): Minimum number of images the observation should have, default 1.
        source_url (str): The remote url where the static CSV file is located, default to PANOPTES
            storage location.
        source (`pandas.DataFrame`|None): The dataframe to use or the search.
            If `None` (default) then the `source_url` will be used to look up the file.

    Returns:
        `pandas.DataFrame`: A table with the matching observation results.
    """
    logger.debug(f'Setting up search params')
    if coords is None:
        # The previous `try/except ValueError: raise` here was a no-op;
        # let SkyCoord's ValueError propagate directly.
        coords = SkyCoord(ra=ra, dec=dec, unit='degree')
    # Setup defaults for search.
    if start_date is None:
        start_date = '2018-01-01'
    if end_date is None:
        end_date = current_time()
    # Accept str or datetime; TypeError (already-parsed values) is ignored.
    with suppress(TypeError):
        start_date = parse_date(start_date).replace(tzinfo=None)
    with suppress(TypeError):
        end_date = parse_date(end_date).replace(tzinfo=None)
    # Square box around the center: half-side = radius degrees.
    ra_max = (coords.ra + (radius * u.degree)).value
    ra_min = (coords.ra - (radius * u.degree)).value
    dec_max = (coords.dec + (radius * u.degree)).value
    dec_min = (coords.dec - (radius * u.degree)).value
    logger.debug(f'Getting list of observations')
    # Get the observation list
    obs_df = source
    if obs_df is None:
        local_path = download_file(source_url,
                                   cache='update',
                                   show_progress=False,
                                   pkgname='panoptes')
        obs_df = pd.read_csv(local_path)
    logger.info(f'Found {len(obs_df)} total observations')
    # Perform filtering on other fields here.
    logger.debug(f'Filtering observations')
    obs_df.query(
        f'dec >= {dec_min} and dec <= {dec_max}'
        ' and '
        f'ra >= {ra_min} and ra <= {ra_max}'
        ' and '
        f'time >= "{start_date}"'
        ' and '
        f'time <= "{end_date}"'
        ' and '
        f'num_images >= {min_num_images}'
        ,
        inplace=True
    )
    logger.debug(f'Found {len(obs_df)} observations after initial filter')
    unit_ids = listify(unit_id)
    # BUG FIX: the old check compared the *list* against the sentinel string
    # ('unit_ids != "The Whole World! 🌎"'), which is always True, so passing
    # the "whole world" sentinel filtered by it (matching nothing) instead of
    # skipping the unit filter.
    if len(unit_ids) > 0 and 'The Whole World! 🌎' not in unit_ids:
        obs_df.query(f'unit_id in {unit_ids}', inplace=True)
    logger.debug(f'Found {len(obs_df)} observations after unit filter')
    if status is not None:
        obs_df.query(f'status in {listify(status)}', inplace=True)
        logger.debug(f'Found {len(obs_df)} observations after status filter')
    logger.debug(f'Found {len(obs_df)} observations after filtering')
    obs_df = obs_df.reindex(sorted(obs_df.columns), axis=1)
    obs_df.sort_values(by=['time'], inplace=True)
    # TODO(wtgee) any data cleaning or preparation for observations here.
    columns = [
        'sequence_id',
        'unit_id',
        'camera_id',
        'ra',
        'dec',
        'exptime',
        'field_name',
        'iso',
        'num_images',
        'software_version',
        'status',
        'time',
        'total_minutes_exptime',
    ]
    logger.success(f'Returning {len(obs_df)} observations')
    return obs_df.reindex(columns=columns)
def get_firestore_refs(
        bucket_path: str,
        unit_collection: str = 'units',
        observation_collection: str = 'observations',
        image_collection: str = 'images',
        firestore_db: firestore.Client = None
) -> Tuple[firestore.DocumentReference, firestore.DocumentReference, firestore.DocumentReference]:
    """Get the firestore unit, observation, and image document references.

    Args:
        bucket_path (str): Bucket path of an image, parseable by `ObservationPathInfo`.
        unit_collection (str): Name of the top-level units collection, default 'units'.
        observation_collection (str): Name of the observations subcollection.
        image_collection (str): Name of the images subcollection.
        firestore_db (firestore.Client): An existing client to reuse; a new
            client is created if `None`.

    Returns:
        Tuple: the (unit, observation, image) `DocumentReference` objects.
    """
    firestore_db = firestore_db or firestore.Client()
    path_info = ObservationPathInfo(path=bucket_path)
    sequence_id = path_info.sequence_id
    image_id = path_info.image_id
    # Use the module logger rather than a bare print, consistent with the
    # rest of this module.
    logger.debug(f'Getting document for image {path_info.get_full_id()}')
    unit_collection_ref = firestore_db.collection(unit_collection)
    unit_doc_ref = unit_collection_ref.document(f'{path_info.unit_id}')
    seq_doc_ref = unit_doc_ref.collection(observation_collection).document(sequence_id)
    image_doc_ref = seq_doc_ref.collection(image_collection).document(image_id)
    return unit_doc_ref, seq_doc_ref, image_doc_ref
def record_metadata(bucket_path: str, metadata: dict, **kwargs) -> str:
    """Add FITS header info to firestore_db.

    Writes (with merge) the 'unit', 'sequence', and 'image' sub-dicts of
    `metadata` to their respective firestore documents, incrementing the
    per-unit and per-sequence image counts and total exposure times.

    Note:
        This function doesn't check header for proper entries and
        assumes a large list of keywords. See source for details.

    Args:
        bucket_path (str): Bucket path of the image, parseable by `ObservationPathInfo`.
        metadata (dict): Metadata with 'unit', 'sequence', and 'image' keys,
            as produced by `extract_metadata`.
        **kwargs: Passed through to `get_firestore_refs`.

    Returns:
        str: The firestore document path of the image record (note: the doc
            path, not the bare image_id).

    Raises:
        RuntimeError: If `metadata` is empty.
        Exception: Any firestore error is logged and re-raised.
    """
    # TODO support batch operation.
    if not metadata:
        raise RuntimeError('Need valid metadata')
    print(f'Recording header metadata in firestore for {bucket_path=}')
    path_info = ObservationPathInfo(path=bucket_path)
    sequence_id = path_info.sequence_id
    try:
        print(f'Getting document for observation {sequence_id}')
        unit_doc_ref, sequence_doc_ref, image_doc_ref = get_firestore_refs(bucket_path, **kwargs)
        # Counters use firestore.Increment so concurrent writers don't
        # clobber each other's totals.
        metadata['unit']['num_images'] = firestore.Increment(1)
        metadata['sequence']['num_images'] = firestore.Increment(1)
        # Increment exptime totals.
        with suppress(KeyError):
            exptime = float(metadata['image']['camera']['exptime'])
            metadata['unit']['total_exptime'] = firestore.Increment(exptime)
            metadata['sequence']['total_exptime'] = firestore.Increment(exptime)
        unit_doc_ref.set(metadata['unit'], merge=True)
        sequence_doc_ref.set(metadata['sequence'], merge=True)
        metadata['image']['received_time'] = firestore.SERVER_TIMESTAMP
        image_doc_ref.set(metadata['image'], merge=True)
    except Exception as e:
        print(f'Error in adding record: {traceback.format_exc()!r}')
        raise e
    else:
        print(f'Recorded metadata for {path_info.get_full_id()} with {image_doc_ref.id=}')
    return image_doc_ref.path
| {
"content_hash": "a7327a0a171de4ddf671f92780dd09e6",
"timestamp": "",
"source": "github",
"line_count": 550,
"max_line_length": 129,
"avg_line_length": 35.88,
"alnum_prop": 0.5988142292490118,
"repo_name": "panoptes/PIAA",
"id": "300c9d4e1c1faef546fb5112802c70ee86822058",
"size": "19737",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/panoptes/pipeline/utils/metadata.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "6616114"
},
{
"name": "Python",
"bytes": "37233"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "60bf92acf0d5d36708cfa27e68576ecc",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "8ac5c5b68dc4595a944d71ba1f61c2f6357557f7",
"size": "196",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Caryophyllales/Amaranthaceae/Charpentiera/Charpentiera bracteolata/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
# Serializer for User models, built on the project's Serializers::Base DSL.
class Serializers::User < Serializers::Base
  # :default structure: flat hash of the user's public attributes.
  # Timestamps are emitted as ISO-8601 strings; +try+ keeps them nil-safe
  # when the record has not been persisted yet.
  structure(:default) do |user|
    {
      first_name: user.first_name,
      last_name: user.last_name,
      email: user.email,
      created_at: user.created_at.try(:iso8601),
      id: user.id,
      updated_at: user.updated_at.try(:iso8601),
    }
  end
end
| {
"content_hash": "648527367bcfd81c18115282c4952f3c",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 50,
"avg_line_length": 27.833333333333332,
"alnum_prop": 0.5718562874251497,
"repo_name": "ruprict/pliny_test",
"id": "7009b6177b6835662d2734aaf93e70313b05ee00",
"size": "334",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/serializers/user.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "17998"
},
{
"name": "Shell",
"bytes": "564"
}
],
"symlink_target": ""
} |
package org.kuali.rice.krad.document;
/**
* Determines what actions are applicable to the given document, irrespective of user
* or other state. These initial actions are used as inputs for further filtering depending
* on context.
* @see DocumentAuthorizer
* @author Kuali Rice Team (rice.collab@kuali.org)
*/
public interface DocumentPresentationController {

    // NOTE: interface members are implicitly public, so the redundant
    // "public" modifiers have been dropped (JLS 9.4 / Checkstyle RedundantModifier).

    /**
     * @param documentTypeName name of the document type to check
     * @return true if a new document of the given type may be initiated
     */
    boolean canInitiate(String documentTypeName);

    /** @return true if the given document may be edited */
    boolean canEdit(Document document);

    /** @return true if annotations may be added to the given document */
    boolean canAnnotate(Document document);

    /** @return true if the given document may be reloaded */
    boolean canReload(Document document);

    /** @return true if the given document may be closed */
    boolean canClose(Document document);

    /** @return true if the given document may be saved */
    boolean canSave(Document document);

    /** @return true if the given document may be routed */
    boolean canRoute(Document document);

    /** @return true if the given document may be canceled */
    boolean canCancel(Document document);

    /** @return true if the given document may be copied */
    boolean canCopy(Document document);

    /** @return true if a route report may be performed for the given document */
    boolean canPerformRouteReport(Document document);

    /** @return true if ad hoc requests may be added to the given document */
    boolean canAddAdhocRequests(Document document);

    /** @return true if the given document may be blanket approved */
    boolean canBlanketApprove(Document document);

    /** @return true if the given document may be approved */
    boolean canApprove(Document document);

    /** @return true if the given document may be disapproved */
    boolean canDisapprove(Document document);

    /** @return true if ad hoc requests may be sent for the given document */
    boolean canSendAdhocRequests(Document document);

    /** @return true if a note FYI may be sent for the given document */
    boolean canSendNoteFyi(Document document);

    /** @return true if the given document's overview section may be edited */
    boolean canEditDocumentOverview(Document document);

    /** @return true if an FYI may be taken on the given document */
    boolean canFyi(Document document);

    /** @return true if the given document may be acknowledged */
    boolean canAcknowledge(Document document);

    /** @return true if the given document may be completed */
    boolean canComplete(Document document);

    /**
     * @return true if the given document may be recalled
     * @since 2.1
     */
    boolean canRecall(Document document);
}
| {
"content_hash": "ca89e68690f6d78b6679d63031a55374",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 92,
"avg_line_length": 27.344827586206897,
"alnum_prop": 0.7332912988650694,
"repo_name": "ewestfal/rice-svn2git-test",
"id": "b90994b6e2c3504ed5ea48da6655705e2e4ee12f",
"size": "2221",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "rice-framework/krad-web-framework/src/main/java/org/kuali/rice/krad/document/DocumentPresentationController.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "793243"
},
{
"name": "Groovy",
"bytes": "2170621"
},
{
"name": "Java",
"bytes": "34445604"
},
{
"name": "JavaScript",
"bytes": "2635300"
},
{
"name": "PHP",
"bytes": "15766"
},
{
"name": "Shell",
"bytes": "10444"
},
{
"name": "XSLT",
"bytes": "107686"
}
],
"symlink_target": ""
} |
// Configuration consumed by the generic table/CRUD component for the
// "SNMP Devices" section: column definitions plus the route/API slug.
export const SnmpDeviceCfgComponentConfig: any =
{
  'name' : 'SNMP Devices',
  // Maps UI column titles to the backend field names of a device config entry.
  'table-columns' : [
    { title: 'ID', name: 'ID' },
    { title: 'Host', name: 'Host' },
    { title: 'Port', name: 'Port' },
    { title: 'Active', name: 'Active' },
    { title: 'Alternate System OIDs', name: 'SystemOIDs' },
    { title: 'Snmp Version', name: 'SnmpVersion' },
    { title: 'Snmp Debug', name: 'SnmpDebug' },
    { title: 'Polling Period (sec)', name: 'Freq' },
    { title: 'Update Filter (Cycles)', name: 'UpdateFltFreq' },
    { title: 'Concurrent Gather', name: 'ConcurrentGather' },
    { title: 'Influx DB', name: 'OutDB' },
    { title: 'Log Level', name: 'LogLevel' },
    { title: 'Disable Snmp Bulk Queries', name: 'DisableBulk' },
    { title: 'MaxOids for SNMP GET', name: 'MaxOids' },
    { title: 'Timeout', name: 'Timeout' },
    { title: 'Retries', name: 'Retries' },
    { title: 'SNMP Max Repetitions', name: 'MaxRepetitions' },
    { title: 'Tag Name', name: 'DeviceTagName' },
    { title: 'Tag Value', name: 'DeviceTagValue' },
    { title: 'Extra Tags', name: 'ExtraTags' },
    { title: 'Device Variables', name: 'DeviceVars' },
    { title: 'Measurement Groups', name: 'MeasurementGroups' },
    { title: 'Measurement Filters', name: 'MeasFilters' }
  ],
  // URL fragment identifying this config type in routes/API calls.
  'slug' : 'snmpdevicecfg'
};
// Extra per-row actions rendered by the table component, grouped under a
// "Runtime Ops" column; 'position: first' places the group before the
// standard action buttons.
export const ExtraActions: any = {
  data: [
    {
      title: 'Runtime Ops', content: [
        {
          // Toggle: deploy/undeploy the device in the running collector
          // (see tooltip); icon choice depends on the IsRuntime flag.
          type: 'boolean-label',
          enabled: '<label class="glyphicon glyphicon-minus-sign"></label>',
          disabled: '<i role="button" class="glyphicon glyphicon-plus-sign"></i>',
          property: 'IsRuntime',
          action: 'changeruntime',
          tooltip: 'Un/Deploy device in runtime'
        },
        {
          // Refresh the runtime configuration; hidden (disabled: null)
          // when the device is not deployed.
          type: 'boolean-label',
          enabled: '<label class="glyphicon glyphicon-refresh"></label>',
          disabled: null,
          property: 'IsRuntime',
          action: 'updateruntime',
          tooltip: 'Refresh device configuration in runtime'
        },
        {
          // Remove the device from both runtime and stored configuration.
          type: 'button-label',
          text: '<label role="button" class="glyphicon glyphicon-remove-sign"></label>',
          property: 'Active',
          action: 'deletefull',
          tooltip: 'Delete in runtime and config'
        }
      ]
    }],
  position: 'first'
};
// Role used by the generic table component: full edit capabilities.
export const TableRole : string = 'fulledit';

// Per-row action buttons that override the component defaults for this table.
// Fix: the 'view' entry used a misspelled 'types' key while every sibling uses
// 'type', so the action renderer would not have recognized it as an icon action.
export const OverrideRoleActions : Array<Object> = [
  {'name':'export', 'type':'icon', 'icon' : 'glyphicon glyphicon-download-alt text-default', 'tooltip': 'Export item'},
  {'name':'test-connection', 'type':'icon', 'icon' : 'glyphicon glyphicon-flash text-info', 'tooltip': 'Test connection'},
  {'name':'view', 'type':'icon', 'icon' : 'glyphicon glyphicon-eye-open text-success', 'tooltip': 'View item'},
  {'name':'edit', 'type':'icon', 'icon' : 'glyphicon glyphicon-edit text-warning', 'tooltip': 'Edit item'},
  {'name':'remove', 'type':'icon', 'icon' : 'glyphicon glyphicon glyphicon-remove text-danger', 'tooltip': 'Remove item'}
]
| {
"content_hash": "03d679cff87751e29bda356eb2180c25",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 124,
"avg_line_length": 43.29577464788732,
"alnum_prop": 0.5731945348080677,
"repo_name": "toni-moreno/snmpcollector",
"id": "0b783025cce8456934661e499ab038d8a36866c5",
"size": "3074",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/snmpdevice/snmpdevicecfg.data.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "15601"
},
{
"name": "Dockerfile",
"bytes": "1443"
},
{
"name": "Go",
"bytes": "621113"
},
{
"name": "HTML",
"bytes": "190579"
},
{
"name": "JavaScript",
"bytes": "1892"
},
{
"name": "Shell",
"bytes": "12938"
},
{
"name": "TypeScript",
"bytes": "362127"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<!-- Module descriptor for the com.amazonaws.eclipse.lambda bundle.
     The "eclipse-plugin" packaging indicates a Tycho/manifest-first build
     (dependencies come from the bundle MANIFEST rather than this POM);
     group id and shared build configuration are inherited from the parent
     bundles POM. -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <artifactId>com.amazonaws.eclipse.lambda</artifactId>
  <version>1.0.0-SNAPSHOT</version>
  <packaging>eclipse-plugin</packaging>
  <parent>
    <groupId>com.amazonaws.eclipse</groupId>
    <artifactId>com.amazonaws.eclipse.bundles</artifactId>
    <version>1.0.0-SNAPSHOT</version>
  </parent>
</project>
| {
"content_hash": "d7409e266ea229990a523f53b315d373",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 101,
"avg_line_length": 42.357142857142854,
"alnum_prop": 0.7166947723440135,
"repo_name": "zhangzhx/aws-toolkit-eclipse",
"id": "23afb766ec9443c580227722a702a5a6792e6c06",
"size": "593",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bundles/com.amazonaws.eclipse.lambda/pom.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2016"
},
{
"name": "FreeMarker",
"bytes": "77017"
},
{
"name": "HTML",
"bytes": "3566"
},
{
"name": "Java",
"bytes": "5463857"
},
{
"name": "Shell",
"bytes": "3124"
},
{
"name": "XSLT",
"bytes": "485"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "8c85e933bb6db8f19b367d3d3e26148f",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "8f5d04d0da76265b05d9a0fe0d3fa98dd9358eff",
"size": "179",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Asteraceae/Aphanactis hutchisonii/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
using System;
using System.Collections;
using System.Linq;
namespace ForumSystem.Common.Repository
{
    /// <summary>
    /// Generic data-access contract over an entity type <typeparamref name="T"/>.
    /// Implementations are disposable so the underlying data context can be released.
    /// </summary>
    public interface IRepository<T> : IDisposable
    {
        /// <summary>Queryable sequence over all entities (composable before execution).</summary>
        IQueryable<T> All();

        /// <summary>Looks up a single entity by its integer key.</summary>
        T GetById(int id);

        /// <summary>Marks the entity for insertion.</summary>
        void Add(T entity);

        /// <summary>Marks the entity as modified.</summary>
        void Update(T entity);

        /// <summary>Marks the given entity for deletion.</summary>
        void Delete(T entity);

        /// <summary>Marks the entity with the given key for deletion.</summary>
        void Delete(int id);

        /// <summary>Detaches the entity from change tracking.</summary>
        void Detach(T entity);

        /// <summary>Persists pending changes; returns the number of affected records.</summary>
        int SaveChanges();
    }
}
| {
"content_hash": "414e546643a9bd89039a8bcce818238f",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 49,
"avg_line_length": 16.28,
"alnum_prop": 0.597051597051597,
"repo_name": "didimitrov/ForumSystem",
"id": "a32951a58dea622446560d0c220dac291067e6e2",
"size": "409",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ForumSystem/Data/ForumSystem.Common/Repository/IRepository.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "106"
},
{
"name": "C#",
"bytes": "141881"
},
{
"name": "CSS",
"bytes": "141071"
},
{
"name": "HTML",
"bytes": "5127"
},
{
"name": "JavaScript",
"bytes": "11032"
}
],
"symlink_target": ""
} |
# Homebrew formula for Mercurial, including the chg fast client and shell completions.
class Mercurial < Formula
  desc "Scalable distributed version control system"
  homepage "https://mercurial-scm.org/"
  url "https://www.mercurial-scm.org/release/mercurial-5.8.tar.gz"
  sha256 "fc5d6a8f6478d88ef83cdd0ab6d86ad68ee722bbdf4964e6a0b47c3c6ba5309f"
  license "GPL-2.0-or-later"

  # Upstream publishes tarballs in a plain directory listing; scan it for versions.
  livecheck do
    url "https://www.mercurial-scm.org/release/"
    regex(/href=.*?mercurial[._-]v?(\d+(?:\.\d+)+)\.t/i)
  end

  bottle do
    sha256 arm64_big_sur: "b5d662bf05a3ccd236631371069faaac56e3f7c3f3d60c45efa4d5dbfefb7f0c"
    sha256 big_sur: "171d741546c8f5120e8b786a4de8b98d98a200d5a6895bb6eda47aa86171eb2e"
    sha256 catalina: "00830f2d99780c23b2e4836294e3b687aa7f5c61364440348a518b976ad22744"
    sha256 mojave: "d6c18ff4a46eb606ea11ba2d4f63fe892c2575fd0014575a5db6b3d19c26b9c9"
  end

  depends_on "python@3.9"

  def install
    # Tell Mercurial's Makefile to build against Python 3.
    ENV["HGPYTHON3"] = "1"

    system "make", "PREFIX=#{prefix}", "PYTHON=python3", "install-bin"

    # Install chg (see https://www.mercurial-scm.org/wiki/CHg)
    cd "contrib/chg" do
      system "make", "PREFIX=#{prefix}", "PYTHON=python3", "HGPATH=#{bin}/hg",
             "HG=#{bin}/hg"
      bin.install "chg"
    end

    # Configure a nicer default pager
    (buildpath/"hgrc").write <<~EOS
      [pager]
      pager = less -FRX
    EOS
    (etc/"mercurial").install "hgrc"

    # Install man pages, which come pre-built in source releases
    man1.install "doc/hg.1"
    man5.install "doc/hgignore.5", "doc/hgrc.5"

    # install the completion scripts
    bash_completion.install "contrib/bash_completion" => "hg-completion.bash"
    zsh_completion.install "contrib/zsh_completion" => "_hg"
  end

  def caveats
    # Warn only when hg is installed and web.cacerts has been overridden by the user.
    return unless (opt_bin/"hg").exist?

    cacerts_configured = `#{opt_bin}/hg config web.cacerts`.strip
    return if cacerts_configured.empty?

    <<~EOS
      Homebrew has detected that Mercurial is configured to use a certificate
      bundle file as its trust store for TLS connections instead of using the
      default OpenSSL store. If you have trouble connecting to remote
      repositories, consider unsetting the `web.cacerts` property. You can
      determine where the property is being set by running:
        hg config --debug web.cacerts
    EOS
  end

  test do
    system "#{bin}/hg", "init"
  end
end
| {
"content_hash": "2169b2375a783f6e7827148caab3fed3",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 92,
"avg_line_length": 32.98571428571429,
"alnum_prop": 0.6899090515374621,
"repo_name": "JCount/homebrew-core",
"id": "721cb852ca8f197f03a35fdaa3909f5f3d3f3432",
"size": "2403",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Formula/mercurial.rb",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Perl",
"bytes": "628"
},
{
"name": "Ruby",
"bytes": "8290585"
}
],
"symlink_target": ""
} |
package org.elasticsearch.cluster.routing;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.snapshots.Snapshot;
import org.elasticsearch.snapshots.SnapshotId;
import org.elasticsearch.test.ESTestCase;
import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength;
/**
* A helper that allows to create shard routing instances within tests, while not requiring to expose
* different simplified constructors on the ShardRouting itself.
*/
public class TestShardRouting {

    // --- newShardRouting overloads ---------------------------------------------------------
    // Each overload fills in the pieces of routing state the caller did not specify;
    // recovery source, unassigned info and allocation id are derived from the requested
    // ShardRoutingState. The trailing -1 is the (unspecified) expected shard size.

    // Convenience overload: wraps a raw index name in a ShardId with an unknown index UUID.
    public static ShardRouting newShardRouting(String index, int shardId, String currentNodeId, boolean primary, ShardRoutingState state) {
        return newShardRouting(new ShardId(index, IndexMetaData.INDEX_UUID_NA_VALUE, shardId), currentNodeId, primary, state);
    }

    // Non-relocating routing entry with state-derived recovery/unassigned/allocation info.
    public static ShardRouting newShardRouting(ShardId shardId, String currentNodeId, boolean primary, ShardRoutingState state) {
        return new ShardRouting(shardId, currentNodeId, null, primary, state, buildRecoveryTarget(primary, state),
            buildUnassignedInfo(state), buildAllocationId(state), -1);
    }

    // Same as above, but with an explicit recovery source instead of a derived one.
    public static ShardRouting newShardRouting(ShardId shardId, String currentNodeId, boolean primary,
                                               ShardRoutingState state, RecoverySource recoverySource) {
        return new ShardRouting(shardId, currentNodeId, null, primary, state, recoverySource,
            buildUnassignedInfo(state), buildAllocationId(state), -1);
    }

    public static ShardRouting newShardRouting(String index, int shardId, String currentNodeId, String relocatingNodeId,
                                               boolean primary, ShardRoutingState state) {
        return newShardRouting(new ShardId(index, IndexMetaData.INDEX_UUID_NA_VALUE, shardId), currentNodeId,
            relocatingNodeId, primary, state);
    }

    // Routing entry with a relocation target node.
    public static ShardRouting newShardRouting(ShardId shardId, String currentNodeId, String relocatingNodeId,
                                               boolean primary, ShardRoutingState state) {
        return new ShardRouting(shardId, currentNodeId, relocatingNodeId, primary, state,
            buildRecoveryTarget(primary, state), buildUnassignedInfo(state), buildAllocationId(state), -1);
    }

    public static ShardRouting newShardRouting(String index, int shardId, String currentNodeId,
                                               String relocatingNodeId, boolean primary, ShardRoutingState state, AllocationId allocationId) {
        return newShardRouting(new ShardId(index, IndexMetaData.INDEX_UUID_NA_VALUE, shardId), currentNodeId,
            relocatingNodeId, primary, state, allocationId);
    }

    // Routing entry with an explicit allocation id instead of a derived one.
    public static ShardRouting newShardRouting(ShardId shardId, String currentNodeId, String relocatingNodeId, boolean primary,
                                               ShardRoutingState state, AllocationId allocationId) {
        return new ShardRouting(shardId, currentNodeId, relocatingNodeId, primary, state,
            buildRecoveryTarget(primary, state), buildUnassignedInfo(state), allocationId, -1);
    }

    public static ShardRouting newShardRouting(String index, int shardId, String currentNodeId,
                                               String relocatingNodeId, boolean primary, ShardRoutingState state,
                                               UnassignedInfo unassignedInfo) {
        return newShardRouting(new ShardId(index, IndexMetaData.INDEX_UUID_NA_VALUE, shardId), currentNodeId, relocatingNodeId,
            primary, state, unassignedInfo);
    }

    // Routing entry with explicit UnassignedInfo instead of derived info.
    public static ShardRouting newShardRouting(ShardId shardId, String currentNodeId,
                                               String relocatingNodeId, boolean primary, ShardRoutingState state,
                                               UnassignedInfo unassignedInfo) {
        return new ShardRouting(shardId, currentNodeId, relocatingNodeId, primary, state, buildRecoveryTarget(primary, state),
            unassignedInfo, buildAllocationId(state), -1);
    }

    // Thin wrapper over ShardRouting#relocate for test readability.
    public static ShardRouting relocate(ShardRouting shardRouting, String relocatingNodeId, long expectedShardSize) {
        return shardRouting.relocate(relocatingNodeId, expectedShardSize);
    }

    // Picks a recovery source consistent with the given state: unassigned/initializing
    // primaries recover from a randomly chosen (empty or existing) store, replicas from a
    // peer; started/relocating shards carry no recovery source.
    private static RecoverySource buildRecoveryTarget(boolean primary, ShardRoutingState state) {
        switch (state) {
            case UNASSIGNED:
            case INITIALIZING:
                if (primary) {
                    return ESTestCase.randomFrom(RecoverySource.EmptyStoreRecoverySource.INSTANCE,
                        RecoverySource.ExistingStoreRecoverySource.INSTANCE);
                } else {
                    return RecoverySource.PeerRecoverySource.INSTANCE;
                }
            case STARTED:
            case RELOCATING:
                return null;
            default:
                throw new IllegalStateException("illegal state");
        }
    }

    // Unassigned shards have no allocation id; initializing/started shards get a fresh one;
    // relocating shards get an initializing id paired with its relocation counterpart.
    private static AllocationId buildAllocationId(ShardRoutingState state) {
        switch (state) {
            case UNASSIGNED:
                return null;
            case INITIALIZING:
            case STARTED:
                return AllocationId.newInitializing();
            case RELOCATING:
                AllocationId allocationId = AllocationId.newInitializing();
                return AllocationId.newRelocation(allocationId);
            default:
                throw new IllegalStateException("illegal state");
        }
    }

    // Only unassigned/initializing shards carry UnassignedInfo; the reason is randomized.
    private static UnassignedInfo buildUnassignedInfo(ShardRoutingState state) {
        switch (state) {
            case UNASSIGNED:
            case INITIALIZING:
                return new UnassignedInfo(ESTestCase.randomFrom(UnassignedInfo.Reason.values()), "auto generated for test");
            case STARTED:
            case RELOCATING:
                return null;
            default:
                throw new IllegalStateException("illegal state");
        }
    }

    // Random recovery source drawn from all supported variants, including a snapshot source
    // with randomized snapshot/restore identifiers.
    public static RecoverySource randomRecoverySource() {
        return ESTestCase.randomFrom(RecoverySource.EmptyStoreRecoverySource.INSTANCE,
            RecoverySource.ExistingStoreRecoverySource.INSTANCE,
            RecoverySource.PeerRecoverySource.INSTANCE,
            RecoverySource.LocalShardsRecoverySource.INSTANCE,
            new RecoverySource.SnapshotRecoverySource(
                UUIDs.randomBase64UUID(),
                new Snapshot("repo", new SnapshotId(randomAlphaOfLength(8), UUIDs.randomBase64UUID())),
                Version.CURRENT,
                "some_index"));
    }
}
| {
"content_hash": "0b64fc25b0a2910f98f6b4d1484a52eb",
"timestamp": "",
"source": "github",
"line_count": 135,
"max_line_length": 139,
"avg_line_length": 48.55555555555556,
"alnum_prop": 0.6825324180015255,
"repo_name": "coding0011/elasticsearch",
"id": "9d892d192a25a3a9138ed386f86f54de3e6bea56",
"size": "7343",
"binary": false,
"copies": "16",
"ref": "refs/heads/master",
"path": "test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "11081"
},
{
"name": "Batchfile",
"bytes": "18064"
},
{
"name": "Emacs Lisp",
"bytes": "3341"
},
{
"name": "FreeMarker",
"bytes": "45"
},
{
"name": "Groovy",
"bytes": "312193"
},
{
"name": "HTML",
"bytes": "5519"
},
{
"name": "Java",
"bytes": "41505710"
},
{
"name": "Perl",
"bytes": "7271"
},
{
"name": "Python",
"bytes": "55163"
},
{
"name": "Shell",
"bytes": "119286"
}
],
"symlink_target": ""
} |
layout: post
title: pyraminx blindfolded solving?
date: 2010-12-05 21:59:22.000000000 +07:00
type: post
published: true
status: publish
categories:
- rubik's cube
tags:
- abel brata
- BLD
- blindfolded
- puzzle
- pyraminx
- pyraminx blindfolded
meta:
_edit_last: '7761285'
_wp_old_slug: ''
geo_latitude: '-8.588990'
geo_longitude: '116.130400'
geo_accuracy: '206'
geo_address: mataram
geo_public: '1'
author:
login: pdft
email: padfoot.tgz@gmail.com
display_name: piko
first_name: herpiko
last_name: dwi aguno
---
<p>bosen main puzzle? pasti. habis, itu-itu saja. tapi saya yakin BLD pyraminx bukan termasuk dalam kategori itu-itu saja. sangat menarik dan menantang! yak, ngomong2 kemarin si pemilik blog sudah solve 4 kali. :)</p>
<p>petunjuknya dapat dari <a href="http://www.speedsolvers.com/forum/viewtopic.php?f=6&t=1114&p=5924&hilit=bld+pyraminx#p5924">sini.</a> apa sih? gak ngerti. sama, saya juga gak ngerti kalau gak mikir keras. mas abel cuma ngasih petunjuk dasar dan bahkan itu masih experimental. doh.</p>
<p>jika waktu memungkinkan, saya akan menulis tutorialnya yang lebih lengkap. janji.</p>
<p>nah, mengenai atmosfer komunitas puzzle indonesia, kok sekarang rada aneh. para petinggi NSA sudah jarang nongol di rubikku, juga para member angkatan awal. terlebih pasca Wicaksono Adi meninggalkan NSA (karena ada suatu masalah dan masalah lain, saya turut bersedih). saya menyatakan turut berduka dan semoga mas wicak cepat sembuh dan segar dari masalah2, skripsi selesai, dan kembali nongol di rubikku.</p>
| {
"content_hash": "820612bd33123bce3dc3f5f38a5b75af",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 412,
"avg_line_length": 45.970588235294116,
"alnum_prop": 0.7581573896353166,
"repo_name": "herpiko/herpiko.github.io",
"id": "ea4f1aba124503600bf0b1f9beaa2a8e2e0285a8",
"size": "1567",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_posts/2010-12-05-pyraminx-blindfolded-solving.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "17673"
},
{
"name": "HTML",
"bytes": "1742418"
},
{
"name": "Ruby",
"bytes": "2437"
}
],
"symlink_target": ""
} |
// Karma configuration
// Generated on Tue Jan 10 2017 20:14:56 GMT+0100 (WAT)
// Karma test-runner configuration: Jasmine specs with coverage + coveralls reporting.
module.exports = function(config) {
  config.set({

    /* base path that will be used to resolve all patterns
       (eg. files, exclude) */
    basePath: '',

    /* frameworks to use
       available frameworks:
       https://npmjs.org/browse/keyword/karma-adapter */
    frameworks: ['jasmine'],

    // list of files / patterns to load in the browser
    files: [
      'test/src/**/*.js',
      'test/unit/TestSpec.js'
    ],

    // list of files to exclude:
    exclude: [],

    // NOTE(review): the 'verbose' reporter used below has no matching plugin
    // entry here (e.g. 'karma-verbose-reporter') — confirm it is installed and
    // auto-loaded, otherwise Karma will fail to resolve it at startup.
    plugins: [
      'karma-jasmine',
      'karma-chrome-launcher',
      'karma-coverage',
      'karma-coveralls'
    ],

    /* preprocess matching files before serving them to the browser
       available preprocessors:
       https://npmjs.org/browse/keyword/karma-preprocessor */
    preprocessors: { 'app/**/*.js': ['coverage'] },

    /* test results reporter to use
       possible values: 'dots', 'progress'
       available reporters:
       https://npmjs.org/browse/keyword/karma-reporter */
    reporters: ['progress', 'coverage', 'coveralls', 'verbose'],

    // lcov output is what coveralls consumes.
    coverageReporter: {
      type: 'lcov',
      dir: 'coverage/'
    },

    // web server port
    port: 9876,

    // enable / disable colors in the output (reporters and logs)
    colors: true,

    /* level of logging
       possible values: config.LOG_DISABLE || config.LOG_ERROR ||
       config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG */
    logLevel: config.LOG_INFO,

    /* enable / disable watching file and executing
       tests whenever any file changes */
    autoWatch: false,

    /* start these browsers
       available browser launchers:
       https://npmjs.org/browse/keyword/karma-launcher */
    browsers: ['Chrome', 'Firefox'],

    // Continuous Integration mode
    // if true, Karma captures browsers, runs the tests and exits
    singleRun: true,

    // Concurrency level
    // how many browser should be started simultaneous
    concurrency: Infinity
  });
};
| {
"content_hash": "e821b81d0e214f2e389e0bf93a83c526",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 67,
"avg_line_length": 22.387096774193548,
"alnum_prop": 0.6282420749279539,
"repo_name": "andela/temari-cfh",
"id": "bf28515388abc665884a776dca91ced0afbf1d1a",
"size": "2082",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "karma.conf.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "112358"
},
{
"name": "HTML",
"bytes": "75325"
},
{
"name": "JavaScript",
"bytes": "113962"
}
],
"symlink_target": ""
} |
package jwebform.integration.fromBean;
import jwebform.Form;
import jwebform.field.TextType;
import jwebform.integration.bean2form.DefaultBean2Form;
import org.junit.Before;
import org.junit.Test;
import static junit.framework.TestCase.assertTrue;
import static org.junit.Assert.assertEquals;
// Tests if it works, to convert a bean containing a String to a form with a TextType field via Bean2Form
public class TestString {

  // Value assigned to Bean.name before conversion; the generated field should
  // carry it as its initial value.
  public static final String INITIAL_VALUE = "testname";

  // Form generated from the bean in init(); exercised by every test method.
  Form form;

  @Before
  public void init() {
    Bean bean = new Bean();
    bean.name = INITIAL_VALUE;
    form = new DefaultBean2Form().getFormFromBean(bean);
  }

  // A bean with a single String property should yield exactly one field of type TextType.
  @Test
  public void test_beanWithString() {
    assertEquals(form.getFields().size(), 1);
    assertTrue(form.getFields().get(0).fieldType instanceof TextType);
  }

  // The generated field's name should match the bean property name.
  @Test
  public void test_beanNameCorrect() {
    TextType textType = (TextType)form.getFields().get(0).fieldType;
    assertEquals(textType.oneValueField.name, "name");
  }

  // The generated field should be preset with the bean's property value.
  @Test
  public void test_presetCorrect() {
    TextType textType = (TextType)form.getFields().get(0).fieldType;
    assertEquals( INITIAL_VALUE, textType.oneValueField.initialValue);
  }

  // Minimal input bean: one public String property.
  public class Bean {
    public String name;
  }
}
| {
"content_hash": "9280c909fd0a4fcd89d09ecb8eb0dc07",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 105,
"avg_line_length": 25.875,
"alnum_prop": 0.7318840579710145,
"repo_name": "jochen777/jWebForm",
"id": "f0f11eacc55c20938d8cdaa164fa399e38c23528",
"size": "1242",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jwebform-integration/src/test/java/jwebform/integration/fromBean/TestString.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "36541"
},
{
"name": "Java",
"bytes": "248659"
}
],
"symlink_target": ""
} |
var path = require('path')
// Build and dev-server settings consumed by the project's webpack configuration.
module.exports = {
  build: {
    env: require('./prod.env'),
    index: path.resolve(__dirname, '../dist/index.html'),
    assetsRoot: path.resolve(__dirname, '../dist'),
    assetsSubDirectory: 'static',
    assetsPublicPath: '/',
    productionSourceMap: true,
    // Gzip off by default as many popular static hosts such as
    // Surge or Netlify already gzip all static assets for you.
    // Before setting to `true`, make sure to:
    // npm install --save-dev compression-webpack-plugin
    productionGzip: false,
    productionGzipExtensions: ['js', 'css'],
    // Run the build command with an extra argument to
    // View the bundle analyzer report after build finishes:
    // `npm run build --report`
    // Set to `true` or `false` to always turn it on or off
    bundleAnalyzerReport: process.env.npm_config_report
  },
  dev: {
    env: require('./dev.env'),
    port: 8080,
    autoOpenBrowser: true,
    assetsSubDirectory: 'static',
    assetsPublicPath: '/',
    proxyTable: {
      // Forward /api requests to the local backend server during development.
      '/api': {
        target: 'http://localhost:9000'
      }
    },
    // CSS Sourcemaps off by default because relative paths are "buggy"
    // with this option, according to the CSS-Loader README
    // (https://github.com/webpack/css-loader#sourcemaps)
    // In our experience, they generally work as expected,
    // just be aware of this issue when enabling this option.
    cssSourceMap: false
  }
}
| {
"content_hash": "5a5035342c0cfd81911520960d297a5e",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 71,
"avg_line_length": 35.24390243902439,
"alnum_prop": 0.6491349480968858,
"repo_name": "hellofornow/wedding-site-client",
"id": "76fe3bbb39df5f12c106013db1179a55dda05fc2",
"size": "1512",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "config/index.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1584"
},
{
"name": "JavaScript",
"bytes": "17705"
},
{
"name": "Vue",
"bytes": "15874"
}
],
"symlink_target": ""
} |
// Package entry point: re-export the implementation from lib/selleck.
module.exports = require('./lib/selleck');
| {
"content_hash": "67b97f26fd267bcbc5e5137cd9d8b274",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 42,
"avg_line_length": 43,
"alnum_prop": 0.6976744186046512,
"repo_name": "yui/selleck",
"id": "446dc1898683cc232b06b0a43432fba1af247892",
"size": "43",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "index.js",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "18118"
},
{
"name": "JavaScript",
"bytes": "103764"
}
],
"symlink_target": ""
} |
using System.Web;
using System.Web.Mvc;
namespace RailsSharp.Example
{
    /// <summary>ASP.NET MVC startup helper that registers global action filters.</summary>
    public class FilterConfig
    {
        /// <summary>
        /// Registers the default <see cref="HandleErrorAttribute"/> on the global
        /// filter collection so unhandled exceptions are routed to the error view.
        /// </summary>
        /// <param name="filters">The application's global filter collection.</param>
        public static void RegisterGlobalFilters(GlobalFilterCollection filters)
        {
            filters.Add(new HandleErrorAttribute());
        }
    }
}
| {
"content_hash": "85c25b66f63e331c5a13f99ade38a2de",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 74,
"avg_line_length": 18.076923076923077,
"alnum_prop": 0.7617021276595745,
"repo_name": "chrismbarr/LAN.Core.Eventing",
"id": "086e3065d7b21fb93161c0aef026b0cec42d256a",
"size": "237",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "RailsSharp.Example/App_Start/FilterConfig.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "109"
},
{
"name": "Batchfile",
"bytes": "1155"
},
{
"name": "C#",
"bytes": "134767"
},
{
"name": "CSS",
"bytes": "776"
},
{
"name": "HTML",
"bytes": "5127"
},
{
"name": "JavaScript",
"bytes": "671654"
},
{
"name": "TypeScript",
"bytes": "10560"
}
],
"symlink_target": ""
} |
namespace swri_transform_util
{
  // Transformer plugin that converts between TF frames and the WGS84 (lat/lon)
  // frame via a shared LocalXyWgs84Util local-origin helper.
  class Wgs84Transformer : public Transformer
  {
  public:
    Wgs84Transformer();

    // Frame pairs this transformer can convert between.
    // NOTE(review): exact map semantics (source -> supported targets assumed)
    // are defined by the Transformer base class — confirm there.
    virtual std::map<std::string, std::vector<std::string> > Supports() const;

    // Looks up the transform between source_frame and target_frame at the given
    // time; returns false on failure.
    virtual bool GetTransform(
      const std::string& target_frame,
      const std::string& source_frame,
      const ros::Time& time,
      Transform& transform);

  protected:
    // Sets up local_xy_util_ / local_xy_frame_; returns false on failure.
    virtual bool Initialize();

    boost::shared_ptr<LocalXyWgs84Util> local_xy_util_;
    std::string local_xy_frame_;
  };

  // Transform implementation: TF frame -> WGS84 lat/lon, using a TF transform
  // into the local XY frame followed by the LocalXyWgs84Util conversion.
  class TfToWgs84Transform : public TransformImpl
  {
  public:
    TfToWgs84Transform(
      const tf::StampedTransform& transform,
      boost::shared_ptr<LocalXyWgs84Util> local_xy_util);

    virtual void Transform(const tf::Vector3& v_in, tf::Vector3& v_out) const;
    virtual tf::Quaternion GetOrientation() const;
    virtual TransformImplPtr Inverse() const;

  protected:
    tf::StampedTransform transform_;
    boost::shared_ptr<LocalXyWgs84Util> local_xy_util_;
  };

  // Transform implementation for the opposite direction: WGS84 lat/lon -> TF frame.
  class Wgs84ToTfTransform : public TransformImpl
  {
  public:
    Wgs84ToTfTransform(
      const tf::StampedTransform& transform,
      boost::shared_ptr<LocalXyWgs84Util> local_xy_util);

    virtual void Transform(const tf::Vector3& v_in, tf::Vector3& v_out) const;
    virtual tf::Quaternion GetOrientation() const;
    virtual TransformImplPtr Inverse() const;

  protected:
    tf::StampedTransform transform_;
    boost::shared_ptr<LocalXyWgs84Util> local_xy_util_;
  };
}
#endif // TRANSFORM_UTIL_WGS84_TRANSFORMER_H_
| {
"content_hash": "93417b0246619226a8f415ada4fd0e90",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 80,
"avg_line_length": 27.210526315789473,
"alnum_prop": 0.6840747904577692,
"repo_name": "elliotjo/marti_common",
"id": "49ba618f4bd369fac5dfcaef03c9f4294a1cc8a4",
"size": "3676",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "swri_transform_util/include/swri_transform_util/wgs84_transformer.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "2127"
},
{
"name": "C++",
"bytes": "734249"
},
{
"name": "CMake",
"bytes": "27636"
},
{
"name": "GLSL",
"bytes": "630"
},
{
"name": "Python",
"bytes": "30262"
},
{
"name": "Shell",
"bytes": "1913"
}
],
"symlink_target": ""
} |
#ifndef SKSL_NFASTATE
#define SKSL_NFASTATE
#include <string>
#include <vector>
#include "src/sksl/lex/LexUtil.h"
// A single state in the lexer-generator's NFA. The state's Kind determines how
// fChar/fInverse/fData are interpreted; fNext lists successor state indices.
struct NFAState {
    enum Kind {
        // represents an accept state - if the NFA ends up in this state, we have successfully
        // matched the token indicated by fData[0]
        kAccept_Kind,
        // matches the single character fChar
        kChar_Kind,
        // the regex '.'; matches any char but '\n'
        kDot_Kind,
        // a state which serves as a placeholder for the states indicated in fData. When we
        // transition to this state, we instead transition to all of the fData states.
        kRemapped_Kind,
        // contains a list of true/false values in fData. fData[c] tells us whether we accept the
        // character c.
        kTable_Kind
    };

    // Generic constructor: a state of the given kind with successor states `next`.
    NFAState(Kind kind, std::vector<int> next)
        : fKind(kind)
        , fNext(std::move(next)) {}

    // Single-character matcher (kChar_Kind).
    NFAState(char c, std::vector<int> next)
        : fKind(kChar_Kind)
        , fChar(c)
        , fNext(std::move(next)) {}

    // Placeholder state (kRemapped_Kind) standing in for all of `states`.
    NFAState(std::vector<int> states)
        : fKind(kRemapped_Kind)
        , fData(std::move(states)) {}

    // Table matcher (kTable_Kind): accepts characters flagged true in `accepts`,
    // with the whole result optionally inverted.
    NFAState(bool inverse, std::vector<bool> accepts, std::vector<int> next)
        : fKind(kTable_Kind)
        , fInverse(inverse)
        , fNext(std::move(next)) {
        for (bool b : accepts) {
            fData.push_back(b);
        }
    }

    // Accept state (kAccept_Kind) for the given token id.
    NFAState(int token)
        : fKind(kAccept_Kind) {
        fData.push_back(token);
    }

    // Returns true if this state matches character c. Accept states consume
    // nothing and always return false here.
    bool accept(char c) const {
        switch (fKind) {
            case kAccept_Kind:
                return false;
            case kChar_Kind:
                return c == fChar;
            case kDot_Kind:
                return c != '\n';
            case kTable_Kind: {
                bool value;
                // Characters beyond the table default to "not accepted" before
                // the fInverse flag is applied.
                if ((size_t) c < fData.size()) {
                    value = fData[c];
                } else {
                    value = false;
                }
                return value != fInverse;
            }
            default:
                SkUNREACHABLE;
        }
    }

#ifdef SK_DEBUG
    // Human-readable dump of the state for debugging the NFA builder.
    std::string description() const {
        switch (fKind) {
            case kAccept_Kind:
                return "Accept(" + std::to_string(fData[0]) + ")";
            case kChar_Kind: {
                std::string result = "Char('" + std::string(1, fChar) + "'";
                for (int v : fNext) {
                    result += ", ";
                    result += std::to_string(v);
                }
                result += ")";
                return result;
            }
            case kDot_Kind: {
                std::string result = "Dot(";
                const char* separator = "";
                for (int v : fNext) {
                    result += separator;
                    result += std::to_string(v);
                    separator = ", ";
                }
                result += ")";
                return result;
            }
            case kRemapped_Kind: {
                std::string result = "Remapped(";
                const char* separator = "";
                for (int v : fData) {
                    result += separator;
                    result += std::to_string(v);
                    separator = ", ";
                }
                result += ")";
                return result;
            }
            case kTable_Kind: {
                std::string result = std::string("Table(") + (fInverse ? "true" : "false") + ", [";
                const char* separator = "";
                for (int v : fData) {
                    result += separator;
                    result += v ? "true" : "false";
                    separator = ", ";
                }
                result += "]";
                for (int n : fNext) {
                    result += ", ";
                    result += std::to_string(n);
                }
                result += ")";
                return result;
            }
            default:
                SkUNREACHABLE;
        }
    }
#endif

    Kind fKind;
    // Payload for kChar_Kind.
    char fChar = 0;
    // Payload for kTable_Kind: inverts the table lookup result.
    bool fInverse = false;
    // Kind-dependent payload: token id (accept), state list (remapped), or accept table.
    std::vector<int> fData;
    // states we transition to upon a successful match from this state
    std::vector<int> fNext;
};
#endif
| {
"content_hash": "ec36626905fcf1457faad307a4ad8b4d",
"timestamp": "",
"source": "github",
"line_count": 147,
"max_line_length": 99,
"avg_line_length": 28.64625850340136,
"alnum_prop": 0.44478746141059133,
"repo_name": "google/skia",
"id": "848a6f11eef10a736aa5a5d4a191976a6be89dd2",
"size": "4354",
"binary": false,
"copies": "4",
"ref": "refs/heads/main",
"path": "src/sksl/lex/NFAState.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "1277381"
},
{
"name": "Batchfile",
"bytes": "17474"
},
{
"name": "C",
"bytes": "6724920"
},
{
"name": "C#",
"bytes": "4683"
},
{
"name": "C++",
"bytes": "28759992"
},
{
"name": "CMake",
"bytes": "2850"
},
{
"name": "Cuda",
"bytes": "944096"
},
{
"name": "Dockerfile",
"bytes": "7142"
},
{
"name": "GLSL",
"bytes": "65328"
},
{
"name": "Go",
"bytes": "108521"
},
{
"name": "HTML",
"bytes": "1274414"
},
{
"name": "Java",
"bytes": "165376"
},
{
"name": "JavaScript",
"bytes": "110447"
},
{
"name": "Lex",
"bytes": "2458"
},
{
"name": "Lua",
"bytes": "70982"
},
{
"name": "Makefile",
"bytes": "10499"
},
{
"name": "Objective-C",
"bytes": "55140"
},
{
"name": "Objective-C++",
"bytes": "161861"
},
{
"name": "PHP",
"bytes": "128097"
},
{
"name": "Python",
"bytes": "1028767"
},
{
"name": "Shell",
"bytes": "63875"
}
],
"symlink_target": ""
} |
package regexodus.derivative;
/**
* Ported to Java from http://blog.errstr.com/2013/01/22/implementing-a-more-powerful-regex/
* D_c(L1 ∩ L2) = D_c(L1) ∩ D_c(L2)
* δ(L1 ∩ L2) = δ(L1) ∩ δ(L2)
*/
public class Intersection extends RegEx {
    // The two operand languages; a string matches iff it matches both.
    private RegEx lhs;
    private RegEx rhs;

    private Intersection(RegEx lhs, RegEx rhs) {
        this.lhs = lhs;
        this.rhs = rhs;
    }

    @Override
    public RegEx derive(char[] c, int idx) {
        // D_c(L1 ∩ L2) = D_c(L1) ∩ D_c(L2); propagate the shared parent to
        // every node produced along the way.
        RegEx dLhs = lhs.derive(c, idx).shareParent(parent);
        RegEx dRhs = rhs.derive(c, idx).shareParent(parent);
        return new Intersection(dLhs, dRhs).shareParent(parent);
    }

    @Override
    public boolean emptySuccess() {
        // The intersection accepts the empty string only when both operands do.
        return lhs.emptySuccess() && rhs.emptySuccess();
    }

    @Override
    public int kind() {
        return INTERSECTION;
    }

    @Override
    public void reset() {
        // 'midway' guards against infinite recursion when the regex graph
        // contains cycles back to this node.
        if (midway) {
            return;
        }
        midway = true;
        lhs.reset();
        rhs.reset();
        midway = false;
    }
}
| {
"content_hash": "640b66997fa3800f546d67c920cb4445",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 92,
"avg_line_length": 22.933333333333334,
"alnum_prop": 0.5746124031007752,
"repo_name": "tommyettinger/RegExodus",
"id": "d9cb2adaa3d9db93e08f7b01445f420351d069e9",
"size": "1043",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "etc/regexodus/derivative/Intersection.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Java",
"bytes": "955073"
},
{
"name": "JavaScript",
"bytes": "10691"
}
],
"symlink_target": ""
} |
package com.google.gson;
import junit.framework.TestCase;
/**
* Performs some unit testing for the {@link Escaper} class.
*
* @author Joel Leitch
*/
public class EscaperTest extends TestCase {
  // One escaper per mode: with and without HTML-sensitive character escaping.
  private Escaper escapeHtmlChar;
  private Escaper noEscapeHtmlChar;

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    escapeHtmlChar = new Escaper(true);
    noEscapeHtmlChar = new Escaper(false);
  }

  // A plain alphanumeric string must pass through unchanged.
  public void testNoSpecialCharacters() {
    String value = "Testing123";
    String escapedString = escapeHtmlChar.escapeJsonString(value);
    assertEquals(value, escapedString);
  }

  public void testNewlineEscaping() throws Exception {
    String containsNewline = "123\n456";
    String escapedString = escapeHtmlChar.escapeJsonString(containsNewline);
    assertEquals("123\\n456", escapedString);
  }

  public void testCarrageReturnEscaping() throws Exception {
    String containsCarrageReturn = "123\r456";
    String escapedString = escapeHtmlChar.escapeJsonString(containsCarrageReturn);
    assertEquals("123\\r456", escapedString);
  }

  public void testTabEscaping() throws Exception {
    String containsTab = "123\t456";
    String escapedString = escapeHtmlChar.escapeJsonString(containsTab);
    assertEquals("123\\t456", escapedString);
  }

  public void testDoubleQuoteEscaping() throws Exception {
    String containsQuote = "123\"456";
    String escapedString = escapeHtmlChar.escapeJsonString(containsQuote);
    assertEquals("123\\\"456", escapedString);
  }

  // In HTML-escaping mode, single quotes become \u0027 (to be safe in HTML attributes).
  public void testSingleQuoteEscaping() throws Exception {
    String containsQuote = "123'456";
    String escapedString = escapeHtmlChar.escapeJsonString(containsQuote);
    assertEquals("123\\u0027456", escapedString);
  }

  // U+2028/U+2029 are valid JSON but illegal in JavaScript string literals,
  // so they must always be unicode-escaped.
  public void testLineSeparatorEscaping() throws Exception {
    String src = "123\u2028 456";
    String escapedString = escapeHtmlChar.escapeJsonString(src);
    assertEquals("123\\u2028 456", escapedString);
  }

  public void testParagraphSeparatorEscaping() throws Exception {
    String src = "123\u2029 456";
    String escapedString = escapeHtmlChar.escapeJsonString(src);
    assertEquals("123\\u2029 456", escapedString);
  }

  // Every character in the C1 control block (U+007F..U+009F) must be altered
  // somehow; the exact escape form is not pinned here.
  public void testControlCharBlockEscaping() throws Exception {
    for (char c = '\u007f'; c <= '\u009f'; ++c) {
      String src = "123 " + c + " 456";
      String escapedString = escapeHtmlChar.escapeJsonString(src);
      assertFalse(src.equals(escapedString));
    }
  }

  // '=' is escaped only in HTML-escaping mode.
  public void testEqualsEscaping() throws Exception {
    String containsEquals = "123=456";
    int index = containsEquals.indexOf('=');
    String unicodeValue = convertToUnicodeString(Character.codePointAt(containsEquals, index));
    String escapedString = escapeHtmlChar.escapeJsonString(containsEquals);
    assertEquals("123" + unicodeValue + "456", escapedString);

    escapedString = noEscapeHtmlChar.escapeJsonString(containsEquals);
    assertEquals(containsEquals, escapedString);
  }

  // '<' and '>' are escaped only in HTML-escaping mode.
  public void testGreaterThanAndLessThanEscaping() throws Exception {
    String containsLtGt = "123>456<";
    int gtIndex = containsLtGt.indexOf('>');
    int ltIndex = containsLtGt.indexOf('<');
    String gtAsUnicode = convertToUnicodeString(Character.codePointAt(containsLtGt, gtIndex));
    String ltAsUnicode = convertToUnicodeString(Character.codePointAt(containsLtGt, ltIndex));
    String escapedString = escapeHtmlChar.escapeJsonString(containsLtGt);
    assertEquals("123" + gtAsUnicode + "456" + ltAsUnicode, escapedString);

    escapedString = noEscapeHtmlChar.escapeJsonString(containsLtGt);
    assertEquals(containsLtGt, escapedString);
  }

  // '&' is escaped only in HTML-escaping mode, whether written literally or as \u0026.
  public void testAmpersandEscaping() throws Exception {
    String containsAmp = "123&456";
    int ampIndex = containsAmp.indexOf('&');
    String ampAsUnicode = convertToUnicodeString(Character.codePointAt(containsAmp, ampIndex));
    String escapedString = escapeHtmlChar.escapeJsonString(containsAmp);
    assertEquals("123" + ampAsUnicode + "456", escapedString);

    escapedString = noEscapeHtmlChar.escapeJsonString(containsAmp);
    assertEquals(containsAmp, escapedString);

    char ampCharAsUnicode = '\u0026';
    String containsAmpUnicode = "123" + ampCharAsUnicode + "456";
    escapedString = escapeHtmlChar.escapeJsonString(containsAmpUnicode);
    assertEquals("123" + ampAsUnicode + "456", escapedString);

    escapedString = noEscapeHtmlChar.escapeJsonString(containsAmpUnicode);
    assertEquals(containsAmp, escapedString);
  }

  public void testSlashEscaping() throws Exception {
    String containsSlash = "123\\456";
    String escapedString = escapeHtmlChar.escapeJsonString(containsSlash);
    assertEquals("123\\\\456", escapedString);
  }

  // Without HTML escaping, single quotes pass through untouched.
  public void testSingleQuoteNotEscaped() throws Exception {
    String containsSingleQuote = "123'456";
    String escapedString = noEscapeHtmlChar.escapeJsonString(containsSingleQuote);
    assertEquals(containsSingleQuote, escapedString);
  }

  public void testRequiredEscapingUnicodeCharacter() throws Exception {
    char unicodeChar = '\u2028';
    String unicodeString = "Testing" + unicodeChar;
    String escapedString = escapeHtmlChar.escapeJsonString(unicodeString);
    assertFalse(unicodeString.equals(escapedString));
    assertEquals("Testing\\u2028", escapedString);
  }

  // Ordinary BMP characters (here 'e' and 'f') must not be escaped.
  public void testUnicodeCharacterStringNoEscaping() throws Exception {
    String unicodeString = "\u0065\u0066";
    String escapedString = escapeHtmlChar.escapeJsonString(unicodeString);
    assertEquals(unicodeString, escapedString);
  }

  /*
  // Disabled: exercised a static escapeJsonString that no longer exists.
  public void testChineseCharacterEscaping() throws Exception {
    String unicodeString = "\u597d\u597d\u597d";
    String chineseString = "好好好";
    assertEquals(unicodeString, chineseString);

    String expectedEscapedString = "\\u597d\\u597d\\u597d";
    String escapedString = Escaper.escapeJsonString(chineseString);
    assertEquals(expectedEscapedString, escapedString);
  }
  */

  // Formats a code point as a lower-case, zero-padded "\uXXXX" escape sequence.
  private String convertToUnicodeString(int codepoint) {
    String hexValue = Integer.toHexString(codepoint);
    StringBuilder sb = new StringBuilder("\\u");
    for (int i = 0; i < 4 - hexValue.length(); i++) {
      sb.append(0);
    }
    sb.append(hexValue);
    return sb.toString().toLowerCase();
  }
}
| {
"content_hash": "31129d9411c29873fe35827e7784f5e3",
"timestamp": "",
"source": "github",
"line_count": 175,
"max_line_length": 93,
"avg_line_length": 34.434285714285714,
"alnum_prop": 0.7663458347162296,
"repo_name": "vnc-biz/zcs-lib-gson",
"id": "a52e31079bbff9cdc8e967c7229670d9f2dff283",
"size": "6626",
"binary": false,
"copies": "1",
"ref": "refs/heads/zcs-lib-gson",
"path": "src/test/java/com/google/gson/EscaperTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "836375"
}
],
"symlink_target": ""
} |
# Run the mocha test suite.
test:
	./node_modules/.bin/mocha

# Coverage via istanbul (lcov-only report); 'cov' and 'test-cov' are aliases.
cov test-cov:
	./node_modules/.bin/istanbul cover node_modules/.bin/_mocha --report lcovonly

.PHONY: test cov test-cov
"content_hash": "8a5d21ea4df0864358afc6e49cf7ee69",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 78,
"avg_line_length": 21.857142857142858,
"alnum_prop": 0.7320261437908496,
"repo_name": "nykma/npg",
"id": "57c1c9fd1811350012249c8c854bc89f621cbbb9",
"size": "153",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Makefile",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "5220"
},
{
"name": "Makefile",
"bytes": "153"
}
],
"symlink_target": ""
} |
# Install Ruby 2.1.3 via the 'rubies' cookbook (chruby-managed), making it the
# default ruby. Bundler is NOT installed by the cookbook; it is installed as a
# gem below so its version can be constrained.
node.default['rubies']['list'] = ['ruby 2.1.3']
node.default['rubies']['bundler']['install'] = false
node.default['chruby_install']['default_ruby'] = true

include_recipe 'rubies'

# Expose the chruby-managed ruby executables on the system PATH.
%w(erb gem irb rake rdoc ri ruby testrb bundle bundler).each do |rb|
  link "/usr/bin/#{rb}" do
    to "/opt/rubies/ruby-2.1.3/bin/#{rb}"
  end
end

# OS-level packages required to build gems with native extensions.
node['supermarket']['gem']['dep_packages'].each do |pkg|
  package pkg
end

gem_package 'bundler' do
  gem_binary '/opt/rubies/ruby-2.1.3/bin/gem'
  version '>= 1.7.3'
end

# Intentionally re-declared after the gem install above so the bundle/bundler
# symlinks point at the freshly installed executables.
%w(bundle bundler).each do |rb|
  link "/usr/bin/#{rb}" do
    to "/opt/rubies/ruby-2.1.3/bin/#{rb}"
  end
end
| {
"content_hash": "4dd7999ac042de19d55107db93378873",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 68,
"avg_line_length": 24.307692307692307,
"alnum_prop": 0.625,
"repo_name": "chef-cookbooks/supermarket",
"id": "98b941f5cd9d59deb89835f5dcf167f9c6db560e",
"size": "1284",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "recipes/_ruby.rb",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "34635"
},
{
"name": "Ruby",
"bytes": "52484"
},
{
"name": "Shell",
"bytes": "2732"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<!-- List-item header row for the caching controller: an icon, a title, and a
     bottom divider line. -->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="wrap_content">

    <!-- Leading icon, vertically centered. -->
    <ImageView
        android:id="@+id/iv_icon"
        android:layout_width="@dimen/video_play_icon_size_small"
        android:layout_height="@dimen/video_play_icon_size_small"
        android:layout_alignParentLeft="true"
        android:layout_centerVertical="true"
        android:layout_marginBottom="@dimen/base_margin"
        android:layout_marginLeft="@dimen/base_margin"
        android:layout_marginTop="@dimen/base_margin" />

    <!-- Header title, set at runtime.
         NOTE(review): android:text="kfldsfdjght" looks like leftover placeholder
         text; consider moving it to tools:text so it never ships. -->
    <TextView
        android:id="@+id/tv_controller_header"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_centerVertical="true"
        android:layout_toRightOf="@id/iv_icon"
        android:paddingLeft="@dimen/base_little_margin"
        android:text="kfldsfdjght"
        android:textColor="@color/black_light"
        android:textSize="@dimen/title_text_size" />

    <!-- Bottom divider. -->
    <View
        android:layout_width="match_parent"
        android:layout_height="@dimen/dimen_0.5dp"
        android:layout_alignParentBottom="true"
        android:layout_marginLeft="@dimen/base_margin"
        android:layout_marginRight="@dimen/base_margin"
        android:background="@color/grey_light" />
</RelativeLayout>
| {
"content_hash": "1d47204d49faeeac653d9e9c2ffdf846",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 74,
"avg_line_length": 40.142857142857146,
"alnum_prop": 0.6704626334519573,
"repo_name": "uin3566/Dota2Helper",
"id": "f8e4b0680059784bd6cf85e9bf22e5f701b2baac",
"size": "1405",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/res/layout/item_header_caching_controller.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "785333"
},
{
"name": "Lua",
"bytes": "3338"
}
],
"symlink_target": ""
} |
using namespace std;
using namespace boost;

//
// Global state
//

// Registered wallets and the lock guarding the set.
CCriticalSection cs_setpwalletRegistered;
set<CWallet*> setpwalletRegistered;

// Main lock protecting the chain state below.
CCriticalSection cs_main;

CTxMemPool mempool;
unsigned int nTransactionsUpdated = 0;

map<uint256, CBlockIndex*> mapBlockIndex;
uint256 hashGenesisBlock("0x3932037a7d96bc26d6379cdb6b3ecf1f78a1203ff667f06298d181f0ecbd2be7");
// Minimum-difficulty proof-of-work target. NOTE(review): an earlier comment
// here claimed "starting difficulty is 1 / 2^12", which contradicts the
// ">> 20" shift; the shift is authoritative.
static CBigNum bnProofOfWorkLimit(~uint256(0) >> 20);
CBlockIndex* pindexGenesisBlock = NULL;
int nBestHeight = -1;
CBigNum bnBestChainWork = 0;
CBigNum bnBestInvalidWork = 0;
uint256 hashBestChain = 0;
CBlockIndex* pindexBest = NULL;
int64 nTimeBestReceived = 0;
CMedianFilter<int> cPeerBlockCounts(5, 0); // Amount of blocks that other nodes claim to have

// Orphan blocks (parent not yet known), indexed by own hash and by parent hash.
map<uint256, CBlock*> mapOrphanBlocks;
multimap<uint256, CBlock*> mapOrphanBlocksByPrev;

// Orphan transactions (inputs not yet known), kept as raw serialized messages,
// indexed by own hash and by each missing parent's hash.
map<uint256, CDataStream*> mapOrphanTransactions;
map<uint256, map<uint256, CDataStream*> > mapOrphanTransactionsByPrev;

// Constant stuff for coinbase transactions we create:
CScript COINBASE_FLAGS;

// Prefix used when signing/verifying text messages with wallet keys.
const string strMessageMagic = "PiggieCoin Signed Message:\n";

// Miner hash-rate statistics.
double dHashesPerSec;
int64 nHPSTimerStart;

// Settings
int64 nTransactionFee = 0;
int64 nMinimumInputValue = CENT / 100;
//////////////////////////////////////////////////////////////////////////////
//
// dispatching functions
//

// These functions dispatch to one or all registered wallets
// NOTE(review): only Register/Unregister take cs_setpwalletRegistered; the
// iteration helpers below read the set unlocked - confirm callers hold an
// appropriate lock.

// Adds a wallet to the set receiving chain/transaction notifications.
void RegisterWallet(CWallet* pwalletIn)
{
    {
        LOCK(cs_setpwalletRegistered);
        setpwalletRegistered.insert(pwalletIn);
    }
}

// Removes a wallet from the notification set.
void UnregisterWallet(CWallet* pwalletIn)
{
    {
        LOCK(cs_setpwalletRegistered);
        setpwalletRegistered.erase(pwalletIn);
    }
}

// check whether the passed transaction is from us
bool static IsFromMe(CTransaction& tx)
{
    BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered)
        if (pwallet->IsFromMe(tx))
            return true;
    return false;
}

// get the wallet transaction with the given hash (if it exists)
bool static GetTransaction(const uint256& hashTx, CWalletTx& wtx)
{
    BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered)
        if (pwallet->GetTransaction(hashTx,wtx))
            return true;
    return false;
}

// erases transaction with the given hash from all wallets
void static EraseFromWallets(uint256 hash)
{
    BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered)
        pwallet->EraseFromWallet(hash);
}

// make sure all wallets know about the given transaction, in the given block
void SyncWithWallets(const CTransaction& tx, const CBlock* pblock, bool fUpdate)
{
    BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered)
        pwallet->AddToWalletIfInvolvingMe(tx, pblock, fUpdate);
}

// notify wallets about a new best chain
void static SetBestChain(const CBlockLocator& loc)
{
    BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered)
        pwallet->SetBestChain(loc);
}

// notify wallets about an updated transaction
void static UpdatedTransaction(const uint256& hashTx)
{
    BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered)
        pwallet->UpdatedTransaction(hashTx);
}

// dump all wallets
void static PrintWallets(const CBlock& block)
{
    BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered)
        pwallet->PrintWallet(block);
}

// notify wallets about an incoming inventory (for request counts)
void static Inventory(const uint256& hash)
{
    BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered)
        pwallet->Inventory(hash);
}

// ask wallets to resend their transactions
void static ResendWalletTransactions()
{
    BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered)
        pwallet->ResendWalletTransactions();
}
//////////////////////////////////////////////////////////////////////////////
//
// mapOrphanTransactions
//

// Stores a serialized orphan transaction (one whose inputs are not yet known),
// indexed by its own hash and by each parent hash it is waiting on.
// Returns false if it is already stored or is too large to keep.
bool AddOrphanTx(const CDataStream& vMsg)
{
    CTransaction tx;
    CDataStream(vMsg) >> tx;
    uint256 hash = tx.GetHash();
    if (mapOrphanTransactions.count(hash))
        return false;

    CDataStream* pvMsg = new CDataStream(vMsg);

    // Ignore big transactions, to avoid a
    // send-big-orphans memory exhaustion attack. If a peer has a legitimate
    // large transaction with a missing parent then we assume
    // it will rebroadcast it later, after the parent transaction(s)
    // have been mined or received.
    // 10,000 orphans, each of which is at most 5,000 bytes big is
    // at most 50 megabytes of orphans:
    if (pvMsg->size() > 5000)
    {
        // NOTE(review): "%u" with a size_t argument is not portable on LP64
        // platforms; upstream later moved to size_t-aware format macros.
        printf("ignoring large orphan tx (size: %u, hash: %s)\n", pvMsg->size(), hash.ToString().substr(0,10).c_str());
        delete pvMsg;
        return false;
    }

    mapOrphanTransactions[hash] = pvMsg;
    BOOST_FOREACH(const CTxIn& txin, tx.vin)
        mapOrphanTransactionsByPrev[txin.prevout.hash].insert(make_pair(hash, pvMsg));

    printf("stored orphan tx %s (mapsz %u)\n", hash.ToString().substr(0,10).c_str(),
        mapOrphanTransactions.size());
    return true;
}

// Frees the stored orphan with the given hash and removes all of its
// by-parent index entries.
void static EraseOrphanTx(uint256 hash)
{
    if (!mapOrphanTransactions.count(hash))
        return;
    const CDataStream* pvMsg = mapOrphanTransactions[hash];
    CTransaction tx;
    CDataStream(*pvMsg) >> tx;
    BOOST_FOREACH(const CTxIn& txin, tx.vin)
    {
        mapOrphanTransactionsByPrev[txin.prevout.hash].erase(hash);
        if (mapOrphanTransactionsByPrev[txin.prevout.hash].empty())
            mapOrphanTransactionsByPrev.erase(txin.prevout.hash);
    }
    delete pvMsg;
    mapOrphanTransactions.erase(hash);
}

// Evicts randomly-chosen orphans until at most nMaxOrphans remain; returns the
// number evicted.
unsigned int LimitOrphanTxSize(unsigned int nMaxOrphans)
{
    unsigned int nEvicted = 0;
    while (mapOrphanTransactions.size() > nMaxOrphans)
    {
        // Evict a random orphan:
        uint256 randomhash = GetRandHash();
        map<uint256, CDataStream*>::iterator it = mapOrphanTransactions.lower_bound(randomhash);
        if (it == mapOrphanTransactions.end())
            it = mapOrphanTransactions.begin();
        EraseOrphanTx(it->first);
        ++nEvicted;
    }
    return nEvicted;
}
//////////////////////////////////////////////////////////////////////////////
//
// CTransaction and CTxIndex
//

// Loads into *this the transaction referenced by prevout (via the tx index);
// fails if the index entry is missing, the disk read fails, or prevout.n is
// out of range for the loaded outputs. On failure *this is left null.
bool CTransaction::ReadFromDisk(CTxDB& txdb, COutPoint prevout, CTxIndex& txindexRet)
{
    SetNull();
    if (!txdb.ReadTxIndex(prevout.hash, txindexRet))
        return false;
    if (!ReadFromDisk(txindexRet.pos))
        return false;
    if (prevout.n >= vout.size())
    {
        SetNull();
        return false;
    }
    return true;
}

// Convenience overload: discards the tx index entry.
bool CTransaction::ReadFromDisk(CTxDB& txdb, COutPoint prevout)
{
    CTxIndex txindex;
    return ReadFromDisk(txdb, prevout, txindex);
}

// Convenience overload: opens its own read-only tx database.
bool CTransaction::ReadFromDisk(COutPoint prevout)
{
    CTxDB txdb("r");
    CTxIndex txindex;
    return ReadFromDisk(txdb, prevout, txindex);
}

// Relay-policy (not consensus) check: version, scriptSig size/push-only,
// and standard output script templates.
bool CTransaction::IsStandard() const
{
    if (nVersion > CTransaction::CURRENT_VERSION)
        return false;

    BOOST_FOREACH(const CTxIn& txin, vin)
    {
        // Biggest 'standard' txin is a 3-signature 3-of-3 CHECKMULTISIG
        // pay-to-script-hash, which is 3 ~80-byte signatures, 3
        // ~65-byte public keys, plus a few script ops.
        if (txin.scriptSig.size() > 500)
            return false;
        if (!txin.scriptSig.IsPushOnly())
            return false;
    }
    BOOST_FOREACH(const CTxOut& txout, vout)
        if (!::IsStandard(txout.scriptPubKey))
            return false;
    return true;
}

//
// Check transaction inputs, and make sure any
// pay-to-script-hash transactions are evaluating IsStandard scripts
//
// Why bother? To avoid denial-of-service attacks; an attacker
// can submit a standard HASH... OP_EQUAL transaction,
// which will get accepted into blocks. The redemption
// script can be anything; an attacker could use a very
// expensive-to-check-upon-redemption script like:
// DUP CHECKSIG DROP ... repeated 100 times... OP_1
//
bool CTransaction::AreInputsStandard(const MapPrevTx& mapInputs) const
{
    if (IsCoinBase())
        return true; // Coinbases don't use vin normally

    for (unsigned int i = 0; i < vin.size(); i++)
    {
        const CTxOut& prev = GetOutputFor(vin[i], mapInputs);

        vector<vector<unsigned char> > vSolutions;
        txnouttype whichType;
        // get the scriptPubKey corresponding to this input:
        const CScript& prevScript = prev.scriptPubKey;
        if (!Solver(prevScript, whichType, vSolutions))
            return false;
        int nArgsExpected = ScriptSigArgsExpected(whichType, vSolutions);
        if (nArgsExpected < 0)
            return false;

        // Transactions with extra stuff in their scriptSigs are
        // non-standard. Note that this EvalScript() call will
        // be quick, because if there are any operations
        // beside "push data" in the scriptSig the
        // IsStandard() call returns false
        vector<vector<unsigned char> > stack;
        if (!EvalScript(stack, vin[i].scriptSig, *this, i, 0))
            return false;

        if (whichType == TX_SCRIPTHASH)
        {
            if (stack.empty())
                return false;
            // The top stack item is the redeem script; it must itself solve to
            // a standard template and must not be another pay-to-script-hash.
            CScript subscript(stack.back().begin(), stack.back().end());
            vector<vector<unsigned char> > vSolutions2;
            txnouttype whichType2;
            if (!Solver(subscript, whichType2, vSolutions2))
                return false;
            if (whichType2 == TX_SCRIPTHASH)
                return false;

            int tmpExpected;
            tmpExpected = ScriptSigArgsExpected(whichType2, vSolutions2);
            if (tmpExpected < 0)
                return false;
            nArgsExpected += tmpExpected;
        }

        // Exactly the expected number of stack items: nothing extra allowed.
        if (stack.size() != (unsigned int)nArgsExpected)
            return false;
    }

    return true;
}

// Counts signature operations the legacy (non-P2SH-aware) way, over both
// input and output scripts.
unsigned int
CTransaction::GetLegacySigOpCount() const
{
    unsigned int nSigOps = 0;
    BOOST_FOREACH(const CTxIn& txin, vin)
    {
        nSigOps += txin.scriptSig.GetSigOpCount(false);
    }
    BOOST_FOREACH(const CTxOut& txout, vout)
    {
        nSigOps += txout.scriptPubKey.GetSigOpCount(false);
    }
    return nSigOps;
}
// Fills in hashBlock/nIndex/vMerkleBranch for this transaction from the given
// block (loading the block from disk if pblock is NULL). Returns the tx's
// depth in the main chain, or 0 if it is not in a main-chain block.
int CMerkleTx::SetMerkleBranch(const CBlock* pblock)
{
    if (fClient)
    {
        // Lightweight client mode: keep whatever hashBlock we already have.
        if (hashBlock == 0)
            return 0;
    }
    else
    {
        CBlock blockTmp;
        if (pblock == NULL)
        {
            // Load the block this tx is in
            CTxIndex txindex;
            if (!CTxDB("r").ReadTxIndex(GetHash(), txindex))
                return 0;
            if (!blockTmp.ReadFromDisk(txindex.pos.nFile, txindex.pos.nBlockPos))
                return 0;
            pblock = &blockTmp;
        }

        // Update the tx's hashBlock
        hashBlock = pblock->GetHash();

        // Locate the transaction
        for (nIndex = 0; nIndex < (int)pblock->vtx.size(); nIndex++)
            if (pblock->vtx[nIndex] == *(CTransaction*)this)
                break;
        if (nIndex == (int)pblock->vtx.size())
        {
            vMerkleBranch.clear();
            nIndex = -1;
            printf("ERROR: SetMerkleBranch() : couldn't find tx in block\n");
            return 0;
        }

        // Fill in merkle branch
        vMerkleBranch = pblock->GetMerkleBranch(nIndex);
    }

    // Is the tx in a block that's in the main chain
    map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hashBlock);
    if (mi == mapBlockIndex.end())
        return 0;
    CBlockIndex* pindex = (*mi).second;
    if (!pindex || !pindex->IsInMainChain())
        return 0;

    return pindexBest->nHeight - pindex->nHeight + 1;
}
// Context-free consensus checks: non-empty vin/vout, serialized size limit,
// output value ranges, no duplicate inputs, coinbase script size / non-null
// prevouts. DoS(n, ...) flags the peer with misbehavior score n.
bool CTransaction::CheckTransaction() const
{
    // Basic checks that don't depend on any context
    if (vin.empty())
        return DoS(10, error("CTransaction::CheckTransaction() : vin empty"));
    if (vout.empty())
        return DoS(10, error("CTransaction::CheckTransaction() : vout empty"));
    // Size limits
    if (::GetSerializeSize(*this, SER_NETWORK, PROTOCOL_VERSION) > MAX_BLOCK_SIZE)
        return DoS(100, error("CTransaction::CheckTransaction() : size limits failed"));

    // Check for negative or overflow output values
    int64 nValueOut = 0;
    BOOST_FOREACH(const CTxOut& txout, vout)
    {
        if (txout.nValue < 0)
            return DoS(100, error("CTransaction::CheckTransaction() : txout.nValue negative"));
        if (txout.nValue > MAX_MONEY)
            return DoS(100, error("CTransaction::CheckTransaction() : txout.nValue too high"));
        // Running total is range-checked at every step to catch overflow.
        nValueOut += txout.nValue;
        if (!MoneyRange(nValueOut))
            return DoS(100, error("CTransaction::CheckTransaction() : txout total out of range"));
    }

    // Check for duplicate inputs
    // NOTE(review): duplicates return plain false with no DoS score or error
    // message, unlike the other failures here - confirm this is intentional.
    set<COutPoint> vInOutPoints;
    BOOST_FOREACH(const CTxIn& txin, vin)
    {
        if (vInOutPoints.count(txin.prevout))
            return false;
        vInOutPoints.insert(txin.prevout);
    }

    if (IsCoinBase())
    {
        if (vin[0].scriptSig.size() < 2 || vin[0].scriptSig.size() > 100)
            return DoS(100, error("CTransaction::CheckTransaction() : coinbase script size"));
    }
    else
    {
        BOOST_FOREACH(const CTxIn& txin, vin)
            if (txin.prevout.IsNull())
                return DoS(10, error("CTransaction::CheckTransaction() : prevout is null"));
    }

    return true;
}
// Validates 'tx' and, if acceptable, adds it to the memory pool. When
// fCheckInputs is set, inputs are fetched and verified against the tx
// database; pfMissingInputs (optional) is set when inputs are unknown
// (i.e. the tx is an orphan rather than invalid).
bool CTxMemPool::accept(CTxDB& txdb, CTransaction &tx, bool fCheckInputs,
                        bool* pfMissingInputs)
{
    if (pfMissingInputs)
        *pfMissingInputs = false;

    if (!tx.CheckTransaction())
        return error("CTxMemPool::accept() : CheckTransaction failed");

    // Coinbase is only valid in a block, not as a loose transaction
    if (tx.IsCoinBase())
        return tx.DoS(100, error("CTxMemPool::accept() : coinbase as individual tx"));

    // To help v0.1.5 clients who would see it as a negative number
    if ((int64)tx.nLockTime > std::numeric_limits<int>::max())
        return error("CTxMemPool::accept() : not accepting nLockTime beyond 2038 yet");

    // Rather not work on nonstandard transactions (unless -testnet)
    if (!fTestNet && !tx.IsStandard())
        return error("CTxMemPool::accept() : nonstandard transaction type");

    // Do we already have it?
    uint256 hash = tx.GetHash();
    {
        LOCK(cs);
        if (mapTx.count(hash))
            return false;
    }
    if (fCheckInputs)
        if (txdb.ContainsTx(hash))
            return false;

    // Check for conflicts with in-memory transactions
    CTransaction* ptxOld = NULL;
    for (unsigned int i = 0; i < tx.vin.size(); i++)
    {
        COutPoint outpoint = tx.vin[i].prevout;
        if (mapNextTx.count(outpoint))
        {
            // Disable replacement feature for now
            // NOTE(review): the early return below makes the remainder of this
            // block (the transaction-replacement logic and ptxOld assignment)
            // unreachable; it is kept for reference only.
            return false;

            // Allow replacing with a newer version of the same transaction
            if (i != 0)
                return false;
            ptxOld = mapNextTx[outpoint].ptx;
            if (ptxOld->IsFinal())
                return false;
            if (!tx.IsNewerThan(*ptxOld))
                return false;
            for (unsigned int i = 0; i < tx.vin.size(); i++)
            {
                COutPoint outpoint = tx.vin[i].prevout;
                if (!mapNextTx.count(outpoint) || mapNextTx[outpoint].ptx != ptxOld)
                    return false;
            }
            break;
        }
    }

    if (fCheckInputs)
    {
        MapPrevTx mapInputs;
        map<uint256, CTxIndex> mapUnused;
        bool fInvalid = false;
        if (!tx.FetchInputs(txdb, mapUnused, false, false, mapInputs, fInvalid))
        {
            if (fInvalid)
                return error("CTxMemPool::accept() : FetchInputs found invalid tx %s", hash.ToString().substr(0,10).c_str());
            if (pfMissingInputs)
                *pfMissingInputs = true;
            return false;
        }

        // Check for non-standard pay-to-script-hash in inputs
        if (!tx.AreInputsStandard(mapInputs) && !fTestNet)
            return error("CTxMemPool::accept() : nonstandard transaction input");

        // Note: if you modify this code to accept non-standard transactions, then
        // you should add code here to check that the transaction does a
        // reasonable number of ECDSA signature verifications.

        int64 nFees = tx.GetValueIn(mapInputs)-tx.GetValueOut();
        unsigned int nSize = ::GetSerializeSize(tx, SER_NETWORK, PROTOCOL_VERSION);

        // Don't accept it if it can't get into a block
        if (nFees < tx.GetMinFee(1000, true, GMF_RELAY))
            return error("CTxMemPool::accept() : not enough fees");

        // Continuously rate-limit free transactions
        // This mitigates 'penny-flooding' -- sending thousands of free transactions just to
        // be annoying or make other's transactions take longer to confirm.
        if (nFees < MIN_RELAY_TX_FEE)
        {
            static CCriticalSection cs;
            static double dFreeCount;
            static int64 nLastTime;
            int64 nNow = GetTime();

            {
                LOCK(cs);
                // Use an exponentially decaying ~10-minute window:
                dFreeCount *= pow(1.0 - 1.0/600.0, (double)(nNow - nLastTime));
                nLastTime = nNow;
                // -limitfreerelay unit is thousand-bytes-per-minute
                // At default rate it would take over a month to fill 1GB
                if (dFreeCount > GetArg("-limitfreerelay", 15)*10*1000 && !IsFromMe(tx))
                    return error("CTxMemPool::accept() : free transaction rejected by rate limiter");
                if (fDebug)
                    printf("Rate limit dFreeCount: %g => %g\n", dFreeCount, dFreeCount+nSize);
                dFreeCount += nSize;
            }
        }

        // Check against previous transactions
        // This is done last to help prevent CPU exhaustion denial-of-service attacks.
        if (!tx.ConnectInputs(mapInputs, mapUnused, CDiskTxPos(1,1,1), pindexBest, false, false))
        {
            return error("CTxMemPool::accept() : ConnectInputs failed %s", hash.ToString().substr(0,10).c_str());
        }
    }

    // Store transaction in memory
    {
        LOCK(cs);
        if (ptxOld)
        {
            printf("CTxMemPool::accept() : replacing tx %s with new version\n", ptxOld->GetHash().ToString().c_str());
            remove(*ptxOld);
        }
        addUnchecked(hash, tx);
    }

    ///// are we sure this is ok when loading transactions or restoring block txes
    // If updated, erase old tx from wallet
    if (ptxOld)
        EraseFromWallets(ptxOld->GetHash());

    printf("CTxMemPool::accept() : accepted %s (poolsz %u)\n",
           hash.ToString().substr(0,10).c_str(),
           mapTx.size());
    return true;
}

// Thin wrapper: accept this transaction into the global mempool.
bool CTransaction::AcceptToMemoryPool(CTxDB& txdb, bool fCheckInputs, bool* pfMissingInputs)
{
    return mempool.accept(txdb, *this, fCheckInputs, pfMissingInputs);
}
// Inserts the transaction into mapTx/mapNextTx with no validation.
// NOTE(review): unlike remove()/queryHashes(), this does not take LOCK(cs);
// the caller (accept) holds it - confirm all other call sites do too.
bool CTxMemPool::addUnchecked(const uint256& hash, CTransaction &tx)
{
    // Add to memory pool without checking anything.  Don't call this directly,
    // call CTxMemPool::accept to properly check the transaction first.
    {
        mapTx[hash] = tx;
        for (unsigned int i = 0; i < tx.vin.size(); i++)
            mapNextTx[tx.vin[i].prevout] = CInPoint(&mapTx[hash], i);
        nTransactionsUpdated++;
    }
    return true;
}

// Removes the transaction (and its spent-outpoint index entries) if present.
bool CTxMemPool::remove(CTransaction &tx)
{
    // Remove transaction from memory pool
    {
        LOCK(cs);
        uint256 hash = tx.GetHash();
        if (mapTx.count(hash))
        {
            BOOST_FOREACH(const CTxIn& txin, tx.vin)
                mapNextTx.erase(txin.prevout);
            mapTx.erase(hash);
            nTransactionsUpdated++;
        }
    }
    return true;
}

// Copies the hashes of all pooled transactions into vtxid.
void CTxMemPool::queryHashes(std::vector<uint256>& vtxid)
{
    vtxid.clear();

    LOCK(cs);
    vtxid.reserve(mapTx.size());
    for (map<uint256, CTransaction>::iterator mi = mapTx.begin(); mi != mapTx.end(); ++mi)
        vtxid.push_back((*mi).first);
}
// Returns this tx's confirmation depth (1 = in the tip block) and sets
// pindexRet to its block index; returns 0 when not in the main chain or when
// the stored merkle branch fails verification.
int CMerkleTx::GetDepthInMainChain(CBlockIndex* &pindexRet) const
{
    if (hashBlock == 0 || nIndex == -1)
        return 0;

    // Find the block it claims to be in
    map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hashBlock);
    if (mi == mapBlockIndex.end())
        return 0;
    CBlockIndex* pindex = (*mi).second;
    if (!pindex || !pindex->IsInMainChain())
        return 0;

    // Make sure the merkle branch connects to this block
    if (!fMerkleVerified)
    {
        if (CBlock::CheckMerkleBranch(GetHash(), vMerkleBranch, nIndex) != pindex->hashMerkleRoot)
            return 0;
        // Cache the successful verification so it only runs once.
        fMerkleVerified = true;
    }

    pindexRet = pindex;
    return pindexBest->nHeight - pindex->nHeight + 1;
}

// For coinbases only: number of additional confirmations needed before the
// output is spendable (0 for non-coinbase transactions).
int CMerkleTx::GetBlocksToMaturity() const
{
    if (!IsCoinBase())
        return 0;
    return max(0, (COINBASE_MATURITY+10) - GetDepthInMainChain());
}

// Accepts this tx into the mempool; in client (SPV) mode input checking is
// skipped and instead the tx must be in the main chain or connect cleanly.
bool CMerkleTx::AcceptToMemoryPool(CTxDB& txdb, bool fCheckInputs)
{
    if (fClient)
    {
        if (!IsInMainChain() && !ClientConnectInputs())
            return false;
        return CTransaction::AcceptToMemoryPool(txdb, false);
    }
    else
    {
        return CTransaction::AcceptToMemoryPool(txdb, fCheckInputs);
    }
}

// Convenience overload using a read-only tx database.
bool CMerkleTx::AcceptToMemoryPool()
{
    CTxDB txdb("r");
    return AcceptToMemoryPool(txdb);
}
bool CWalletTx::AcceptWalletTransaction(CTxDB& txdb, bool fCheckInputs)
{
    // Submit this wallet transaction to the memory pool, first submitting
    // any non-coinbase supporting transactions (vtxPrev) it depends on that
    // are not already in the pool or the transaction database.  Failures of
    // the supporting submissions are deliberately ignored (best effort).
    {
        LOCK(mempool.cs);
        // Add previous supporting transactions first
        BOOST_FOREACH(CMerkleTx& tx, vtxPrev)
        {
            if (!tx.IsCoinBase())
            {
                uint256 hash = tx.GetHash();
                if (!mempool.exists(hash) && !txdb.ContainsTx(hash))
                    tx.AcceptToMemoryPool(txdb, fCheckInputs);
            }
        }
        return AcceptToMemoryPool(txdb, fCheckInputs);
    }
    // NOTE(review): unreachable - the scoped block above always returns.
    return false;
}
bool CWalletTx::AcceptWalletTransaction()
{
    // Convenience overload: open the transaction database read-only and
    // delegate with default input checking.
    CTxDB txdb("r");
    return AcceptWalletTransaction(txdb);
}
int CTxIndex::GetDepthInMainChain() const
{
    // Confirmations of the block containing this tx-index entry
    // (1 = tip block); 0 if the block cannot be read or is off-chain.
    // Note: reads the block header from disk on every call.
    // Read block header
    CBlock block;
    if (!block.ReadFromDisk(pos.nFile, pos.nBlockPos, false))
        return 0;
    // Find the block in the index
    map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(block.GetHash());
    if (mi == mapBlockIndex.end())
        return 0;
    CBlockIndex* pindex = (*mi).second;
    if (!pindex || !pindex->IsInMainChain())
        return 0;
    return 1 + nBestHeight - pindex->nHeight;
}
// Return transaction in tx, and if it was found inside a block, its hash is placed in hashBlock
bool GetTransaction(const uint256 &hash, CTransaction &tx, uint256 &hashBlock)
{
    // Lookup order: memory pool first (hashBlock left untouched), then the
    // on-disk transaction index.  Returns false if the txid is unknown.
    {
        LOCK(cs_main);
        {
            LOCK(mempool.cs);
            if (mempool.exists(hash))
            {
                tx = mempool.lookup(hash);
                return true;
            }
        }
        CTxDB txdb("r");
        CTxIndex txindex;
        if (tx.ReadFromDisk(txdb, COutPoint(hash, 0), txindex))
        {
            // Best effort: hashBlock is only filled in when the containing
            // block can also be read back from disk.
            CBlock block;
            if (block.ReadFromDisk(txindex.pos.nFile, txindex.pos.nBlockPos, false))
                hashBlock = block.GetHash();
            return true;
        }
    }
    return false;
}
//////////////////////////////////////////////////////////////////////////////
//
// CBlock and CBlockIndex
//
bool CBlock::ReadFromDisk(const CBlockIndex* pindex, bool fReadTransactions)
{
    // Load this block from disk via its index entry.  With transactions
    // requested, the block is read from its file position and verified
    // against the index; otherwise the cached header suffices.
    if (fReadTransactions)
    {
        if (!ReadFromDisk(pindex->nFile, pindex->nBlockPos, fReadTransactions))
            return false;
        // Guard against disk corruption: the block we loaded must hash to
        // the entry that claimed it.
        if (GetHash() != pindex->GetBlockHash())
            return error("CBlock::ReadFromDisk() : GetHash() doesn't match index");
        return true;
    }
    *this = pindex->GetBlockHeader();
    return true;
}
uint256 static GetOrphanRoot(const CBlock* pblock)
{
    // Walk the orphan chain backwards until we reach a block whose parent
    // is not itself an orphan; that block's hash identifies the chain and
    // tells us which ancestor to request from peers.
    const CBlock* pwalk = pblock;
    while (mapOrphanBlocks.count(pwalk->hashPrevBlock) != 0)
        pwalk = mapOrphanBlocks[pwalk->hashPrevBlock];
    return pwalk->GetHash();
}
// added from dogecoin source code
int static generateMTRandom(unsigned int s, int range)
{
random::mt19937 gen(s);
random::uniform_int_distribution<> dist(1, range);
return dist(gen);
}
//int64 static GetBlockValue(int nHeight, int64 nFees)
//{
// int64 nSubsidy = 4 * COIN;
//
//
// if(nHeight < 17280) // no block reward within the first 3 days
// nSubsidy = 0;
// if(nHeight > 10519200) // no block reward after 5 years
// nSubsidy = 0;
//
// return nSubsidy + nFees;
//}
int64 static GetBlockValue(int nHeight, int64 nFees, uint256 prevHash)
{
    // Consensus-critical block subsidy schedule.  The reward is pseudo-random
    // but deterministic: each node seeds a Mersenne Twister with hex digits
    // taken from the previous block hash, so all nodes agree on the value.
    // The random range roughly halves every 50,000 blocks; from height
    // 300,000 onward the subsidy stays at the fixed default below.  The
    // result is multiplied by 10 before fees are added.
    int64 nSubsidy = 10000 * COIN;
    // Seed from 7 hex characters of the previous block hash (offset 7).
    std::string cseed_str = prevHash.ToString().substr(7,7);
    const char* cseed = cseed_str.c_str();
    long seed = hex2long(cseed);
    int rand = generateMTRandom(seed, 999999);
    int rand1 = 0;
    int rand2 = 0;
    int rand3 = 0;
    int rand4 = 0;
    int rand5 = 0;
    if(nHeight < 50000)
    {
        nSubsidy = (1 + rand) * COIN;
    }
    else if(nHeight < 100000)
    {
        cseed_str = prevHash.ToString().substr(7,7);
        cseed = cseed_str.c_str();
        seed = hex2long(cseed);
        rand1 = generateMTRandom(seed, 499999);
        nSubsidy = (1 + rand1) * COIN;
    }
    else if(nHeight < 150000)
    {
        // NOTE(review): this tier (and the 250k-300k tier) seeds from
        // substr(6,7) while the others use substr(7,7) - presumably
        // intentional; confirm against the published coin specification.
        cseed_str = prevHash.ToString().substr(6,7);
        cseed = cseed_str.c_str();
        seed = hex2long(cseed);
        rand2 = generateMTRandom(seed, 249999);
        nSubsidy = (1 + rand2) * COIN;
    }
    else if(nHeight < 200000)
    {
        cseed_str = prevHash.ToString().substr(7,7);
        cseed = cseed_str.c_str();
        seed = hex2long(cseed);
        rand3 = generateMTRandom(seed, 124999);
        nSubsidy = (1 + rand3) * COIN;
    }
    else if(nHeight < 250000)
    {
        cseed_str = prevHash.ToString().substr(7,7);
        cseed = cseed_str.c_str();
        seed = hex2long(cseed);
        rand4 = generateMTRandom(seed, 62499);
        nSubsidy = (1 + rand4) * COIN;
    }
    else if(nHeight < 300000)
    {
        cseed_str = prevHash.ToString().substr(6,7);
        cseed = cseed_str.c_str();
        seed = hex2long(cseed);
        rand5 = generateMTRandom(seed, 31249);
        nSubsidy = (1 + rand5) * COIN;
    }
    return nSubsidy * 10 + nFees;
}
// Difficulty retargeting parameters (consensus-critical).
static const int64 nTargetTimespan = 3 * 60 * 60; // PiggieCoin: 3 hours
static const int64 nTargetSpacing = 30; // PiggieCoin: 30 seconds
static const int64 nInterval = nTargetTimespan / nTargetSpacing; // 360 blocks per retarget
// Thanks: Balthazar for suggesting the following fix
// https://bitcointalk.org/index.php?topic=182430.msg1904506#msg1904506
static const int64 nReTargetHistoryFact = 4; // look at 4 times the retarget
// interval into the block history
//
// minimum amount of work that could possibly be required nTime after
// minimum work required was nBase
//
unsigned int ComputeMinWork(unsigned int nBase, int64 nTime)
{
    // Used during header sync to sanity-check claimed difficulty: since
    // difficulty can drop by at most 4x per retarget period, the target
    // after nTime seconds is bounded; anything easier is rejected upstream.
    // Testnet has min-difficulty blocks
    // after nTargetSpacing*2 time between blocks:
    if (fTestNet && nTime > nTargetSpacing*2)
        return bnProofOfWorkLimit.GetCompact();
    CBigNum bnResult;
    bnResult.SetCompact(nBase);
    while (nTime > 0 && bnResult < bnProofOfWorkLimit)
    {
        // Maximum 400% adjustment...
        bnResult *= 4;
        // ... in best-case exactly 4-times-normal target time
        nTime -= nTargetTimespan*4;
    }
    // Clamp to the easiest allowed target.
    if (bnResult > bnProofOfWorkLimit)
        bnResult = bnProofOfWorkLimit;
    return bnResult.GetCompact();
}
unsigned int static GetNextWorkRequired(const CBlockIndex* pindexLast, const CBlock *pblock)
{
    // Consensus-critical: computes the compact difficulty target (nBits)
    // required for the block following pindexLast.  Difficulty only changes
    // every nInterval blocks; the adjustment is clamped to a factor of 4 in
    // either direction per retarget.
    unsigned int nProofOfWorkLimit = bnProofOfWorkLimit.GetCompact();
    // Genesis block
    if (pindexLast == NULL)
        return nProofOfWorkLimit;
    // Only change once per interval
    if ((pindexLast->nHeight+1) % nInterval != 0)
    {
        // Special difficulty rule for testnet:
        if (fTestNet)
        {
            // If the new block's timestamp is more than 2* 10 minutes
            // then allow mining of a min-difficulty block.
            if (pblock->nTime > pindexLast->nTime + nTargetSpacing*2)
                return nProofOfWorkLimit;
            else
            {
                // Return the last non-special-min-difficulty-rules-block
                const CBlockIndex* pindex = pindexLast;
                while (pindex->pprev && pindex->nHeight % nInterval != 0 && pindex->nBits == nProofOfWorkLimit)
                    pindex = pindex->pprev;
                return pindex->nBits;
            }
        }
        return pindexLast->nBits;
    }
    // Litecoin: This fixes an issue where a 51% attack can change difficulty at will.
    // Go back the full period unless it's the first retarget after genesis. Code courtesy of Art Forz
    int blockstogoback = nInterval-1;
    if ((pindexLast->nHeight+1) != nInterval)
        blockstogoback = nInterval;
    // After COINFIX1_BLOCK the retarget averages over nReTargetHistoryFact
    // intervals of history instead of one (Balthazar's fix, see above).
    if (pindexLast->nHeight > COINFIX1_BLOCK) {
        blockstogoback = nReTargetHistoryFact * nInterval;
    }
    // Go back by what we want to be nReTargetHistoryFact*nInterval blocks
    const CBlockIndex* pindexFirst = pindexLast;
    for (int i = 0; pindexFirst && i < blockstogoback; i++)
        pindexFirst = pindexFirst->pprev;
    assert(pindexFirst);
    // Limit adjustment step
    int64 nActualTimespan = 0;
    if (pindexLast->nHeight > COINFIX1_BLOCK)
        // obtain average actual timespan
        nActualTimespan = (pindexLast->GetBlockTime() - pindexFirst->GetBlockTime())/nReTargetHistoryFact;
    else
        nActualTimespan = pindexLast->GetBlockTime() - pindexFirst->GetBlockTime();
    printf("  nActualTimespan = %"PRI64d"  before bounds\n", nActualTimespan);
    // Clamp the measured timespan to [1/4, 4] of the target timespan.
    if (nActualTimespan < nTargetTimespan/4)
        nActualTimespan = nTargetTimespan/4;
    if (nActualTimespan > nTargetTimespan*4)
        nActualTimespan = nTargetTimespan*4;
    // Retarget: new_target = old_target * actual / expected, capped at the
    // proof-of-work limit (larger target = easier).
    CBigNum bnNew;
    bnNew.SetCompact(pindexLast->nBits);
    bnNew *= nActualTimespan;
    bnNew /= nTargetTimespan;
    if (bnNew > bnProofOfWorkLimit)
        bnNew = bnProofOfWorkLimit;
    /// debug print
    printf("GetNextWorkRequired RETARGET\n");
    printf("nTargetTimespan = %"PRI64d"    nActualTimespan = %"PRI64d"\n", nTargetTimespan, nActualTimespan);
    printf("Before: %08x  %s\n", pindexLast->nBits, CBigNum().SetCompact(pindexLast->nBits).getuint256().ToString().c_str());
    printf("After:  %08x  %s\n", bnNew.GetCompact(), bnNew.getuint256().ToString().c_str());
    return bnNew.GetCompact();
}
bool CheckProofOfWork(uint256 hash, unsigned int nBits)
{
    // Expand the compact difficulty encoding and verify both that the
    // target is within the permitted range and that the hash meets it.
    CBigNum bnTarget;
    bnTarget.SetCompact(nBits);
    // Target must be positive and no easier than the proof-of-work limit.
    bool fRangeOk = (bnTarget > 0) && (bnTarget <= bnProofOfWorkLimit);
    if (!fRangeOk)
        return error("CheckProofOfWork() : nBits below minimum work");
    // The hash, read as a 256-bit number, must not exceed the target.
    if (hash > bnTarget.getuint256())
        return error("CheckProofOfWork() : hash doesn't match nBits");
    return true;
}
// Return maximum amount of blocks that other nodes claim to have
int GetNumBlocksOfPeers()
{
    // Take the larger of the peers' median reported height and the
    // hard-coded checkpoint estimate, so a few lying peers can't skew it low.
    int nPeerMedian = cPeerBlockCounts.median();
    int nCheckpointEstimate = Checkpoints::GetTotalBlocksEstimate();
    return std::max(nPeerMedian, nCheckpointEstimate);
}
bool IsInitialBlockDownload()
{
    // Heuristic for "still syncing the chain": true while below the
    // checkpoint height estimate, or while best blocks are arriving rapidly
    // (the tip changed less than 10s ago) and the tip is over a day old.
    if (pindexBest == NULL || nBestHeight < Checkpoints::GetTotalBlocksEstimate())
        return true;
    // Function-local statics track when the best block last changed.
    // NOTE(review): not independently synchronized - assumed to be called
    // with cs_main held; confirm at the call sites.
    static int64 nLastUpdate;
    static CBlockIndex* pindexLastBest;
    if (pindexBest != pindexLastBest)
    {
        pindexLastBest = pindexBest;
        nLastUpdate = GetTime();
    }
    return (GetTime() - nLastUpdate < 10 &&
            pindexBest->GetBlockTime() < GetTime() - 24 * 60 * 60);
}
void static InvalidChainFound(CBlockIndex* pindexNew)
{
    // Record that a chain carrying more work than our best chain failed
    // validation.  Persists the invalid-work high-water mark so the warning
    // survives restarts, and logs both the invalid tip and our current best.
    if (pindexNew->bnChainWork > bnBestInvalidWork)
    {
        bnBestInvalidWork = pindexNew->bnChainWork;
        CTxDB().WriteBestInvalidWork(bnBestInvalidWork);
        uiInterface.NotifyBlocksChanged();
    }
    printf("InvalidChainFound: invalid block=%s  height=%d  work=%s  date=%s\n",
      pindexNew->GetBlockHash().ToString().substr(0,20).c_str(), pindexNew->nHeight,
      pindexNew->bnChainWork.ToString().c_str(), DateTimeStrFormat("%x %H:%M:%S",
      pindexNew->GetBlockTime()).c_str());
    // Fix: pindexBest was dereferenced unconditionally here even though the
    // check below shows it can be NULL (e.g. during startup) - guard the log.
    if (pindexBest)
        printf("InvalidChainFound:  current best=%s  height=%d  work=%s  date=%s\n",
          hashBestChain.ToString().substr(0,20).c_str(), nBestHeight, bnBestChainWork.ToString().c_str(),
          DateTimeStrFormat("%x %H:%M:%S", pindexBest->GetBlockTime()).c_str());
    if (pindexBest && bnBestInvalidWork > bnBestChainWork + pindexBest->GetBlockWork() * 6)
        printf("InvalidChainFound: WARNING: Displayed transactions may not be correct!  You may need to upgrade, or other nodes may need to upgrade.\n");
}
void CBlock::UpdateTime(const CBlockIndex* pindexPrev)
{
nTime = max(pindexPrev->GetMedianTimePast()+1, GetAdjustedTime());
// Updating time can change work required on testnet:
if (fTestNet)
nBits = GetNextWorkRequired(pindexPrev, this);
}
bool CTransaction::DisconnectInputs(CTxDB& txdb)
{
    // Undo this transaction's effect on the tx index: clear the spent marks
    // on every previous output it consumed, then erase its own index entry.
    // Used when a block is disconnected during a reorganization.
    // Relinquish previous transactions' spent pointers
    if (!IsCoinBase())
    {
        BOOST_FOREACH(const CTxIn& txin, vin)
        {
            COutPoint prevout = txin.prevout;
            // Get prev txindex from disk
            CTxIndex txindex;
            if (!txdb.ReadTxIndex(prevout.hash, txindex))
                return error("DisconnectInputs() : ReadTxIndex failed");
            if (prevout.n >= txindex.vSpent.size())
                return error("DisconnectInputs() : prevout.n out of range");
            // Mark outpoint as not spent
            txindex.vSpent[prevout.n].SetNull();
            // Write back
            if (!txdb.UpdateTxIndex(prevout.hash, txindex))
                return error("DisconnectInputs() : UpdateTxIndex failed");
        }
    }
    // Remove transaction from index
    // This can fail if a duplicate of this transaction was in a chain that got
    // reorganized away. This is only possible if this transaction was completely
    // spent, so erasing it would be a no-op anway.
    txdb.EraseTxIndex(*this);
    return true;
}
bool CTransaction::FetchInputs(CTxDB& txdb, const map<uint256, CTxIndex>& mapTestPool,
                               bool fBlock, bool fMiner, MapPrevTx& inputsRet, bool& fInvalid)
{
    // Collect into inputsRet the (CTxIndex, CTransaction) pair for every
    // previous output this transaction spends, looking first in the proposed
    // changes (mapTestPool) when connecting a block or mining, then the tx
    // database, then the memory pool.
    // FetchInputs can return false either because we just haven't seen some inputs
    // (in which case the transaction should be stored as an orphan)
    // or because the transaction is malformed (in which case the transaction should
    // be dropped). If tx is definitely invalid, fInvalid will be set to true.
    fInvalid = false;
    if (IsCoinBase())
        return true; // Coinbase transactions have no inputs to fetch.
    for (unsigned int i = 0; i < vin.size(); i++)
    {
        COutPoint prevout = vin[i].prevout;
        if (inputsRet.count(prevout.hash))
            continue; // Got it already
        // Read txindex (note: the map entry is created here and filled in place)
        CTxIndex& txindex = inputsRet[prevout.hash].first;
        bool fFound = true;
        if ((fBlock || fMiner) && mapTestPool.count(prevout.hash))
        {
            // Get txindex from current proposed changes
            txindex = mapTestPool.find(prevout.hash)->second;
        }
        else
        {
            // Read txindex from txdb
            fFound = txdb.ReadTxIndex(prevout.hash, txindex);
        }
        if (!fFound && (fBlock || fMiner))
            return fMiner ? false : error("FetchInputs() : %s prev tx %s index entry not found", GetHash().ToString().substr(0,10).c_str(),  prevout.hash.ToString().substr(0,10).c_str());
        // Read txPrev
        CTransaction& txPrev = inputsRet[prevout.hash].second;
        // CDiskTxPos(1,1,1) is the sentinel for "in memory, not on disk".
        if (!fFound || txindex.pos == CDiskTxPos(1,1,1))
        {
            // Get prev tx from single transactions in memory
            {
                LOCK(mempool.cs);
                if (!mempool.exists(prevout.hash))
                    return error("FetchInputs() : %s mempool Tx prev not found %s", GetHash().ToString().substr(0,10).c_str(),  prevout.hash.ToString().substr(0,10).c_str());
                txPrev = mempool.lookup(prevout.hash);
            }
            if (!fFound)
                txindex.vSpent.resize(txPrev.vout.size());
        }
        else
        {
            // Get prev tx from disk
            if (!txPrev.ReadFromDisk(txindex.pos))
                return error("FetchInputs() : %s ReadFromDisk prev tx %s failed", GetHash().ToString().substr(0,10).c_str(),  prevout.hash.ToString().substr(0,10).c_str());
        }
    }
    // Make sure all prevout.n's are valid:
    for (unsigned int i = 0; i < vin.size(); i++)
    {
        const COutPoint prevout = vin[i].prevout;
        assert(inputsRet.count(prevout.hash) != 0);
        const CTxIndex& txindex = inputsRet[prevout.hash].first;
        const CTransaction& txPrev = inputsRet[prevout.hash].second;
        if (prevout.n >= txPrev.vout.size() || prevout.n >= txindex.vSpent.size())
        {
            // Revisit this if/when transaction replacement is implemented and allows
            // adding inputs:
            fInvalid = true;
            return DoS(100, error("FetchInputs() : %s prevout.n out of range %d %d %d prev tx %s\n%s", GetHash().ToString().substr(0,10).c_str(), prevout.n, txPrev.vout.size(), txindex.vSpent.size(), prevout.hash.ToString().substr(0,10).c_str(), txPrev.ToString().c_str()));
        }
    }
    return true;
}
const CTxOut& CTransaction::GetOutputFor(const CTxIn& input, const MapPrevTx& inputs) const
{
    // Resolve the previous output referenced by 'input' from the fetched
    // inputs map; throws if the previous tx or output index is missing.
    MapPrevTx::const_iterator it = inputs.find(input.prevout.hash);
    if (it == inputs.end())
        throw std::runtime_error("CTransaction::GetOutputFor() : prevout.hash not found");
    const CTransaction& txPrev = it->second.second;
    if (input.prevout.n >= txPrev.vout.size())
        throw std::runtime_error("CTransaction::GetOutputFor() : prevout.n out of range");
    return txPrev.vout[input.prevout.n];
}
int64 CTransaction::GetValueIn(const MapPrevTx& inputs) const
{
    // Sum the values of all previous outputs this transaction spends.
    // Coinbase transactions create value rather than consume it.
    if (IsCoinBase())
        return 0;
    int64 nTotal = 0;
    BOOST_FOREACH(const CTxIn& txin, vin)
        nTotal += GetOutputFor(txin, inputs).nValue;
    return nTotal;
}
unsigned int CTransaction::GetP2SHSigOpCount(const MapPrevTx& inputs) const
{
if (IsCoinBase())
return 0;
unsigned int nSigOps = 0;
for (unsigned int i = 0; i < vin.size(); i++)
{
const CTxOut& prevout = GetOutputFor(vin[i], inputs);
if (prevout.scriptPubKey.IsPayToScriptHash())
nSigOps += prevout.scriptPubKey.GetSigOpCount(vin[i].scriptSig);
}
return nSigOps;
}
bool CTransaction::ConnectInputs(MapPrevTx inputs,
                                 map<uint256, CTxIndex>& mapTestPool, const CDiskTxPos& posThisTx,
                                 const CBlockIndex* pindexBlock, bool fBlock, bool fMiner, bool fStrictPayToScriptHash)
{
    // Consensus-critical: validates this transaction's inputs (ranges,
    // maturity, double-spends, signatures, value balance) and marks them
    // spent.  Queued index changes go into mapTestPool when fBlock/fMiner.
    // Take over previous transactions' spent pointers
    // fBlock is true when this is called from AcceptBlock when a new best-block is added to the blockchain
    // fMiner is true when called from the internal piggiecoin miner
    // ... both are false when called from CTransaction::AcceptToMemoryPool
    if (!IsCoinBase())
    {
        int64 nValueIn = 0;
        int64 nFees = 0;
        for (unsigned int i = 0; i < vin.size(); i++)
        {
            COutPoint prevout = vin[i].prevout;
            assert(inputs.count(prevout.hash) > 0);
            CTxIndex& txindex = inputs[prevout.hash].first;
            CTransaction& txPrev = inputs[prevout.hash].second;
            if (prevout.n >= txPrev.vout.size() || prevout.n >= txindex.vSpent.size())
                return DoS(100, error("ConnectInputs() : %s prevout.n out of range %d %d %d prev tx %s\n%s", GetHash().ToString().substr(0,10).c_str(), prevout.n, txPrev.vout.size(), txindex.vSpent.size(), prevout.hash.ToString().substr(0,10).c_str(), txPrev.ToString().c_str()));
            // If prev is coinbase, check that it's matured: walk back up to
            // COINBASE_MATURITY blocks looking for the block that holds it.
            if (txPrev.IsCoinBase())
                for (const CBlockIndex* pindex = pindexBlock; pindex && pindexBlock->nHeight - pindex->nHeight < COINBASE_MATURITY; pindex = pindex->pprev)
                    if (pindex->nBlockPos == txindex.pos.nBlockPos && pindex->nFile == txindex.pos.nFile)
                        return error("ConnectInputs() : tried to spend coinbase at depth %d", pindexBlock->nHeight - pindex->nHeight);
            // Check for negative or overflow input values
            nValueIn += txPrev.vout[prevout.n].nValue;
            if (!MoneyRange(txPrev.vout[prevout.n].nValue) || !MoneyRange(nValueIn))
                return DoS(100, error("ConnectInputs() : txin values out of range"));
        }
        // The first loop above does all the inexpensive checks.
        // Only if ALL inputs pass do we perform expensive ECDSA signature checks.
        // Helps prevent CPU exhaustion attacks.
        for (unsigned int i = 0; i < vin.size(); i++)
        {
            COutPoint prevout = vin[i].prevout;
            assert(inputs.count(prevout.hash) > 0);
            CTxIndex& txindex = inputs[prevout.hash].first;
            CTransaction& txPrev = inputs[prevout.hash].second;
            // Check for conflicts (double-spend)
            // This doesn't trigger the DoS code on purpose; if it did, it would make it easier
            // for an attacker to attempt to split the network.
            if (!txindex.vSpent[prevout.n].IsNull())
                return fMiner ? false : error("ConnectInputs() : %s prev tx already used at %s", GetHash().ToString().substr(0,10).c_str(), txindex.vSpent[prevout.n].ToString().c_str());
            // Skip ECDSA signature verification when connecting blocks (fBlock=true)
            // before the last blockchain checkpoint. This is safe because block merkle hashes are
            // still computed and checked, and any change will be caught at the next checkpoint.
            if (!(fBlock && (nBestHeight < Checkpoints::GetTotalBlocksEstimate())))
            {
                // Verify signature
                if (!VerifySignature(txPrev, *this, i, fStrictPayToScriptHash, 0))
                {
                    // only during transition phase for P2SH: do not invoke anti-DoS code for
                    // potentially old clients relaying bad P2SH transactions
                    if (fStrictPayToScriptHash && VerifySignature(txPrev, *this, i, false, 0))
                        return error("ConnectInputs() : %s P2SH VerifySignature failed", GetHash().ToString().substr(0,10).c_str());
                    return DoS(100,error("ConnectInputs() : %s VerifySignature failed", GetHash().ToString().substr(0,10).c_str()));
                }
            }
            // Mark outpoints as spent
            txindex.vSpent[prevout.n] = posThisTx;
            // Write back
            if (fBlock || fMiner)
            {
                mapTestPool[prevout.hash] = txindex;
            }
        }
        if (nValueIn < GetValueOut())
            return DoS(100, error("ConnectInputs() : %s value in < value out", GetHash().ToString().substr(0,10).c_str()));
        // Tally transaction fees
        int64 nTxFee = nValueIn - GetValueOut();
        if (nTxFee < 0)
            return DoS(100, error("ConnectInputs() : %s nTxFee < 0", GetHash().ToString().substr(0,10).c_str()));
        nFees += nTxFee;
        if (!MoneyRange(nFees))
            return DoS(100, error("ConnectInputs() : nFees out of range"));
    }
    return true;
}
bool CTransaction::ClientConnectInputs()
{
if (IsCoinBase())
return false;
// Take over previous transactions' spent pointers
{
LOCK(mempool.cs);
int64 nValueIn = 0;
for (unsigned int i = 0; i < vin.size(); i++)
{
// Get prev tx from single transactions in memory
COutPoint prevout = vin[i].prevout;
if (!mempool.exists(prevout.hash))
return false;
CTransaction& txPrev = mempool.lookup(prevout.hash);
if (prevout.n >= txPrev.vout.size())
return false;
// Verify signature
if (!VerifySignature(txPrev, *this, i, true, 0))
return error("ConnectInputs() : VerifySignature failed");
///// this is redundant with the mempool.mapNextTx stuff,
///// not sure which I want to get rid of
///// this has to go away now that posNext is gone
// // Check for conflicts
// if (!txPrev.vout[prevout.n].posNext.IsNull())
// return error("ConnectInputs() : prev tx already used");
//
// // Flag outpoints as used
// txPrev.vout[prevout.n].posNext = posThisTx;
nValueIn += txPrev.vout[prevout.n].nValue;
if (!MoneyRange(txPrev.vout[prevout.n].nValue) || !MoneyRange(nValueIn))
return error("ClientConnectInputs() : txin values out of range");
}
if (GetValueOut() > nValueIn)
return false;
}
return true;
}
bool CBlock::DisconnectBlock(CTxDB& txdb, CBlockIndex* pindex)
{
    // Undo this block: release the inputs of each transaction in reverse
    // order, then unlink the block from its predecessor's on-disk index.
    BOOST_REVERSE_FOREACH(CTransaction& tx, vtx)
        if (!tx.DisconnectInputs(txdb))
            return false;
    // Update block index on disk without changing it in memory.
    // The memory index structure will be changed after the db commits.
    if (pindex->pprev)
    {
        CDiskBlockIndex blockindexPrev(pindex->pprev);
        blockindexPrev.hashNext = 0;
        if (!txdb.WriteBlockIndex(blockindexPrev))
            return error("DisconnectBlock() : WriteBlockIndex failed");
    }
    return true;
}
bool CBlock::ConnectBlock(CTxDB& txdb, CBlockIndex* pindex)
{
    // Consensus-critical: applies this block to the transaction index,
    // validating every transaction's inputs, sigop limits, BIP30/BIP16
    // rules, and the coinbase payout against the subsidy schedule.
    // Check it again in case a previous version let a bad block in
    if (!CheckBlock())
        return false;
    // Do not allow blocks that contain transactions which 'overwrite' older transactions,
    // unless those are already completely spent.
    // If such overwrites are allowed, coinbases and transactions depending upon those
    // can be duplicated to remove the ability to spend the first instance -- even after
    // being sent to another address.
    // See BIP30 and http://r6.ca/blog/20120206T005236Z.html for more information.
    // This logic is not necessary for memory pool transactions, as AcceptToMemoryPool
    // already refuses previously-known transaction id's entirely.
    // This rule applies to all blocks whose timestamp is after October 1, 2012, 0:00 UTC.
    int64 nBIP30SwitchTime = 1349049600;
    bool fEnforceBIP30 = (pindex->nTime > nBIP30SwitchTime);
    // BIP16 didn't become active until October 1 2012
    // (note: BIP16 and BIP30 deliberately share the same switch time here).
    int64 nBIP16SwitchTime = 1349049600;
    bool fStrictPayToScriptHash = (pindex->nTime >= nBIP16SwitchTime);
    //// issue here: it doesn't know the version
    unsigned int nTxPos = pindex->nBlockPos + ::GetSerializeSize(CBlock(), SER_DISK, CLIENT_VERSION) - 1 + GetSizeOfCompactSize(vtx.size());
    map<uint256, CTxIndex> mapQueuedChanges;
    int64 nFees = 0;
    unsigned int nSigOps = 0;
    BOOST_FOREACH(CTransaction& tx, vtx)
    {
        uint256 hashTx = tx.GetHash();
        // BIP30: reject a duplicate txid unless the old one is fully spent
        // (an unspent vSpent entry means an unspent duplicate exists).
        if (fEnforceBIP30) {
            CTxIndex txindexOld;
            if (txdb.ReadTxIndex(hashTx, txindexOld)) {
                BOOST_FOREACH(CDiskTxPos &pos, txindexOld.vSpent)
                    if (pos.IsNull())
                        return false;
            }
        }
        nSigOps += tx.GetLegacySigOpCount();
        if (nSigOps > MAX_BLOCK_SIGOPS)
            return DoS(100, error("ConnectBlock() : too many sigops"));
        // Track each tx's byte offset inside the block as stored on disk.
        CDiskTxPos posThisTx(pindex->nFile, pindex->nBlockPos, nTxPos);
        nTxPos += ::GetSerializeSize(tx, SER_DISK, CLIENT_VERSION);
        MapPrevTx mapInputs;
        if (!tx.IsCoinBase())
        {
            bool fInvalid;
            if (!tx.FetchInputs(txdb, mapQueuedChanges, true, false, mapInputs, fInvalid))
                return false;
            if (fStrictPayToScriptHash)
            {
                // Add in sigops done by pay-to-script-hash inputs;
                // this is to prevent a "rogue miner" from creating
                // an incredibly-expensive-to-validate block.
                nSigOps += tx.GetP2SHSigOpCount(mapInputs);
                if (nSigOps > MAX_BLOCK_SIGOPS)
                    return DoS(100, error("ConnectBlock() : too many sigops"));
            }
            nFees += tx.GetValueIn(mapInputs)-tx.GetValueOut();
            if (!tx.ConnectInputs(mapInputs, mapQueuedChanges, posThisTx, pindex, true, false, fStrictPayToScriptHash))
                return false;
        }
        mapQueuedChanges[hashTx] = CTxIndex(posThisTx, tx.vout.size());
    }
    // Write queued txindex changes
    for (map<uint256, CTxIndex>::iterator mi = mapQueuedChanges.begin(); mi != mapQueuedChanges.end(); ++mi)
    {
        if (!txdb.UpdateTxIndex((*mi).first, (*mi).second))
            return error("ConnectBlock() : UpdateTxIndex failed");
    }
    // The subsidy schedule is seeded from the previous block hash (0 for
    // the genesis block, which has no predecessor).
    uint256 prevHash = 0;
    if(pindex->pprev)
    {
        prevHash = pindex->pprev->GetBlockHash();
    }
    // Coinbase may not claim more than the subsidy plus collected fees.
    if (vtx[0].GetValueOut() > GetBlockValue(pindex->nHeight, nFees, prevHash))
        return false;
    // Update block index on disk without changing it in memory.
    // The memory index structure will be changed after the db commits.
    if (pindex->pprev)
    {
        CDiskBlockIndex blockindexPrev(pindex->pprev);
        blockindexPrev.hashNext = pindex->GetBlockHash();
        if (!txdb.WriteBlockIndex(blockindexPrev))
            return error("ConnectBlock() : WriteBlockIndex failed");
    }
    // Watch for transactions paying to me
    BOOST_FOREACH(CTransaction& tx, vtx)
        SyncWithWallets(tx, this, true);
    return true;
}
bool static Reorganize(CTxDB& txdb, CBlockIndex* pindexNew)
{
    // Switch the best chain from the current tip to pindexNew: disconnect
    // blocks back to the common fork point, connect the new branch, commit,
    // then fix up the in-memory index links and the mempool.
    printf("REORGANIZE\n");
    // Find the fork: walk both tips backwards until they meet.
    CBlockIndex* pfork = pindexBest;
    CBlockIndex* plonger = pindexNew;
    while (pfork != plonger)
    {
        while (plonger->nHeight > pfork->nHeight)
            if (!(plonger = plonger->pprev))
                return error("Reorganize() : plonger->pprev is null");
        if (pfork == plonger)
            break;
        if (!(pfork = pfork->pprev))
            return error("Reorganize() : pfork->pprev is null");
    }
    // List of what to disconnect
    vector<CBlockIndex*> vDisconnect;
    for (CBlockIndex* pindex = pindexBest; pindex != pfork; pindex = pindex->pprev)
        vDisconnect.push_back(pindex);
    // List of what to connect (built tip-first, then reversed to fork-first)
    vector<CBlockIndex*> vConnect;
    for (CBlockIndex* pindex = pindexNew; pindex != pfork; pindex = pindex->pprev)
        vConnect.push_back(pindex);
    reverse(vConnect.begin(), vConnect.end());
    // NOTE(review): "%i" paired with size_t from .size() relies on matching
    // widths; technically it should be cast to int (or use %u/%zu).
    printf("REORGANIZE: Disconnect %i blocks; %s..%s\n", vDisconnect.size(), pfork->GetBlockHash().ToString().substr(0,20).c_str(), pindexBest->GetBlockHash().ToString().substr(0,20).c_str());
    printf("REORGANIZE: Connect %i blocks; %s..%s\n", vConnect.size(), pfork->GetBlockHash().ToString().substr(0,20).c_str(), pindexNew->GetBlockHash().ToString().substr(0,20).c_str());
    // Disconnect shorter branch
    vector<CTransaction> vResurrect;
    BOOST_FOREACH(CBlockIndex* pindex, vDisconnect)
    {
        CBlock block;
        if (!block.ReadFromDisk(pindex))
            return error("Reorganize() : ReadFromDisk for disconnect failed");
        if (!block.DisconnectBlock(txdb, pindex))
            return error("Reorganize() : DisconnectBlock %s failed", pindex->GetBlockHash().ToString().substr(0,20).c_str());
        // Queue memory transactions to resurrect
        BOOST_FOREACH(const CTransaction& tx, block.vtx)
            if (!tx.IsCoinBase())
                vResurrect.push_back(tx);
    }
    // Connect longer branch
    vector<CTransaction> vDelete;
    for (unsigned int i = 0; i < vConnect.size(); i++)
    {
        CBlockIndex* pindex = vConnect[i];
        CBlock block;
        if (!block.ReadFromDisk(pindex))
            return error("Reorganize() : ReadFromDisk for connect failed");
        if (!block.ConnectBlock(txdb, pindex))
        {
            // Invalid block
            return error("Reorganize() : ConnectBlock %s failed", pindex->GetBlockHash().ToString().substr(0,20).c_str());
        }
        // Queue memory transactions to delete
        BOOST_FOREACH(const CTransaction& tx, block.vtx)
            vDelete.push_back(tx);
    }
    if (!txdb.WriteHashBestChain(pindexNew->GetBlockHash()))
        return error("Reorganize() : WriteHashBestChain failed");
    // Make sure it's successfully written to disk before changing memory structure
    if (!txdb.TxnCommit())
        return error("Reorganize() : TxnCommit failed");
    // Disconnect shorter branch: clear forward links off the old chain.
    BOOST_FOREACH(CBlockIndex* pindex, vDisconnect)
        if (pindex->pprev)
            pindex->pprev->pnext = NULL;
    // Connect longer branch: set forward links along the new chain.
    BOOST_FOREACH(CBlockIndex* pindex, vConnect)
        if (pindex->pprev)
            pindex->pprev->pnext = pindex;
    // Resurrect memory transactions that were in the disconnected branch
    // (best effort; acceptance failures are ignored).
    BOOST_FOREACH(CTransaction& tx, vResurrect)
        tx.AcceptToMemoryPool(txdb, false);
    // Delete redundant memory transactions that are in the connected branch
    BOOST_FOREACH(CTransaction& tx, vDelete)
        mempool.remove(tx);
    printf("REORGANIZE: done\n");
    return true;
}
// Called from inside SetBestChain: attaches a block to the new best chain being built
bool CBlock::SetBestChainInner(CTxDB& txdb, CBlockIndex *pindexNew)
{
    // Connects pindexNew on top of the current best chain inside the open
    // db transaction; on failure the transaction is aborted and the chain
    // is recorded as invalid.
    uint256 hash = GetHash();
    // Adding to current best branch
    if (!ConnectBlock(txdb, pindexNew) || !txdb.WriteHashBestChain(hash))
    {
        txdb.TxnAbort();
        InvalidChainFound(pindexNew);
        return false;
    }
    if (!txdb.TxnCommit())
        return error("SetBestChain() : TxnCommit failed");
    // Add to current best branch (forward link, only set after commit).
    pindexNew->pprev->pnext = pindexNew;
    // Delete redundant memory transactions
    BOOST_FOREACH(CTransaction& tx, vtx)
        mempool.remove(tx);
    return true;
}
bool CBlock::SetBestChain(CTxDB& txdb, CBlockIndex* pindexNew)
{
    // Makes pindexNew the new best chain tip.  Three cases: genesis block,
    // simple extension of the current tip, or a reorganization to a fork.
    // Afterwards the global best-chain state and wallet/UI are updated.
    uint256 hash = GetHash();
    if (!txdb.TxnBegin())
        return error("SetBestChain() : TxnBegin failed");
    if (pindexGenesisBlock == NULL && hash == hashGenesisBlock)
    {
        txdb.WriteHashBestChain(hash);
        if (!txdb.TxnCommit())
            return error("SetBestChain() : TxnCommit failed");
        pindexGenesisBlock = pindexNew;
    }
    else if (hashPrevBlock == hashBestChain)
    {
        if (!SetBestChainInner(txdb, pindexNew))
            return error("SetBestChain() : SetBestChainInner failed");
    }
    else
    {
        // the first block in the new chain that will cause it to become the new best chain
        CBlockIndex *pindexIntermediate = pindexNew;
        // list of blocks that need to be connected afterwards
        std::vector<CBlockIndex*> vpindexSecondary;
        // Reorganize is costly in terms of db load, as it works in a single db transaction.
        // Try to limit how much needs to be done inside
        while (pindexIntermediate->pprev && pindexIntermediate->pprev->bnChainWork > pindexBest->bnChainWork)
        {
            vpindexSecondary.push_back(pindexIntermediate);
            pindexIntermediate = pindexIntermediate->pprev;
        }
        // NOTE(review): "%i" paired with size_t from .size(); should be cast.
        if (!vpindexSecondary.empty())
            printf("Postponing %i reconnects\n", vpindexSecondary.size());
        // Switch to new best branch
        if (!Reorganize(txdb, pindexIntermediate))
        {
            txdb.TxnAbort();
            InvalidChainFound(pindexNew);
            return error("SetBestChain() : Reorganize failed");
        }
        // Connect further blocks (tip-most last)
        BOOST_REVERSE_FOREACH(CBlockIndex *pindex, vpindexSecondary)
        {
            CBlock block;
            if (!block.ReadFromDisk(pindex))
            {
                printf("SetBestChain() : ReadFromDisk failed\n");
                break;
            }
            if (!txdb.TxnBegin()) {
                printf("SetBestChain() : TxnBegin 2 failed\n");
                break;
            }
            // errors now are not fatal, we still did a reorganisation to a new chain in a valid way
            if (!block.SetBestChainInner(txdb, pindex))
                break;
        }
    }
    // Update best block in wallet (so we can detect restored wallets)
    bool fIsInitialDownload = IsInitialBlockDownload();
    if (!fIsInitialDownload)
    {
        const CBlockLocator locator(pindexNew);
        ::SetBestChain(locator);
    }
    // New best block
    hashBestChain = hash;
    pindexBest = pindexNew;
    nBestHeight = pindexBest->nHeight;
    bnBestChainWork = pindexNew->bnChainWork;
    nTimeBestReceived = GetTime();
    nTransactionsUpdated++;
    printf("SetBestChain: new best=%s  height=%d  work=%s  date=%s\n",
      hashBestChain.ToString().substr(0,20).c_str(), nBestHeight, bnBestChainWork.ToString().c_str(),
      DateTimeStrFormat("%x %H:%M:%S", pindexBest->GetBlockTime()).c_str());
    // Check the version of the last 100 blocks to see if we need to upgrade:
    if (!fIsInitialDownload)
    {
        int nUpgraded = 0;
        const CBlockIndex* pindex = pindexBest;
        for (int i = 0; i < 100 && pindex != NULL; i++)
        {
            if (pindex->nVersion > CBlock::CURRENT_VERSION)
                ++nUpgraded;
            pindex = pindex->pprev;
        }
        if (nUpgraded > 0)
            printf("SetBestChain: %d of last 100 blocks above version %d\n", nUpgraded, CBlock::CURRENT_VERSION);
        //        if (nUpgraded > 100/2)
        // strMiscWarning is read by GetWarnings(), called by Qt and the JSON-RPC code to warn the user:
        //            strMiscWarning = _("Warning: this version is obsolete, upgrade required");
    }
    // Optionally notify an external command of the new tip (-blocknotify).
    std::string strCmd = GetArg("-blocknotify", "");
    if (!fIsInitialDownload && !strCmd.empty())
    {
        boost::replace_all(strCmd, "%s", hashBestChain.GetHex());
        boost::thread t(runCommand, strCmd); // thread runs free
    }
    return true;
}
bool CBlock::AddToBlockIndex(unsigned int nFile, unsigned int nBlockPos)
{
    // Creates the in-memory index entry for this block (height, cumulative
    // chain work), persists it, and promotes the block to the best chain if
    // it now carries the most work.
    // Check for duplicate
    uint256 hash = GetHash();
    if (mapBlockIndex.count(hash))
        return error("AddToBlockIndex() : %s already exists", hash.ToString().substr(0,20).c_str());
    // Construct new block index object
    CBlockIndex* pindexNew = new CBlockIndex(nFile, nBlockPos, *this);
    if (!pindexNew)
        return error("AddToBlockIndex() : new CBlockIndex failed");
    map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.insert(make_pair(hash, pindexNew)).first;
    // The index entry points at the map key so the hash is stored only once.
    pindexNew->phashBlock = &((*mi).first);
    map<uint256, CBlockIndex*>::iterator miPrev = mapBlockIndex.find(hashPrevBlock);
    if (miPrev != mapBlockIndex.end())
    {
        pindexNew->pprev = (*miPrev).second;
        pindexNew->nHeight = pindexNew->pprev->nHeight + 1;
    }
    pindexNew->bnChainWork = (pindexNew->pprev ? pindexNew->pprev->bnChainWork : 0) + pindexNew->GetBlockWork();
    CTxDB txdb;
    if (!txdb.TxnBegin())
        return false;
    txdb.WriteBlockIndex(CDiskBlockIndex(pindexNew));
    if (!txdb.TxnCommit())
        return false;
    // New best
    if (pindexNew->bnChainWork > bnBestChainWork)
        if (!SetBestChain(txdb, pindexNew))
            return false;
    txdb.Close();
    if (pindexNew == pindexBest)
    {
        // Notify UI to display prev block's coinbase if it was ours
        static uint256 hashPrevBestCoinBase;
        UpdatedTransaction(hashPrevBestCoinBase);
        hashPrevBestCoinBase = vtx[0].GetHash();
    }
    uiInterface.NotifyBlocksChanged();
    return true;
}
// Context-independent sanity checks on a block (no chain state required),
// so they are safe to run before the block's ancestry is known, e.g. on
// orphans.  Most failures call DoS() to assign a misbehaving score to the
// sending peer.
bool CBlock::CheckBlock() const
{
    // These are checks that are independent of context
    // that can be verified before saving an orphan block.

    // Size limits
    if (vtx.empty() || vtx.size() > MAX_BLOCK_SIZE || ::GetSerializeSize(*this, SER_NETWORK, PROTOCOL_VERSION) > MAX_BLOCK_SIZE)
        return DoS(100, error("CheckBlock() : size limits failed"));

    // Check proof of work matches claimed amount
    if (!CheckProofOfWork(GetPoWHash(), nBits))
        return DoS(50, error("CheckBlock() : proof of work failed"));

    // Check timestamp: reject blocks dated more than 2 hours into the future
    if (GetBlockTime() > GetAdjustedTime() + 2 * 60 * 60)
        return error("CheckBlock() : block timestamp too far in the future");

    // First transaction must be coinbase, the rest must not be.
    // NOTE(review): the vtx.empty() test here is redundant (already rejected
    // by the size-limits check above) but harmless.
    if (vtx.empty() || !vtx[0].IsCoinBase())
        return DoS(100, error("CheckBlock() : first tx is not coinbase"));
    for (unsigned int i = 1; i < vtx.size(); i++)
        if (vtx[i].IsCoinBase())
            return DoS(100, error("CheckBlock() : more than one coinbase"));

    // Check transactions; propagate the per-transaction DoS score
    BOOST_FOREACH(const CTransaction& tx, vtx)
        if (!tx.CheckTransaction())
            return DoS(tx.nDoS, error("CheckBlock() : CheckTransaction failed"));

    // Check for duplicate txids. This is caught by ConnectInputs(),
    // but catching it earlier avoids a potential DoS attack:
    set<uint256> uniqueTx;
    BOOST_FOREACH(const CTransaction& tx, vtx)
    {
        uniqueTx.insert(tx.GetHash());
    }
    if (uniqueTx.size() != vtx.size())
        return DoS(100, error("CheckBlock() : duplicate transaction"));

    // Enforce the legacy signature-operation limit over the whole block
    unsigned int nSigOps = 0;
    BOOST_FOREACH(const CTransaction& tx, vtx)
    {
        nSigOps += tx.GetLegacySigOpCount();
    }
    if (nSigOps > MAX_BLOCK_SIGOPS)
        return DoS(100, error("CheckBlock() : out-of-bounds SigOpCount"));

    // Check merkleroot
    if (hashMerkleRoot != BuildMerkleTree())
        return DoS(100, error("CheckBlock() : hashMerkleRoot mismatch"));

    return true;
}
// Contextual validation of a block whose parent is already indexed, then
// write it to disk and add it to the block index.  Assumes CheckBlock()
// already passed.  On success, relays the block's inventory to peers that
// are near the tip (unless we appear to be in initial block download).
bool CBlock::AcceptBlock()
{
    // Check for duplicate
    uint256 hash = GetHash();
    if (mapBlockIndex.count(hash))
        return error("AcceptBlock() : block already in mapBlockIndex");

    // Get prev block index
    map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hashPrevBlock);
    if (mi == mapBlockIndex.end())
        return DoS(10, error("AcceptBlock() : prev block not found"));
    CBlockIndex* pindexPrev = (*mi).second;
    int nHeight = pindexPrev->nHeight+1;

    // Check proof of work against the difficulty the chain requires here
    if (nBits != GetNextWorkRequired(pindexPrev, this))
        return DoS(100, error("AcceptBlock() : incorrect proof of work"));

    // Check timestamp against prev (must exceed the median of past blocks)
    if (GetBlockTime() <= pindexPrev->GetMedianTimePast())
        return error("AcceptBlock() : block's timestamp is too early");

    // Check that all transactions are finalized
    BOOST_FOREACH(const CTransaction& tx, vtx)
        if (!tx.IsFinal(nHeight, GetBlockTime()))
            return DoS(10, error("AcceptBlock() : contains a non-final transaction"));

    // Check that the block chain matches the known block chain up to a checkpoint
    if (!Checkpoints::CheckBlock(nHeight, hash))
        return DoS(100, error("AcceptBlock() : rejected by checkpoint lockin at %d", nHeight));

    // Write block to history file
    if (!CheckDiskSpace(::GetSerializeSize(*this, SER_DISK, CLIENT_VERSION)))
        return error("AcceptBlock() : out of disk space");
    // nFile == (unsigned int)-1 presumably tells WriteToDisk to append to the
    // current block file — TODO confirm against WriteToDisk's definition.
    unsigned int nFile = -1;
    unsigned int nBlockPos = 0;
    if (!WriteToDisk(nFile, nBlockPos))
        return error("AcceptBlock() : WriteToDisk failed");
    if (!AddToBlockIndex(nFile, nBlockPos))
        return error("AcceptBlock() : AddToBlockIndex failed");

    // Relay inventory, but don't relay old inventory during initial block download
    int nBlockEstimate = Checkpoints::GetTotalBlocksEstimate();
    if (hashBestChain == hash)
    {
        LOCK(cs_vNodes);
        BOOST_FOREACH(CNode* pnode, vNodes)
            if (nBestHeight > (pnode->nStartingHeight != -1 ? pnode->nStartingHeight - 2000 : nBlockEstimate))
                pnode->PushInventory(CInv(MSG_BLOCK, hash));
    }

    return true;
}
// Top-level entry point for a newly received block.  pfrom is the sending
// peer, or NULL when the block originates locally (e.g. file import).
// Runs preliminary checks, anti-spam proof-of-work sanity relative to the
// last checkpoint, parks orphan blocks until their parent arrives, and on
// acceptance recursively reconnects any orphans that descend from it.
bool ProcessBlock(CNode* pfrom, CBlock* pblock)
{
    // Check for duplicate
    uint256 hash = pblock->GetHash();
    if (mapBlockIndex.count(hash))
        return error("ProcessBlock() : already have block %d %s", mapBlockIndex[hash]->nHeight, hash.ToString().substr(0,20).c_str());
    if (mapOrphanBlocks.count(hash))
        return error("ProcessBlock() : already have block (orphan) %s", hash.ToString().substr(0,20).c_str());

    // Preliminary checks
    if (!pblock->CheckBlock())
        return error("ProcessBlock() : CheckBlock FAILED");

    CBlockIndex* pcheckpoint = Checkpoints::GetLastCheckpoint(mapBlockIndex);
    if (pcheckpoint && pblock->hashPrevBlock != hashBestChain)
    {
        // Extra checks to prevent "fill up memory by spamming with bogus blocks"
        int64 deltaTime = pblock->GetBlockTime() - pcheckpoint->nTime;
        if (deltaTime < 0)
        {
            if (pfrom)
                pfrom->Misbehaving(100);
            return error("ProcessBlock() : block with timestamp before last checkpoint");
        }
        // Require at least the minimum work reachable since the checkpoint;
        // a larger CBigNum target means less work was done.
        CBigNum bnNewBlock;
        bnNewBlock.SetCompact(pblock->nBits);
        CBigNum bnRequired;
        bnRequired.SetCompact(ComputeMinWork(pcheckpoint->nBits, deltaTime));
        if (bnNewBlock > bnRequired)
        {
            if (pfrom)
                pfrom->Misbehaving(100);
            return error("ProcessBlock() : block with too little proof-of-work");
        }
    }

    // If don't already have its previous block, shunt it off to holding area until we get it
    if (!mapBlockIndex.count(pblock->hashPrevBlock))
    {
        printf("ProcessBlock: ORPHAN BLOCK, prev=%s\n", pblock->hashPrevBlock.ToString().substr(0,20).c_str());
        // Heap copy owned by the orphan maps; freed when reconnected below.
        CBlock* pblock2 = new CBlock(*pblock);
        mapOrphanBlocks.insert(make_pair(hash, pblock2));
        mapOrphanBlocksByPrev.insert(make_pair(pblock2->hashPrevBlock, pblock2));

        // Ask this guy to fill in what we're missing
        if (pfrom)
            pfrom->PushGetBlocks(pindexBest, GetOrphanRoot(pblock2));
        return true;
    }

    // Store to disk
    if (!pblock->AcceptBlock())
        return error("ProcessBlock() : AcceptBlock FAILED");

    // Recursively process any orphan blocks that depended on this one
    // (breadth-first via a work queue of newly connected hashes).
    vector<uint256> vWorkQueue;
    vWorkQueue.push_back(hash);
    for (unsigned int i = 0; i < vWorkQueue.size(); i++)
    {
        uint256 hashPrev = vWorkQueue[i];
        for (multimap<uint256, CBlock*>::iterator mi = mapOrphanBlocksByPrev.lower_bound(hashPrev);
             mi != mapOrphanBlocksByPrev.upper_bound(hashPrev);
             ++mi)
        {
            CBlock* pblockOrphan = (*mi).second;
            if (pblockOrphan->AcceptBlock())
                vWorkQueue.push_back(pblockOrphan->GetHash());
            // Orphan is removed and freed whether or not it was accepted.
            mapOrphanBlocks.erase(pblockOrphan->GetHash());
            delete pblockOrphan;
        }
        mapOrphanBlocksByPrev.erase(hashPrev);
    }

    printf("ProcessBlock: ACCEPTED\n");
    return true;
}
// Verify that the data directory's volume can absorb nAdditionalBytes on
// top of the nMinDiskSpace reserve.  When space is insufficient, warns the
// user, records the warning for GetWarnings(), triggers shutdown, and
// returns false.
bool CheckDiskSpace(uint64 nAdditionalBytes)
{
    const uint64 nFreeBytesAvailable = filesystem::space(GetDataDir()).available;

    // Check for nMinDiskSpace bytes (currently 50MB)
    if (nFreeBytesAvailable >= nMinDiskSpace + nAdditionalBytes)
        return true;

    // Not enough room: surface the warning everywhere and shut down cleanly.
    fShutdown = true;
    string strMessage = _("Warning: Disk space is low");
    strMiscWarning = strMessage;
    printf("*** %s\n", strMessage.c_str());
    uiInterface.ThreadSafeMessageBox(strMessage, "PiggieCoin", CClientUIInterface::OK | CClientUIInterface::ICON_EXCLAMATION | CClientUIInterface::MODAL);
    StartShutdown();
    return false;
}
// Open block file blkNNNN.dat in the data directory with the given stdio
// mode.  Returns NULL for an invalid file number or on open failure.  For
// read-style modes with a non-zero nBlockPos the stream is positioned at
// that offset; if the seek fails the file is closed and NULL is returned.
FILE* OpenBlockFile(unsigned int nFile, unsigned int nBlockPos, const char* pszMode)
{
    // File numbers start at 1; (unsigned int)-1 is the "no file" sentinel.
    if (nFile < 1 || nFile == (unsigned int) -1)
        return NULL;

    FILE* pFile = fopen((GetDataDir() / strprintf("blk%04d.dat", nFile)).string().c_str(), pszMode);
    if (pFile == NULL)
        return NULL;

    // Append/write modes position the stream themselves; only seek for
    // read modes when a non-zero offset was requested.
    const bool fReadMode = (strchr(pszMode, 'a') == NULL) && (strchr(pszMode, 'w') == NULL);
    if (nBlockPos != 0 && fReadMode)
    {
        if (fseek(pFile, nBlockPos, SEEK_SET) != 0)
        {
            fclose(pFile);
            return NULL;
        }
    }
    return pFile;
}
// Number of the blk####.dat file currently being appended to.
static unsigned int nCurrentBlockFile = 1;

// Open the current block file for appending, rolling over to the next
// numbered file when the current one nears the 2GB fseek/ftell limit.
// On success the stream is positioned at end-of-file and nFileRet holds
// the file number; on failure returns NULL with nFileRet left at 0.
FILE* AppendBlockFile(unsigned int& nFileRet)
{
    nFileRet = 0;
    loop
    {
        FILE* file = OpenBlockFile(nCurrentBlockFile, 0, "ab");
        if (!file)
            return NULL;
        if (fseek(file, 0, SEEK_END) != 0)
        {
            // Bug fix: close the handle before bailing out so a failed
            // seek no longer leaks the open FILE*.
            fclose(file);
            return NULL;
        }
        // FAT32 filesize max 4GB, fseek and ftell max 2GB, so we must stay under 2GB
        if (ftell(file) < 0x7F000000 - MAX_SIZE)
        {
            nFileRet = nCurrentBlockFile;
            return file;
        }
        fclose(file);
        nCurrentBlockFile++;
    }
}
// Load the block index from the database, first switching network globals
// (message magic, genesis hash) to testnet values when -testnet is active.
// If the index is empty and fAllowNew is set, construct the hard-coded
// genesis block — mining a fresh one on the fly if its hash does not match
// the compiled-in constant — and write it to disk and the index.
bool LoadBlockIndex(bool fAllowNew)
{
    if (fTestNet)
    {
        pchMessageStart[0] = 0xe3;
        pchMessageStart[1] = 0xc2;
        pchMessageStart[2] = 0xee;
        pchMessageStart[3] = 0xaf;
        hashGenesisBlock = uint256("0x3932037a7d96bc26d6379cdb6b3ecf1f78a1203ff667f06298d181f0ecbd2be7");
    }

    //
    // Load block index
    //
    CTxDB txdb("cr");
    if (!txdb.LoadBlockIndex())
        return false;
    txdb.Close();

    //
    // Init with genesis block
    //
    if (mapBlockIndex.empty())
    {
        if (!fAllowNew)
            return false;

        // Genesis block:
        // block.nTime = 1366559428
        // block.nNonce = 2085386442
        // block.GetHash = 384b060671f4a93948e9c168216dadb0ca2fbc54aa11c86b0345b6af1c59b2f5
        // CBlock(hash=384b060671f4a93948e9, PoW=00000951e146b0026411, ver=1,
        //   hashPrevBlock=00000000000000000000, hashMerkleRoot=5a2e19825b,
        //   nTime=1366559428, nBits=1e0ffff0, nNonce=2085386442, vtx=1)
        //   CTransaction(hash=5a2e19825b, ver=1, vin.size=1, vout.size=1, nLockTime=0)
        //     CTxIn(COutPoint(0000000000, -1), coinbase 04ffff001d010441746f646f3a207265706c616365207769746820736f6d657468696e67207468617420656e7375726573206e6f207072656d696e696e6720746f6f6b20706c616365)
        //     CTxOut(error)
        //   vMerkleTree: 5a2e19825b

        // Genesis block: coinbase embeds a timestamp headline, proving the
        // chain was not started before that date.
        const char* pszTimestamp = "Democrats See Minimum Wage as Key to 2014 Strategy";
        CTransaction txNew;
        txNew.vin.resize(1);
        txNew.vout.resize(1);
        txNew.vin[0].scriptSig = CScript() << 486604799 << CBigNum(4) << vector<unsigned char>((const unsigned char*)pszTimestamp, (const unsigned char*)pszTimestamp + strlen(pszTimestamp));
        txNew.vout[0].nValue = 0;
        txNew.vout[0].scriptPubKey = CScript() << 0x0 << OP_CHECKSIG; // a privkey for that 'vanity' pubkey would be interesting ;)
        CBlock block;
        block.vtx.push_back(txNew);
        block.hashPrevBlock = 0;
        block.hashMerkleRoot = block.BuildMerkleTree();
        block.nVersion = 1;
        block.nTime = 1388410492;
        block.nBits = 0x1e0ffff0;
        block.nNonce = 3890000544;
        // NOTE(review): testnet currently uses identical nTime/nNonce values
        // to mainnet, making this branch a no-op.
        if (fTestNet)
        {
            block.nTime = 1388410492;
            block.nNonce = 3890000544;
        }

        //// debug print
        printf("%s\n", block.GetHash().ToString().c_str());
        printf("%s\n", hashGenesisBlock.ToString().c_str());
        printf("%s\n", block.hashMerkleRoot.ToString().c_str());
        assert(block.hashMerkleRoot == uint256("0x0fc4b5859260a2381e5f55db8959a14e28e6229e8f59042edda3272378d2b17c"));

        // If genesis block hash does not match, then generate new genesis hash.
        // NOTE(review): the "true &&" is a leftover developer toggle; the
        // condition reduces to the hash comparison alone.
        if (true && block.GetHash() != hashGenesisBlock)
        {
            printf("Searching for genesis block...\n");
            // This will figure out a valid hash and Nonce if you're
            // creating a different genesis block:
            uint256 hashTarget = CBigNum().SetCompact(block.nBits).getuint256();
            uint256 thash;
            char scratchpad[SCRYPT_SCRATCHPAD_SIZE];

            loop
            {
                // Brute-force nonces until the scrypt PoW hash meets target.
                scrypt_1024_1_1_256_sp(BEGIN(block.nVersion), BEGIN(thash), scratchpad);
                if (thash <= hashTarget)
                    break;
                if ((block.nNonce & 0xFFF) == 0)
                {
                    printf("nonce %08X: hash = %s (target = %s)\n", block.nNonce, thash.ToString().c_str(), hashTarget.ToString().c_str());
                }
                ++block.nNonce;
                if (block.nNonce == 0)
                {
                    printf("NONCE WRAPPED, incrementing time\n");
                    ++block.nTime;
                }
            }
            printf("block.nTime = %u \n", block.nTime);
            printf("block.nNonce = %u \n", block.nNonce);
            printf("block.GetHash = %s\n", block.GetHash().ToString().c_str());
        }

        block.print();
        assert(block.GetHash() == hashGenesisBlock);

        // Start new block file
        unsigned int nFile;
        unsigned int nBlockPos;
        if (!block.WriteToDisk(nFile, nBlockPos))
            return error("LoadBlockIndex() : writing genesis block to disk failed");
        if (!block.AddToBlockIndex(nFile, nBlockPos))
            return error("LoadBlockIndex() : genesis block not accepted");
    }

    return true;
}
// Debug helper: dump the whole block tree to stdout as ASCII art, one line
// per block, walking depth-first from genesis with the main chain printed
// first at each fork.
void PrintBlockTree()
{
    // precompute tree structure: parent index -> list of children
    map<CBlockIndex*, vector<CBlockIndex*> > mapNext;
    for (map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.begin(); mi != mapBlockIndex.end(); ++mi)
    {
        CBlockIndex* pindex = (*mi).second;
        mapNext[pindex->pprev].push_back(pindex);
        // test
        //while (rand() % 3 == 0)
        //    mapNext[pindex->pprev].push_back(pindex);
    }

    // Explicit DFS stack of (column, node) pairs; the column tracks how
    // deep into side chains the node sits.
    vector<pair<int, CBlockIndex*> > vStack;
    vStack.push_back(make_pair(0, pindexGenesisBlock));

    int nPrevCol = 0;
    while (!vStack.empty())
    {
        int nCol = vStack.back().first;
        CBlockIndex* pindex = vStack.back().second;
        vStack.pop_back();

        // print split or gap
        if (nCol > nPrevCol)
        {
            for (int i = 0; i < nCol-1; i++)
                printf("| ");
            printf("|\\\n");
        }
        else if (nCol < nPrevCol)
        {
            for (int i = 0; i < nCol; i++)
                printf("| ");
            printf("|\n");
        }
        nPrevCol = nCol;

        // print columns
        for (int i = 0; i < nCol; i++)
            printf("| ");

        // print item: height, file location, hash, time, tx count
        CBlock block;
        block.ReadFromDisk(pindex);
        printf("%d (%u,%u) %s  %s  tx %d",
            pindex->nHeight,
            pindex->nFile,
            pindex->nBlockPos,
            block.GetHash().ToString().substr(0,20).c_str(),
            DateTimeStrFormat("%x %H:%M:%S", block.GetBlockTime()).c_str(),
            block.vtx.size());

        PrintWallets(block);

        // put the main timechain first (the child with pnext set is on the
        // best chain; swap it to the front so it prints before forks)
        vector<CBlockIndex*>& vNext = mapNext[pindex];
        for (unsigned int i = 0; i < vNext.size(); i++)
        {
            if (vNext[i]->pnext)
            {
                swap(vNext[0], vNext[i]);
                break;
            }
        }

        // iterate children
        for (unsigned int i = 0; i < vNext.size(); i++)
            vStack.push_back(make_pair(nCol+i, vNext[i]));
    }
}
// Import blocks from an external file (e.g. a bootstrap file): scan the
// raw bytes for the network magic, deserialize each framed block, and feed
// it through ProcessBlock().  Returns true when at least one block loaded.
bool LoadExternalBlockFile(FILE* fileIn)
{
    int nLoaded = 0;
    {
        LOCK(cs_main);
        try {
            // CAutoFile takes ownership of fileIn and closes it on scope exit.
            CAutoFile blkdat(fileIn, SER_DISK, CLIENT_VERSION);
            unsigned int nPos = 0;
            while (nPos != (unsigned int)-1 && blkdat.good() && !fRequestShutdown)
            {
                unsigned char pchData[65536];
                // Scan forward in 64KB chunks for the next magic sequence.
                do {
                    fseek(blkdat, nPos, SEEK_SET);
                    int nRead = fread(pchData, 1, sizeof(pchData), blkdat);
                    if (nRead <= 8)
                    {
                        // Too little data left for a header; stop scanning.
                        nPos = (unsigned int)-1;
                        break;
                    }
                    void* nFind = memchr(pchData, pchMessageStart[0], nRead+1-sizeof(pchMessageStart));
                    if (nFind)
                    {
                        if (memcmp(nFind, pchMessageStart, sizeof(pchMessageStart))==0)
                        {
                            // Found the full magic; position just past it.
                            nPos += ((unsigned char*)nFind - pchData) + sizeof(pchMessageStart);
                            break;
                        }
                        // First byte matched only; resume after it.
                        nPos += ((unsigned char*)nFind - pchData) + 1;
                    }
                    else
                        nPos += sizeof(pchData) - sizeof(pchMessageStart) + 1;
                } while(!fRequestShutdown);
                if (nPos == (unsigned int)-1)
                    break;
                fseek(blkdat, nPos, SEEK_SET);
                unsigned int nSize;
                blkdat >> nSize;
                if (nSize > 0 && nSize <= MAX_BLOCK_SIZE)
                {
                    CBlock block;
                    blkdat >> block;
                    if (ProcessBlock(NULL,&block))
                    {
                        nLoaded++;
                        // Advance past the 4-byte size field and the block.
                        nPos += 4 + nSize;
                    }
                }
            }
        }
        catch (std::exception &e) {
            printf("%s() : Deserialize or I/O error caught during load\n",
                   __PRETTY_FUNCTION__);
        }
    }
    printf("Loaded %i blocks from external file\n", nLoaded);
    return nLoaded > 0;
}
//////////////////////////////////////////////////////////////////////////////
//
// CAlert
//

// Active network alerts keyed by alert hash; always accessed under
// cs_mapAlerts.
map<uint256, CAlert> mapAlerts;
CCriticalSection cs_mapAlerts;
// Build a user-facing warning string.  strFor selects the consumer:
// "statusbar" returns the highest-priority status-bar text, "rpc" the
// RPC-safe text.  Any other value is a programming error (asserts).
string GetWarnings(string strFor)
{
    int nWarnPriority = 0;
    string strBarText;
    string strRpcText;

    if (GetBoolArg("-testsafemode"))
        strRpcText = "test";

    // Misc warnings like out of disk space and clock is wrong
    if (!strMiscWarning.empty())
    {
        nWarnPriority = 1000;
        strBarText = strMiscWarning;
    }

    // Longer invalid proof-of-work chain
    if (pindexBest && bnBestInvalidWork > bnBestChainWork + pindexBest->GetBlockWork() * 6)
    {
        nWarnPriority = 2000;
        strBarText = strRpcText = "WARNING: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.";
    }

    // Network alerts override anything of lower priority
    {
        LOCK(cs_mapAlerts);
        BOOST_FOREACH(PAIRTYPE(const uint256, CAlert)& item, mapAlerts)
        {
            const CAlert& alert = item.second;
            if (alert.AppliesToMe() && alert.nPriority > nWarnPriority)
            {
                nWarnPriority = alert.nPriority;
                strBarText = alert.strStatusBar;
            }
        }
    }

    if (strFor == "statusbar")
        return strBarText;
    if (strFor == "rpc")
        return strRpcText;

    assert(!"GetWarnings() : invalid parameter");
    return "error";
}
// Look up a currently known alert by its hash.  Returns a
// default-constructed (null) CAlert when no matching alert exists.
CAlert CAlert::getAlertByHash(const uint256 &hash)
{
    CAlert alertFound;
    {
        LOCK(cs_mapAlerts);
        map<uint256, CAlert>::iterator it = mapAlerts.find(hash);
        if (it != mapAlerts.end())
            alertFound = (*it).second;
    }
    return alertFound;
}
// Validate a received alert, expire or cancel superseded alerts, store the
// new one, and notify the UI when it applies to this node.  Returns false
// for invalid, expired, or already-cancelled alerts; the caller relays the
// alert only on true.
bool CAlert::ProcessAlert()
{
    if (!CheckSignature())
        return false;
    if (!IsInEffect())
        return false;

    {
        LOCK(cs_mapAlerts);
        // Cancel previous alerts
        // (erase(mi++) keeps the iterator valid across removal)
        for (map<uint256, CAlert>::iterator mi = mapAlerts.begin(); mi != mapAlerts.end();)
        {
            const CAlert& alert = (*mi).second;
            if (Cancels(alert))
            {
                printf("cancelling alert %d\n", alert.nID);
                uiInterface.NotifyAlertChanged((*mi).first, CT_DELETED);
                mapAlerts.erase(mi++);
            }
            else if (!alert.IsInEffect())
            {
                printf("expiring alert %d\n", alert.nID);
                uiInterface.NotifyAlertChanged((*mi).first, CT_DELETED);
                mapAlerts.erase(mi++);
            }
            else
                mi++;
        }

        // Check if this alert has been cancelled
        BOOST_FOREACH(PAIRTYPE(const uint256, CAlert)& item, mapAlerts)
        {
            const CAlert& alert = item.second;
            if (alert.Cancels(*this))
            {
                printf("alert already cancelled by %d\n", alert.nID);
                return false;
            }
        }

        // Add to mapAlerts
        mapAlerts.insert(make_pair(GetHash(), *this));
        // Notify UI if it applies to me
        if(AppliesToMe())
            uiInterface.NotifyAlertChanged(GetHash(), CT_NEW);
    }

    printf("accepted alert %d, AppliesToMe()=%d\n", nID, AppliesToMe());
    return true;
}
//////////////////////////////////////////////////////////////////////////////
//
// Messages
//
// Return true if we already possess the inventory item (transaction or
// block) in any of our stores: mempool, orphan pools, DB, or block index.
// Unknown inventory types also report true so we never request them.
bool static AlreadyHave(CTxDB& txdb, const CInv& inv)
{
    if (inv.type == MSG_TX)
    {
        bool fInMemPool = false;
        {
            // Take the mempool lock only for the membership test.
            LOCK(mempool.cs);
            fInMemPool = (mempool.exists(inv.hash));
        }
        if (fInMemPool)
            return true;
        return mapOrphanTransactions.count(inv.hash) ||
               txdb.ContainsTx(inv.hash);
    }

    if (inv.type == MSG_BLOCK)
        return mapBlockIndex.count(inv.hash) ||
               mapOrphanBlocks.count(inv.hash);

    // Don't know what it is, just say we already got one
    return true;
}
// The message start string is designed to be unlikely to occur in normal data.
// The characters are rarely used upper ascii, not valid as UTF-8, and produce
// a large 4-byte int at any alignment.
// NOTE: LoadBlockIndex() overwrites these bytes with testnet values when
// -testnet is active.
unsigned char pchMessageStart[4] = { 0xfc, 0xd9, 0xb7, 0xdd };
// Dispatch a single fully-framed network message from peer pfrom.
// Returns false on protocol violations (after bumping the peer's
// misbehaving score where appropriate) and true otherwise; unknown
// commands are silently ignored for forward compatibility.
bool static ProcessMessage(CNode* pfrom, string strCommand, CDataStream& vRecv)
{
    // One reusable receive-payment key per peer address (see "checkorder").
    static map<CService, CPubKey> mapReuseKey;
    RandAddSeedPerfmon();
    if (fDebug)
        printf("received: %s (%d bytes)\n", strCommand.c_str(), vRecv.size());
    // Test hook: randomly drop inbound messages when -dropmessagestest is set.
    if (mapArgs.count("-dropmessagestest") && GetRand(atoi(mapArgs["-dropmessagestest"])) == 0)
    {
        printf("dropmessagestest DROPPING RECV MESSAGE\n");
        return true;
    }

    // --- "version": the peer's opening handshake message ---
    if (strCommand == "version")
    {
        // Each connection can only send one version message
        if (pfrom->nVersion != 0)
        {
            pfrom->Misbehaving(1);
            return false;
        }

        int64 nTime;
        CAddress addrMe;
        CAddress addrFrom;
        uint64 nNonce = 1;
        vRecv >> pfrom->nVersion >> pfrom->nServices >> nTime >> addrMe;
        if (pfrom->nVersion < MIN_PROTO_VERSION)
        {
            // Since February 20, 2012, the protocol is initiated at version 209,
            // and earlier versions are no longer supported
            printf("partner %s using obsolete version %i; disconnecting\n", pfrom->addr.ToString().c_str(), pfrom->nVersion);
            pfrom->fDisconnect = true;
            return false;
        }

        if (pfrom->nVersion == 10300)
            pfrom->nVersion = 300;
        // The remaining handshake fields are optional for older peers.
        if (!vRecv.empty())
            vRecv >> addrFrom >> nNonce;
        if (!vRecv.empty())
            vRecv >> pfrom->strSubVer;
        if (!vRecv.empty())
            vRecv >> pfrom->nStartingHeight;

        // Learn our own routable address as the peer sees it.
        if (pfrom->fInbound && addrMe.IsRoutable())
        {
            pfrom->addrLocal = addrMe;
            SeenLocal(addrMe);
        }

        // Disconnect if we connected to ourself
        if (nNonce == nLocalHostNonce && nNonce > 1)
        {
            printf("connected to self at %s, disconnecting\n", pfrom->addr.ToString().c_str());
            pfrom->fDisconnect = true;
            return true;
        }

        // Be shy and don't send version until we hear
        if (pfrom->fInbound)
            pfrom->PushVersion();

        pfrom->fClient = !(pfrom->nServices & NODE_NETWORK);

        AddTimeData(pfrom->addr, nTime);

        // Change version
        pfrom->PushMessage("verack");
        pfrom->vSend.SetVersion(min(pfrom->nVersion, PROTOCOL_VERSION));

        if (!pfrom->fInbound)
        {
            // Advertise our address
            if (!fNoListen && !IsInitialBlockDownload())
            {
                CAddress addr = GetLocalAddress(&pfrom->addr);
                if (addr.IsRoutable())
                    pfrom->PushAddress(addr);
            }

            // Get recent addresses
            if (pfrom->fOneShot || pfrom->nVersion >= CADDR_TIME_VERSION || addrman.size() < 1000)
            {
                pfrom->PushMessage("getaddr");
                pfrom->fGetAddr = true;
            }
            addrman.Good(pfrom->addr);
        } else {
            if (((CNetAddr)pfrom->addr) == (CNetAddr)addrFrom)
            {
                addrman.Add(addrFrom, addrFrom);
                addrman.Good(addrFrom);
            }
        }

        // Ask the first connected node for block updates
        static int nAskedForBlocks = 0;
        if (!pfrom->fClient && !pfrom->fOneShot &&
            (pfrom->nVersion < NOBLKS_VERSION_START ||
             pfrom->nVersion >= NOBLKS_VERSION_END) &&
            (nAskedForBlocks < 1 || vNodes.size() <= 1))
        {
            nAskedForBlocks++;
            pfrom->PushGetBlocks(pindexBest, uint256(0));
        }

        // Relay alerts
        {
            LOCK(cs_mapAlerts);
            BOOST_FOREACH(PAIRTYPE(const uint256, CAlert)& item, mapAlerts)
                item.second.RelayTo(pfrom);
        }

        pfrom->fSuccessfullyConnected = true;

        printf("receive version message: version %d, blocks=%d, us=%s, them=%s, peer=%s\n", pfrom->nVersion, pfrom->nStartingHeight, addrMe.ToString().c_str(), addrFrom.ToString().c_str(), pfrom->addr.ToString().c_str());

        cPeerBlockCounts.input(pfrom->nStartingHeight);
    }

    else if (pfrom->nVersion == 0)
    {
        // Must have a version message before anything else
        pfrom->Misbehaving(1);
        return false;
    }

    // --- "verack": handshake acknowledgement; lock in the receive version ---
    else if (strCommand == "verack")
    {
        pfrom->vRecv.SetVersion(min(pfrom->nVersion, PROTOCOL_VERSION));
    }

    // --- "addr": peer shares known network addresses ---
    else if (strCommand == "addr")
    {
        vector<CAddress> vAddr;
        vRecv >> vAddr;

        // Don't want addr from older versions unless seeding
        if (pfrom->nVersion < CADDR_TIME_VERSION && addrman.size() > 1000)
            return true;
        if (vAddr.size() > 1000)
        {
            pfrom->Misbehaving(20);
            return error("message addr size() = %d", vAddr.size());
        }

        // Store the new addresses
        vector<CAddress> vAddrOk;
        int64 nNow = GetAdjustedTime();
        int64 nSince = nNow - 10 * 60;
        BOOST_FOREACH(CAddress& addr, vAddr)
        {
            if (fShutdown)
                return true;
            // Clamp implausible timestamps to "5 days ago".
            if (addr.nTime <= 100000000 || addr.nTime > nNow + 10 * 60)
                addr.nTime = nNow - 5 * 24 * 60 * 60;
            pfrom->AddAddressKnown(addr);
            bool fReachable = IsReachable(addr);
            if (addr.nTime > nSince && !pfrom->fGetAddr && vAddr.size() <= 10 && addr.IsRoutable())
            {
                // Relay to a limited number of other nodes
                {
                    LOCK(cs_vNodes);
                    // Use deterministic randomness to send to the same nodes for 24 hours
                    // at a time so the setAddrKnowns of the chosen nodes prevent repeats
                    static uint256 hashSalt;
                    if (hashSalt == 0)
                        hashSalt = GetRandHash();
                    uint64 hashAddr = addr.GetHash();
                    uint256 hashRand = hashSalt ^ (hashAddr<<32) ^ ((GetTime()+hashAddr)/(24*60*60));
                    hashRand = Hash(BEGIN(hashRand), END(hashRand));
                    multimap<uint256, CNode*> mapMix;
                    BOOST_FOREACH(CNode* pnode, vNodes)
                    {
                        if (pnode->nVersion < CADDR_TIME_VERSION)
                            continue;
                        // Mix the node pointer into the key so each node gets
                        // a different but stable ordering.
                        unsigned int nPointer;
                        memcpy(&nPointer, &pnode, sizeof(nPointer));
                        uint256 hashKey = hashRand ^ nPointer;
                        hashKey = Hash(BEGIN(hashKey), END(hashKey));
                        mapMix.insert(make_pair(hashKey, pnode));
                    }
                    int nRelayNodes = fReachable ? 2 : 1; // limited relaying of addresses outside our network(s)
                    for (multimap<uint256, CNode*>::iterator mi = mapMix.begin(); mi != mapMix.end() && nRelayNodes-- > 0; ++mi)
                        ((*mi).second)->PushAddress(addr);
                }
            }
            // Do not store addresses outside our network
            if (fReachable)
                vAddrOk.push_back(addr);
        }
        addrman.Add(vAddrOk, pfrom->addr, 2 * 60 * 60);
        if (vAddr.size() < 1000)
            pfrom->fGetAddr = false;
        if (pfrom->fOneShot)
            pfrom->fDisconnect = true;
    }

    // --- "inv": peer announces inventory (blocks/transactions) it has ---
    else if (strCommand == "inv")
    {
        vector<CInv> vInv;
        vRecv >> vInv;
        if (vInv.size() > 50000)
        {
            pfrom->Misbehaving(20);
            return error("message inv size() = %d", vInv.size());
        }

        // find last block in inv vector
        unsigned int nLastBlock = (unsigned int)(-1);
        for (unsigned int nInv = 0; nInv < vInv.size(); nInv++) {
            if (vInv[vInv.size() - 1 - nInv].type == MSG_BLOCK) {
                nLastBlock = vInv.size() - 1 - nInv;
                break;
            }
        }
        CTxDB txdb("r");
        for (unsigned int nInv = 0; nInv < vInv.size(); nInv++)
        {
            const CInv &inv = vInv[nInv];

            if (fShutdown)
                return true;
            pfrom->AddInventoryKnown(inv);

            bool fAlreadyHave = AlreadyHave(txdb, inv);
            if (fDebug)
                printf("  got inventory: %s  %s\n", inv.ToString().c_str(), fAlreadyHave ? "have" : "new");

            if (!fAlreadyHave)
                pfrom->AskFor(inv);
            else if (inv.type == MSG_BLOCK && mapOrphanBlocks.count(inv.hash)) {
                pfrom->PushGetBlocks(pindexBest, GetOrphanRoot(mapOrphanBlocks[inv.hash]));
            } else if (nInv == nLastBlock) {
                // In case we are on a very long side-chain, it is possible that we already have
                // the last block in an inv bundle sent in response to getblocks. Try to detect
                // this situation and push another getblocks to continue.
                // NOTE(review): vGetData is constructed but never used here.
                std::vector<CInv> vGetData(1,inv);
                pfrom->PushGetBlocks(mapBlockIndex[inv.hash], uint256(0));
                if (fDebug)
                    printf("force request: %s\n", inv.ToString().c_str());
            }

            // Track requests for our stuff
            Inventory(inv.hash);
        }
    }

    // --- "getdata": peer requests blocks/transactions by inventory ---
    else if (strCommand == "getdata")
    {
        vector<CInv> vInv;
        vRecv >> vInv;
        if (vInv.size() > 50000)
        {
            pfrom->Misbehaving(20);
            return error("message getdata size() = %d", vInv.size());
        }

        if (fDebugNet || (vInv.size() != 1))
            printf("received getdata (%d invsz)\n", vInv.size());

        BOOST_FOREACH(const CInv& inv, vInv)
        {
            if (fShutdown)
                return true;
            if (fDebugNet || (vInv.size() == 1))
                printf("received getdata for: %s\n", inv.ToString().c_str());

            if (inv.type == MSG_BLOCK)
            {
                // Send block from disk
                map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(inv.hash);
                if (mi != mapBlockIndex.end())
                {
                    CBlock block;
                    block.ReadFromDisk((*mi).second);
                    pfrom->PushMessage("block", block);

                    // Trigger them to send a getblocks request for the next batch of inventory
                    if (inv.hash == pfrom->hashContinue)
                    {
                        // Bypass PushInventory, this must send even if redundant,
                        // and we want it right after the last block so they don't
                        // wait for other stuff first.
                        vector<CInv> vInv;
                        vInv.push_back(CInv(MSG_BLOCK, hashBestChain));
                        pfrom->PushMessage("inv", vInv);
                        pfrom->hashContinue = 0;
                    }
                }
            }
            else if (inv.IsKnownType())
            {
                // Send stream from relay memory
                {
                    LOCK(cs_mapRelay);
                    map<CInv, CDataStream>::iterator mi = mapRelay.find(inv);
                    if (mi != mapRelay.end())
                        pfrom->PushMessage(inv.GetCommand(), (*mi).second);
                }
            }

            // Track requests for our stuff
            Inventory(inv.hash);
        }
    }

    // --- "getblocks": peer asks for block inventory after a locator ---
    else if (strCommand == "getblocks")
    {
        CBlockLocator locator;
        uint256 hashStop;
        vRecv >> locator >> hashStop;

        // Find the last block the caller has in the main chain
        CBlockIndex* pindex = locator.GetBlockIndex();

        // Send the rest of the chain
        if (pindex)
            pindex = pindex->pnext;
        int nLimit = 500;
        printf("getblocks %d to %s limit %d\n", (pindex ? pindex->nHeight : -1), hashStop.ToString().substr(0,20).c_str(), nLimit);
        for (; pindex; pindex = pindex->pnext)
        {
            if (pindex->GetBlockHash() == hashStop)
            {
                printf("  getblocks stopping at %d %s\n", pindex->nHeight, pindex->GetBlockHash().ToString().substr(0,20).c_str());
                break;
            }
            pfrom->PushInventory(CInv(MSG_BLOCK, pindex->GetBlockHash()));
            if (--nLimit <= 0)
            {
                // When this block is requested, we'll send an inv that'll make them
                // getblocks the next batch of inventory.
                printf("  getblocks stopping at limit %d %s\n", pindex->nHeight, pindex->GetBlockHash().ToString().substr(0,20).c_str());
                pfrom->hashContinue = pindex->GetBlockHash();
                break;
            }
        }
    }

    // --- "getheaders": peer asks for block headers after a locator ---
    else if (strCommand == "getheaders")
    {
        CBlockLocator locator;
        uint256 hashStop;
        vRecv >> locator >> hashStop;

        CBlockIndex* pindex = NULL;
        if (locator.IsNull())
        {
            // If locator is null, return the hashStop block
            map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hashStop);
            if (mi == mapBlockIndex.end())
                return true;
            pindex = (*mi).second;
        }
        else
        {
            // Find the last block the caller has in the main chain
            pindex = locator.GetBlockIndex();
            if (pindex)
                pindex = pindex->pnext;
        }

        vector<CBlock> vHeaders;
        int nLimit = 2000;
        printf("getheaders %d to %s\n", (pindex ? pindex->nHeight : -1), hashStop.ToString().substr(0,20).c_str());
        for (; pindex; pindex = pindex->pnext)
        {
            vHeaders.push_back(pindex->GetBlockHeader());
            if (--nLimit <= 0 || pindex->GetBlockHash() == hashStop)
                break;
        }
        pfrom->PushMessage("headers", vHeaders);
    }

    // --- "tx": peer relays a transaction ---
    else if (strCommand == "tx")
    {
        vector<uint256> vWorkQueue;
        vector<uint256> vEraseQueue;
        CDataStream vMsg(vRecv);
        CTxDB txdb("r");
        CTransaction tx;
        vRecv >> tx;

        CInv inv(MSG_TX, tx.GetHash());
        pfrom->AddInventoryKnown(inv);

        bool fMissingInputs = false;
        if (tx.AcceptToMemoryPool(txdb, true, &fMissingInputs))
        {
            SyncWithWallets(tx, NULL, true);
            RelayMessage(inv, vMsg);
            mapAlreadyAskedFor.erase(inv);
            vWorkQueue.push_back(inv.hash);
            vEraseQueue.push_back(inv.hash);

            // Recursively process any orphan transactions that depended on this one
            for (unsigned int i = 0; i < vWorkQueue.size(); i++)
            {
                uint256 hashPrev = vWorkQueue[i];
                for (map<uint256, CDataStream*>::iterator mi = mapOrphanTransactionsByPrev[hashPrev].begin();
                     mi != mapOrphanTransactionsByPrev[hashPrev].end();
                     ++mi)
                {
                    const CDataStream& vMsg = *((*mi).second);
                    CTransaction tx;
                    CDataStream(vMsg) >> tx;
                    CInv inv(MSG_TX, tx.GetHash());
                    bool fMissingInputs2 = false;

                    if (tx.AcceptToMemoryPool(txdb, true, &fMissingInputs2))
                    {
                        printf("   accepted orphan tx %s\n", inv.hash.ToString().substr(0,10).c_str());
                        SyncWithWallets(tx, NULL, true);
                        RelayMessage(inv, vMsg);
                        mapAlreadyAskedFor.erase(inv);
                        vWorkQueue.push_back(inv.hash);
                        vEraseQueue.push_back(inv.hash);
                    }
                    else if (!fMissingInputs2)
                    {
                        // invalid orphan
                        vEraseQueue.push_back(inv.hash);
                        printf("   removed invalid orphan tx %s\n", inv.hash.ToString().substr(0,10).c_str());
                    }
                }
            }

            BOOST_FOREACH(uint256 hash, vEraseQueue)
                EraseOrphanTx(hash);
        }
        else if (fMissingInputs)
        {
            AddOrphanTx(vMsg);

            // DoS prevention: do not allow mapOrphanTransactions to grow unbounded
            unsigned int nEvicted = LimitOrphanTxSize(MAX_ORPHAN_TRANSACTIONS);
            if (nEvicted > 0)
                printf("mapOrphan overflow, removed %u tx\n", nEvicted);
        }
        if (tx.nDoS) pfrom->Misbehaving(tx.nDoS);
    }

    // --- "block": peer relays a full block ---
    else if (strCommand == "block")
    {
        CBlock block;
        vRecv >> block;

        printf("received block %s\n", block.GetHash().ToString().substr(0,20).c_str());
        // block.print();

        CInv inv(MSG_BLOCK, block.GetHash());
        pfrom->AddInventoryKnown(inv);

        if (ProcessBlock(pfrom, &block))
            mapAlreadyAskedFor.erase(inv);
        if (block.nDoS) pfrom->Misbehaving(block.nDoS);
    }

    // --- "getaddr": peer requests our known addresses ---
    else if (strCommand == "getaddr")
    {
        pfrom->vAddrToSend.clear();
        vector<CAddress> vAddr = addrman.GetAddr();
        BOOST_FOREACH(const CAddress &addr, vAddr)
            pfrom->PushAddress(addr);
    }

    // --- "checkorder": legacy pay-by-IP order check ---
    else if (strCommand == "checkorder")
    {
        uint256 hashReply;
        vRecv >> hashReply;

        if (!GetBoolArg("-allowreceivebyip"))
        {
            // Feature disabled: reply with code 2 (refused).
            pfrom->PushMessage("reply", hashReply, (int)2, string(""));
            return true;
        }

        CWalletTx order;
        vRecv >> order;

        /// we have a chance to check the order here

        // Keep giving the same key to the same ip until they use it
        if (!mapReuseKey.count(pfrom->addr))
            pwalletMain->GetKeyFromPool(mapReuseKey[pfrom->addr], true);

        // Send back approval of order and pubkey to use
        CScript scriptPubKey;
        scriptPubKey << mapReuseKey[pfrom->addr] << OP_CHECKSIG;
        pfrom->PushMessage("reply", hashReply, (int)0, scriptPubKey);
    }

    // --- "reply": response to one of our tracked requests ---
    else if (strCommand == "reply")
    {
        uint256 hashReply;
        vRecv >> hashReply;

        CRequestTracker tracker;
        {
            LOCK(pfrom->cs_mapRequests);
            map<uint256, CRequestTracker>::iterator mi = pfrom->mapRequests.find(hashReply);
            if (mi != pfrom->mapRequests.end())
            {
                tracker = (*mi).second;
                pfrom->mapRequests.erase(mi);
            }
        }
        if (!tracker.IsNull())
            tracker.fn(tracker.param1, vRecv);
    }

    // --- "ping": BIP 31 keepalive; echo back the nonce ---
    else if (strCommand == "ping")
    {
        if (pfrom->nVersion > BIP0031_VERSION)
        {
            uint64 nonce = 0;
            vRecv >> nonce;
            // Echo the message back with the nonce. This allows for two useful features:
            //
            // 1) A remote node can quickly check if the connection is operational
            // 2) Remote nodes can measure the latency of the network thread. If this node
            //    is overloaded it won't respond to pings quickly and the remote node can
            //    avoid sending us more work, like chain download requests.
            //
            // The nonce stops the remote getting confused between different pings: without
            // it, if the remote node sends a ping once per second and this node takes 5
            // seconds to respond to each, the 5th ping the remote sends would appear to
            // return very quickly.
            pfrom->PushMessage("pong", nonce);
        }
    }

    // --- "alert": network alert; validate, then relay to all peers ---
    else if (strCommand == "alert")
    {
        CAlert alert;
        vRecv >> alert;

        if (alert.ProcessAlert())
        {
            // Relay
            pfrom->setKnown.insert(alert.GetHash());
            {
                LOCK(cs_vNodes);
                BOOST_FOREACH(CNode* pnode, vNodes)
                    alert.RelayTo(pnode);
            }
        }
    }

    else
    {
        // Ignore unknown commands for extensibility
    }

    // Update the last seen time for this node's address
    if (pfrom->fNetworkNode)
        if (strCommand == "version" || strCommand == "addr" || strCommand == "inv" || strCommand == "getdata" || strCommand == "ping")
            AddressCurrentlyConnected(pfrom->addr);

    return true;
}
// Drains as many complete wire messages as possible from pfrom's receive
// buffer and dispatches each one to ProcessMessage().
//
// For each message: scan for the network magic bytes, parse and sanity-check
// the header, wait for the full payload if it has not arrived yet, verify the
// payload checksum, then dispatch under cs_main.  Corrupt, oversized, or
// malformed messages are logged and skipped rather than aborting the loop.
// Always returns true.
bool ProcessMessages(CNode* pfrom)
{
    CDataStream& vRecv = pfrom->vRecv;
    if (vRecv.empty())
        return true;
    //if (fDebug)
    //    printf("ProcessMessages(%u bytes)\n", vRecv.size());

    //
    // Message format
    //  (4) message start
    //  (12) command
    //  (4) size
    //  (4) checksum
    //  (x) data
    //
    loop
    {
        // Don't bother if send buffer is too full to respond anyway
        if (pfrom->vSend.size() >= SendBufferSize())
            break;

        // Scan for message start
        CDataStream::iterator pstart = search(vRecv.begin(), vRecv.end(), BEGIN(pchMessageStart), END(pchMessageStart));
        int nHeaderSize = vRecv.GetSerializeSize(CMessageHeader());
        if (vRecv.end() - pstart < nHeaderSize)
        {
            // Magic not found with enough data behind it for a header.
            // Keep only the last nHeaderSize bytes (the magic could be split
            // across reads) and wait for more data.
            if ((int)vRecv.size() > nHeaderSize)
            {
                printf("\n\nPROCESSMESSAGE MESSAGESTART NOT FOUND\n\n");
                vRecv.erase(vRecv.begin(), vRecv.end() - nHeaderSize);
            }
            break;
        }
        // Discard any garbage bytes preceding the message start.
        if (pstart - vRecv.begin() > 0)
            printf("\n\nPROCESSMESSAGE SKIPPED %d BYTES\n\n", pstart - vRecv.begin());
        vRecv.erase(vRecv.begin(), pstart);

        // Read header.  A raw copy is saved first so the stream can be
        // rewound if the full payload has not arrived yet.
        vector<char> vHeaderSave(vRecv.begin(), vRecv.begin() + nHeaderSize);
        CMessageHeader hdr;
        vRecv >> hdr;
        if (!hdr.IsValid())
        {
            printf("\n\nPROCESSMESSAGE: ERRORS IN HEADER %s\n\n\n", hdr.GetCommand().c_str());
            continue;
        }
        string strCommand = hdr.GetCommand();

        // Message size
        unsigned int nMessageSize = hdr.nMessageSize;
        if (nMessageSize > MAX_SIZE)
        {
            printf("ProcessMessages(%s, %u bytes) : nMessageSize > MAX_SIZE\n", strCommand.c_str(), nMessageSize);
            continue;
        }
        if (nMessageSize > vRecv.size())
        {
            // Rewind and wait for rest of message
            vRecv.insert(vRecv.begin(), vHeaderSave.begin(), vHeaderSave.end());
            break;
        }

        // Checksum: the first 4 bytes of Hash() over the payload must match
        // the value claimed in the header.
        uint256 hash = Hash(vRecv.begin(), vRecv.begin() + nMessageSize);
        unsigned int nChecksum = 0;
        memcpy(&nChecksum, &hash, sizeof(nChecksum));
        if (nChecksum != hdr.nChecksum)
        {
            printf("ProcessMessages(%s, %u bytes) : CHECKSUM ERROR nChecksum=%08x hdr.nChecksum=%08x\n",
               strCommand.c_str(), nMessageSize, nChecksum, hdr.nChecksum);
            continue;
        }

        // Copy message to its own buffer
        CDataStream vMsg(vRecv.begin(), vRecv.begin() + nMessageSize, vRecv.nType, vRecv.nVersion);
        vRecv.ignore(nMessageSize);

        // Process message
        bool fRet = false;
        try
        {
            {
                // cs_main is held only for the dispatch itself.
                LOCK(cs_main);
                fRet = ProcessMessage(pfrom, strCommand, vMsg);
            }
            if (fShutdown)
                return true;
        }
        catch (std::ios_base::failure& e)
        {
            if (strstr(e.what(), "end of data"))
            {
                // Allow exceptions from underlength message on vRecv
                printf("ProcessMessages(%s, %u bytes) : Exception '%s' caught, normally caused by a message being shorter than its stated length\n", strCommand.c_str(), nMessageSize, e.what());
            }
            else if (strstr(e.what(), "size too large"))
            {
                // Allow exceptions from overlong size
                printf("ProcessMessages(%s, %u bytes) : Exception '%s' caught\n", strCommand.c_str(), nMessageSize, e.what());
            }
            else
            {
                PrintExceptionContinue(&e, "ProcessMessages()");
            }
        }
        catch (std::exception& e) {
            PrintExceptionContinue(&e, "ProcessMessages()");
        } catch (...) {
            PrintExceptionContinue(NULL, "ProcessMessages()");
        }

        if (!fRet)
            printf("ProcessMessage(%s, %u bytes) FAILED\n", strCommand.c_str(), nMessageSize);
    }

    // Reclaim buffer space consumed by processed messages.
    vRecv.Compact();
    return true;
}
// Flushes queued outbound traffic to a single peer: keep-alive pings,
// wallet-transaction rebroadcast, periodic own-address rebroadcast, batched
// "addr" and "inv" announcements (tx invs are trickled per pass for privacy),
// and due "getdata" requests.
//
// Does nothing if cs_main cannot be acquired without blocking or if the peer
// has not yet completed the version handshake.  Always returns true.
bool SendMessages(CNode* pto, bool fSendTrickle)
{
    TRY_LOCK(cs_main, lockMain);
    if (lockMain) {
        // Don't send anything until we get their version message
        if (pto->nVersion == 0)
            return true;

        // Keep-alive ping. We send a nonce of zero because we don't use it anywhere
        // right now.
        if (pto->nLastSend && GetTime() - pto->nLastSend > 30 * 60 && pto->vSend.empty()) {
            uint64 nonce = 0;
            // Peers past BIP0031_VERSION expect the nonce with the ping.
            if (pto->nVersion > BIP0031_VERSION)
                pto->PushMessage("ping", nonce);
            else
                pto->PushMessage("ping");
        }

        // Resend wallet transactions that haven't gotten in a block yet
        ResendWalletTransactions();

        // Address refresh broadcast
        static int64 nLastRebroadcast;
        if (!IsInitialBlockDownload() && (GetTime() - nLastRebroadcast > 24 * 60 * 60))
        {
            {
                LOCK(cs_vNodes);
                BOOST_FOREACH(CNode* pnode, vNodes)
                {
                    // Periodically clear setAddrKnown to allow refresh broadcasts
                    if (nLastRebroadcast)
                        pnode->setAddrKnown.clear();

                    // Rebroadcast our address
                    if (!fNoListen)
                    {
                        CAddress addr = GetLocalAddress(&pnode->addr);
                        if (addr.IsRoutable())
                            pnode->PushAddress(addr);
                    }
                }
            }
            nLastRebroadcast = GetTime();
        }

        //
        // Message: addr
        //
        if (fSendTrickle)
        {
            vector<CAddress> vAddr;
            vAddr.reserve(pto->vAddrToSend.size());
            BOOST_FOREACH(const CAddress& addr, pto->vAddrToSend)
            {
                // returns true if wasn't already contained in the set
                if (pto->setAddrKnown.insert(addr).second)
                {
                    vAddr.push_back(addr);
                    // receiver rejects addr messages larger than 1000
                    if (vAddr.size() >= 1000)
                    {
                        pto->PushMessage("addr", vAddr);
                        vAddr.clear();
                    }
                }
            }
            pto->vAddrToSend.clear();
            if (!vAddr.empty())
                pto->PushMessage("addr", vAddr);
        }

        //
        // Message: inventory
        //
        vector<CInv> vInv;
        vector<CInv> vInvWait;  // tx invs deferred to a later trickle pass
        {
            LOCK(pto->cs_inventory);
            vInv.reserve(pto->vInventoryToSend.size());
            vInvWait.reserve(pto->vInventoryToSend.size());
            BOOST_FOREACH(const CInv& inv, pto->vInventoryToSend)
            {
                if (pto->setInventoryKnown.count(inv))
                    continue;

                // trickle out tx inv to protect privacy
                if (inv.type == MSG_TX && !fSendTrickle)
                {
                    // 1/4 of tx invs blast to all immediately
                    static uint256 hashSalt;
                    if (hashSalt == 0)
                        hashSalt = GetRandHash();
                    // Pseudo-random selection keyed on the tx hash and a
                    // process-lifetime salt.
                    uint256 hashRand = inv.hash ^ hashSalt;
                    hashRand = Hash(BEGIN(hashRand), END(hashRand));
                    bool fTrickleWait = ((hashRand & 3) != 0);

                    // always trickle our own transactions
                    if (!fTrickleWait)
                    {
                        CWalletTx wtx;
                        if (GetTransaction(inv.hash, wtx))
                            if (wtx.fFromMe)
                                fTrickleWait = true;
                    }

                    if (fTrickleWait)
                    {
                        vInvWait.push_back(inv);
                        continue;
                    }
                }

                // returns true if wasn't already contained in the set
                if (pto->setInventoryKnown.insert(inv).second)
                {
                    vInv.push_back(inv);
                    if (vInv.size() >= 1000)
                    {
                        pto->PushMessage("inv", vInv);
                        vInv.clear();
                    }
                }
            }
            pto->vInventoryToSend = vInvWait;
        }
        if (!vInv.empty())
            pto->PushMessage("inv", vInv);

        //
        // Message: getdata
        //
        vector<CInv> vGetData;
        int64 nNow = GetTime() * 1000000;  // current time scaled to microseconds
        CTxDB txdb("r");
        // Request everything in mapAskFor whose scheduled time has arrived.
        while (!pto->mapAskFor.empty() && (*pto->mapAskFor.begin()).first <= nNow)
        {
            const CInv& inv = (*pto->mapAskFor.begin()).second;
            if (!AlreadyHave(txdb, inv))
            {
                if (fDebugNet)
                    printf("sending getdata: %s\n", inv.ToString().c_str());
                vGetData.push_back(inv);
                if (vGetData.size() >= 1000)
                {
                    pto->PushMessage("getdata", vGetData);
                    vGetData.clear();
                }
                mapAlreadyAskedFor[inv] = nNow;
            }
            pto->mapAskFor.erase(pto->mapAskFor.begin());
        }
        if (!vGetData.empty())
            pto->PushMessage("getdata", vGetData);

    }
    return true;
}
//////////////////////////////////////////////////////////////////////////////
//
// BitcoinMiner
//
// Appends SHA-256 padding to the len data bytes at pbuffer: a 0x80
// terminator byte, zero fill, and the message length in bits stored
// big-endian in the final 4 bytes of the last 64-byte block.
// The buffer must have room for the returned number of 64-byte blocks.
// Returns the total number of 64-byte blocks occupied.
int static FormatHashBlocks(void* pbuffer, unsigned int len)
{
    unsigned char* p = (unsigned char*)pbuffer;
    // Blocks needed for the data plus the terminator and 8-byte length field.
    unsigned int nBlocks = 1 + ((len + 8) / 64);
    unsigned char* pEnd = p + 64 * nBlocks;
    // Zero out the padding region, then set the terminator bit.
    memset(p + len, 0, 64 * nBlocks - len);
    p[len] = 0x80;
    // Store the bit length big-endian in the last four bytes.
    unsigned int nBits = len * 8;
    for (int i = 0; i < 4; i++)
        pEnd[-1 - i] = (unsigned char)((nBits >> (8 * i)) & 0xff);
    return nBlocks;
}
// Initial SHA-256 chaining values (the standard H(0) constants from
// FIPS 180-4); passed to SHA256Transform() to start a fresh hash.
static const unsigned int pSHA256InitState[8] =
{0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19};
void SHA256Transform(void* pstate, void* pinput, const void* pinit)
{
SHA256_CTX ctx;
unsigned char data[64];
SHA256_Init(&ctx);
for (int i = 0; i < 16; i++)
((uint32_t*)data)[i] = ByteReverse(((uint32_t*)pinput)[i]);
for (int i = 0; i < 8; i++)
ctx.h[i] = ((uint32_t*)pinit)[i];
SHA256_Update(&ctx, data, sizeof(data));
for (int i = 0; i < 8; i++)
((uint32_t*)pstate)[i] = ctx.h[i];
}
//
// ScanHash scans nonces looking for a hash with at least some zero bits.
// It operates on big endian data. Caller does the byte reversing.
// All input buffers are 16-byte aligned. nNonce is usually preserved
// between calls, but periodically or if nNonce is 0xffff0000 or above,
// the block is rebuilt and nNonce starts over at zero.
//
// Scans nonces looking for a double-SHA-256 hash whose top 16 bits are zero
// (a cheap pre-filter; the caller checks against the real target).
// Operates on big-endian, 16-byte-aligned buffers prepared by
// FormatHashBuffers(); nNonce lives at byte offset 12 of pdata.
// Returns the candidate nonce, or (unsigned int)-1 after 0x10000 attempts
// so the caller can periodically rebuild the block.
unsigned int static ScanHash_CryptoPP(char* pmidstate, char* pdata, char* phash1, char* phash, unsigned int& nHashesDone)
{
    // The nonce field sits at byte offset 12 of the block data buffer.
    unsigned int& nNonce = *(unsigned int*)(pdata + 12);
    for (;;)
    {
        ++nNonce;

        // Double SHA-256: pdata -> phash1 (resuming from the precomputed
        // midstate), then phash1 -> phash from the standard initial state.
        SHA256Transform(phash1, pdata, pmidstate);
        SHA256Transform(phash, phash1, pSHA256InitState);

        // Hand back any hash whose last big-endian word starts with 16 zero
        // bits; the caller verifies against the full target.
        if (((unsigned short*)phash)[14] == 0)
            return nNonce;

        // After 0x10000 consecutive misses, report the work done and bail
        // out so the caller can refresh the block.
        if ((nNonce & 0xffff) == 0)
        {
            nHashesDone = 0xffff + 1;
            return (unsigned int)-1;
        }
    }
}
// Some explaining would be appreciated
class COrphan
{
public:
CTransaction* ptx;
set<uint256> setDependsOn;
double dPriority;
COrphan(CTransaction* ptxIn)
{
ptx = ptxIn;
dPriority = 0;
}
void print() const
{
printf("COrphan(hash=%s, dPriority=%.1f)\n", ptx->GetHash().ToString().substr(0,10).c_str(), dPriority);
BOOST_FOREACH(uint256 hash, setDependsOn)
printf(" setDependsOn %s\n", hash.ToString().substr(0,10).c_str());
}
};
// Statistics for the most recently assembled candidate block;
// written by CreateNewBlock() below.
uint64 nLastBlockTx = 0;
uint64 nLastBlockSize = 0;
// Assembles a new candidate block on top of the current best chain tip.
//
// Builds the coinbase transaction paying reservekey, then fills the block
// with mempool transactions in priority order (priority = sum(input value *
// input depth) / tx size), honouring block size, sigop, and minimum-fee
// limits.  Transactions whose inputs are still unconfirmed are parked in
// COrphan entries and enqueued once their dependencies have been included.
//
// Returns a heap-allocated block (caller takes ownership) or NULL on failure.
CBlock* CreateNewBlock(CReserveKey& reservekey)
{
    CBlockIndex* pindexPrev = pindexBest;

    // Create new block
    auto_ptr<CBlock> pblock(new CBlock());
    if (!pblock.get())
        return NULL;

    // Create coinbase tx
    CTransaction txNew;
    txNew.vin.resize(1);
    txNew.vin[0].prevout.SetNull();
    txNew.vout.resize(1);
    txNew.vout[0].scriptPubKey << reservekey.GetReservedKey() << OP_CHECKSIG;

    // Add our coinbase tx as first transaction
    pblock->vtx.push_back(txNew);

    // Collect memory pool transactions into the block
    int64 nFees = 0;
    {
        LOCK2(cs_main, mempool.cs);
        CTxDB txdb("r");

        // Priority order to process transactions
        list<COrphan> vOrphan; // list memory doesn't move
        map<uint256, vector<COrphan*> > mapDependers;
        multimap<double, CTransaction*> mapPriority;  // keyed on -priority so begin() is highest
        for (map<uint256, CTransaction>::iterator mi = mempool.mapTx.begin(); mi != mempool.mapTx.end(); ++mi)
        {
            CTransaction& tx = (*mi).second;
            if (tx.IsCoinBase() || !tx.IsFinal())
                continue;

            COrphan* porphan = NULL;
            double dPriority = 0;
            BOOST_FOREACH(const CTxIn& txin, tx.vin)
            {
                // Read prev transaction
                CTransaction txPrev;
                CTxIndex txindex;
                if (!txPrev.ReadFromDisk(txdb, txin.prevout, txindex))
                {
                    // Input not on disk: the parent is presumably still in
                    // the mempool, so this tx has to wait for dependencies.
                    if (!porphan)
                    {
                        // Use list for automatic deletion
                        vOrphan.push_back(COrphan(&tx));
                        porphan = &vOrphan.back();
                    }
                    mapDependers[txin.prevout.hash].push_back(porphan);
                    porphan->setDependsOn.insert(txin.prevout.hash);
                    continue;
                }
                int64 nValueIn = txPrev.vout[txin.prevout.n].nValue;

                // Read block header
                int nConf = txindex.GetDepthInMainChain();

                dPriority += (double)nValueIn * nConf;

                if (fDebug && GetBoolArg("-printpriority"))
                    printf("priority nValueIn=%-12"PRI64d" nConf=%-5d dPriority=%-20.1f\n", nValueIn, nConf, dPriority);
            }

            // Priority is sum(valuein * age) / txsize
            dPriority /= ::GetSerializeSize(tx, SER_NETWORK, PROTOCOL_VERSION);

            if (porphan)
                porphan->dPriority = dPriority;
            else
                mapPriority.insert(make_pair(-dPriority, &(*mi).second));

            if (fDebug && GetBoolArg("-printpriority"))
            {
                printf("priority %-20.1f %s\n%s", dPriority, tx.GetHash().ToString().substr(0,10).c_str(), tx.ToString().c_str());
                if (porphan)
                    porphan->print();
                printf("\n");
            }
        }

        // Collect transactions into block
        map<uint256, CTxIndex> mapTestPool;
        uint64 nBlockSize = 1000;  // initial size allowance before any mempool tx is added
        uint64 nBlockTx = 0;
        int nBlockSigOps = 100;    // initial sigop allowance
        while (!mapPriority.empty())
        {
            // Take highest priority transaction off priority queue
            double dPriority = -(*mapPriority.begin()).first;
            CTransaction& tx = *(*mapPriority.begin()).second;
            mapPriority.erase(mapPriority.begin());

            // Size limits
            unsigned int nTxSize = ::GetSerializeSize(tx, SER_NETWORK, PROTOCOL_VERSION);
            if (nBlockSize + nTxSize >= MAX_BLOCK_SIZE_GEN)
                continue;

            // Legacy limits on sigOps:
            unsigned int nTxSigOps = tx.GetLegacySigOpCount();
            if (nBlockSigOps + nTxSigOps >= MAX_BLOCK_SIGOPS)
                continue;

            // Transaction fee required depends on block size
            // Litecoind: Reduce the exempted free transactions to 500 bytes (from Bitcoin's 3000 bytes)
            bool fAllowFree = (nBlockSize + nTxSize < 1500 || CTransaction::AllowFree(dPriority));
            int64 nMinFee = tx.GetMinFee(nBlockSize, fAllowFree, GMF_BLOCK);

            // Connecting shouldn't fail due to dependency on other memory pool transactions
            // because we're already processing them in order of dependency
            map<uint256, CTxIndex> mapTestPoolTmp(mapTestPool);
            MapPrevTx mapInputs;
            bool fInvalid;
            if (!tx.FetchInputs(txdb, mapTestPoolTmp, false, true, mapInputs, fInvalid))
                continue;

            int64 nTxFees = tx.GetValueIn(mapInputs)-tx.GetValueOut();
            if (nTxFees < nMinFee)
                continue;

            nTxSigOps += tx.GetP2SHSigOpCount(mapInputs);
            if (nBlockSigOps + nTxSigOps >= MAX_BLOCK_SIGOPS)
                continue;

            // Validate against the test pool; the tmp copy is only committed
            // (via swap) if the transaction connects cleanly.
            if (!tx.ConnectInputs(mapInputs, mapTestPoolTmp, CDiskTxPos(1,1,1), pindexPrev, false, true))
                continue;
            mapTestPoolTmp[tx.GetHash()] = CTxIndex(CDiskTxPos(1,1,1), tx.vout.size());
            swap(mapTestPool, mapTestPoolTmp);

            // Added
            pblock->vtx.push_back(tx);
            nBlockSize += nTxSize;
            ++nBlockTx;
            nBlockSigOps += nTxSigOps;
            nFees += nTxFees;

            // Add transactions that depend on this one to the priority queue
            uint256 hash = tx.GetHash();
            if (mapDependers.count(hash))
            {
                BOOST_FOREACH(COrphan* porphan, mapDependers[hash])
                {
                    if (!porphan->setDependsOn.empty())
                    {
                        porphan->setDependsOn.erase(hash);
                        if (porphan->setDependsOn.empty())
                            mapPriority.insert(make_pair(-porphan->dPriority, porphan->ptx));
                    }
                }
            }
        }

        nLastBlockTx = nBlockTx;
        nLastBlockSize = nBlockSize;
        printf("CreateNewBlock(): total size %lu\n", nBlockSize);

    }
    // Coinbase value: block subsidy for the next height plus collected fees.
    pblock->vtx[0].vout[0].nValue = GetBlockValue(pindexPrev->nHeight+1, nFees, pindexPrev->GetBlockHash());

    // Fill in header
    pblock->hashPrevBlock = pindexPrev->GetBlockHash();
    pblock->hashMerkleRoot = pblock->BuildMerkleTree();
    pblock->UpdateTime(pindexPrev);
    pblock->nBits = GetNextWorkRequired(pindexPrev, pblock.get());
    pblock->nNonce = 0;

    return pblock.release();
}
// Bumps the coinbase extra nonce and rebuilds the merkle root so repeated
// mining attempts on the same previous block produce distinct block hashes.
// The counter resets whenever mining moves to a new previous block.
void IncrementExtraNonce(CBlock* pblock, CBlockIndex* pindexPrev, unsigned int& nExtraNonce)
{
    // Update nExtraNonce
    static uint256 hashPrevBlock;
    if (hashPrevBlock != pblock->hashPrevBlock)
    {
        nExtraNonce = 0;
        hashPrevBlock = pblock->hashPrevBlock;
    }
    ++nExtraNonce;
    // The extra nonce lives in the coinbase scriptSig; changing it changes
    // the coinbase txid and therefore the merkle root below.
    pblock->vtx[0].vin[0].scriptSig = (CScript() << pblock->nTime << CBigNum(nExtraNonce)) + COINBASE_FLAGS;
    // Coinbase scriptSig must stay within 100 bytes.
    assert(pblock->vtx[0].vin[0].scriptSig.size() <= 100);

    pblock->hashMerkleRoot = pblock->BuildMerkleTree();
}
// Prepares the aligned scratch buffers the mining scan loop consumes:
//   pmidstate - SHA-256 state after hashing the header's first 64-byte
//               chunk (stays constant while scanning nonces)
//   pdata     - 128-byte buffer: block header plus SHA-256 padding
//   phash1    - 64-byte padded buffer for the intermediate hash
// The temporary struct mirrors the serialized header layout; the whole
// buffer is byte-swapped because SHA256Transform() consumes big-endian
// words.
void FormatHashBuffers(CBlock* pblock, char* pmidstate, char* pdata, char* phash1)
{
    //
    // Prebuild hash buffers
    //
    struct
    {
        struct unnamed2
        {
            // Field order and widths mirror the serialized block header.
            int nVersion;
            uint256 hashPrevBlock;
            uint256 hashMerkleRoot;
            unsigned int nTime;
            unsigned int nBits;
            unsigned int nNonce;
        }
        block;
        unsigned char pchPadding0[64];  // room for SHA-256 padding after the header
        uint256 hash1;
        unsigned char pchPadding1[64];  // room for SHA-256 padding after hash1
    }
    tmp;
    memset(&tmp, 0, sizeof(tmp));

    tmp.block.nVersion = pblock->nVersion;
    tmp.block.hashPrevBlock = pblock->hashPrevBlock;
    tmp.block.hashMerkleRoot = pblock->hashMerkleRoot;
    tmp.block.nTime = pblock->nTime;
    tmp.block.nBits = pblock->nBits;
    tmp.block.nNonce = pblock->nNonce;

    // Write SHA-256 padding after the header and the intermediate hash.
    FormatHashBlocks(&tmp.block, sizeof(tmp.block));
    FormatHashBlocks(&tmp.hash1, sizeof(tmp.hash1));

    // Byte swap all the input buffer
    for (unsigned int i = 0; i < sizeof(tmp)/4; i++)
        ((unsigned int*)&tmp)[i] = ByteReverse(((unsigned int*)&tmp)[i]);

    // Precalc the first half of the first hash, which stays constant
    SHA256Transform(pmidstate, &tmp.block, pSHA256InitState);

    memcpy(pdata, &tmp.block, 128);
    memcpy(phash1, &tmp.hash1, 64);
}
// Validates and submits a block produced by the miner.
//
// Returns false if the proof-of-work hash does not meet the target or the
// block is stale (the chain tip moved while hashing).  On success the
// reserve key is consumed, the block's getdata requests are tracked in the
// wallet, and the block is processed as if received from another node.
bool CheckWork(CBlock* pblock, CWallet& wallet, CReserveKey& reservekey)
{
    uint256 hash = pblock->GetPoWHash();
    uint256 hashTarget = CBigNum().SetCompact(pblock->nBits).getuint256();

    if (hash > hashTarget)
        return false;

    //// debug print
    printf("BitcoinMiner:\n");
    printf("proof-of-work found \n hash: %s \ntarget: %s\n", hash.GetHex().c_str(), hashTarget.GetHex().c_str());
    pblock->print();
    printf("generated %s\n", FormatMoney(pblock->vtx[0].vout[0].nValue).c_str());

    // Found a solution
    {
        LOCK(cs_main);
        if (pblock->hashPrevBlock != hashBestChain)
            return error("BitcoinMiner : generated block is stale");

        // Remove key from key pool
        reservekey.KeepKey();

        // Track how many getdata requests this block gets
        {
            LOCK(wallet.cs_wallet);
            wallet.mapRequestCount[pblock->GetHash()] = 0;
        }

        // Process this block the same as if we had received it from another node
        if (!ProcessBlock(NULL, pblock))
            return error("BitcoinMiner : ProcessBlock, block not accepted");
    }

    return true;
}
// Forward declaration; defined below.
void static ThreadBitcoinMiner(void* parg);

// Mining control state, set by GenerateBitcoins() and read by the miner threads:
static bool fGenerateBitcoins = false;  // master on/off switch for mining
static bool fLimitProcessors = false;   // true when -genproclimit is set (not -1)
static int nLimitProcessors = -1;       // max miner threads; -1 means unlimited
// Main loop for one CPU mining thread.
//
// Waits until peers are connected and initial block download is finished,
// builds a candidate block via CreateNewBlock(), then scrypt-hashes it with
// incrementing nonces until a solution is found or the block must be rebuilt
// (new transactions, new chain tip, nonce space exhausted, peers lost, or
// mining stopped/shut down).
void static BitcoinMiner(CWallet *pwallet)
{
    printf("BitcoinMiner started\n");
    SetThreadPriority(THREAD_PRIORITY_LOWEST);

    // Make this thread recognisable as the mining thread
    RenameThread("bitcoin-miner");

    // Each thread has its own key and counter
    CReserveKey reservekey(pwallet);
    unsigned int nExtraNonce = 0;

    while (fGenerateBitcoins)
    {
        if (fShutdown)
            return;
        // Don't mine while disconnected or still catching up with the chain.
        while (vNodes.empty() || IsInitialBlockDownload())
        {
            Sleep(1000);
            if (fShutdown)
                return;
            if (!fGenerateBitcoins)
                return;
        }

        //
        // Create new block
        //
        unsigned int nTransactionsUpdatedLast = nTransactionsUpdated;
        CBlockIndex* pindexPrev = pindexBest;

        auto_ptr<CBlock> pblock(CreateNewBlock(reservekey));
        if (!pblock.get())
            return;
        IncrementExtraNonce(pblock.get(), pindexPrev, nExtraNonce);

        printf("Running BitcoinMiner with %d transactions in block\n", pblock->vtx.size());

        //
        // Prebuild hash buffers
        //
        // 16-byte-aligned scratch buffers (see ScanHash_* alignment note above).
        char pmidstatebuf[32+16]; char* pmidstate = alignup<16>(pmidstatebuf);
        char pdatabuf[128+16]; char* pdata = alignup<16>(pdatabuf);
        char phash1buf[64+16]; char* phash1 = alignup<16>(phash1buf);

        FormatHashBuffers(pblock.get(), pmidstate, pdata, phash1);

        // References into the byte-swapped header copy so the time/bits
        // updates at the bottom of the loop stay in sync with pblock.
        unsigned int& nBlockTime = *(unsigned int*)(pdata + 64 + 4);
        unsigned int& nBlockBits = *(unsigned int*)(pdata + 64 + 8);
        //unsigned int& nBlockNonce = *(unsigned int*)(pdata + 64 + 12);

        //
        // Search
        //
        int64 nStart = GetTime();
        uint256 hashTarget = CBigNum().SetCompact(pblock->nBits).getuint256();
        loop
        {
            unsigned int nHashesDone = 0;
            //unsigned int nNonceFound;

            uint256 thash;
            char scratchpad[SCRYPT_SCRATCHPAD_SIZE];
            // Inner scan: scrypt-hash the header in place while bumping
            // nNonce, breaking out every 256 attempts to run housekeeping.
            loop
            {
                scrypt_1024_1_1_256_sp(BEGIN(pblock->nVersion), BEGIN(thash), scratchpad);

                if (thash <= hashTarget)
                {
                    // Found a solution
                    SetThreadPriority(THREAD_PRIORITY_NORMAL);
                    CheckWork(pblock.get(), *pwalletMain, reservekey);
                    SetThreadPriority(THREAD_PRIORITY_LOWEST);
                    break;
                }
                pblock->nNonce += 1;
                nHashesDone += 1;
                if ((pblock->nNonce & 0xFF) == 0)
                    break;
            }

            // Meter hashes/sec
            static int64 nHashCounter;
            if (nHPSTimerStart == 0)
            {
                nHPSTimerStart = GetTimeMillis();
                nHashCounter = 0;
            }
            else
                nHashCounter += nHashesDone;
            if (GetTimeMillis() - nHPSTimerStart > 4000)
            {
                static CCriticalSection cs;
                {
                    LOCK(cs);
                    // Re-checked inside the lock so only one thread updates
                    // the shared meter per 4-second window.
                    if (GetTimeMillis() - nHPSTimerStart > 4000)
                    {
                        dHashesPerSec = 1000.0 * nHashCounter / (GetTimeMillis() - nHPSTimerStart);
                        nHPSTimerStart = GetTimeMillis();
                        nHashCounter = 0;
                        string strStatus = strprintf(" %.0f khash/s", dHashesPerSec/1000.0);
                        static int64 nLogTime;
                        // Log the hash rate at most every 30 minutes.
                        if (GetTime() - nLogTime > 30 * 60)
                        {
                            nLogTime = GetTime();
                            printf("%s ", DateTimeStrFormat("%x %H:%M", GetTime()).c_str());
                            printf("hashmeter %3d CPUs %6.0f khash/s\n", vnThreadsRunning[THREAD_MINER], dHashesPerSec/1000.0);
                        }
                    }
                }
            }

            // Check for stop or if block needs to be rebuilt
            if (fShutdown)
                return;
            if (!fGenerateBitcoins)
                return;
            if (fLimitProcessors && vnThreadsRunning[THREAD_MINER] > nLimitProcessors)
                return;
            if (vNodes.empty())
                break;
            if (pblock->nNonce >= 0xffff0000)
                break;
            if (nTransactionsUpdated != nTransactionsUpdatedLast && GetTime() - nStart > 60)
                break;
            if (pindexPrev != pindexBest)
                break;

            // Update nTime every few seconds
            pblock->UpdateTime(pindexPrev);
            nBlockTime = ByteReverse(pblock->nTime);
            if (fTestNet)
            {
                // Changing pblock->nTime can change work required on testnet:
                nBlockBits = ByteReverse(pblock->nBits);
                hashTarget = CBigNum().SetCompact(pblock->nBits).getuint256();
            }
        }
    }
}
// Thread entry point wrapping BitcoinMiner(): maintains the running-thread
// count, reports uncaught exceptions, and resets the hash meter once the
// last miner thread has exited.  parg is the CWallet* to mine for.
void static ThreadBitcoinMiner(void* parg)
{
    CWallet* pwallet = (CWallet*)parg;
    try
    {
        vnThreadsRunning[THREAD_MINER]++;
        BitcoinMiner(pwallet);
        vnThreadsRunning[THREAD_MINER]--;
    }
    // Each handler decrements the counter itself because the normal
    // decrement above is skipped when BitcoinMiner() throws.
    catch (std::exception& e) {
        vnThreadsRunning[THREAD_MINER]--;
        PrintException(&e, "ThreadBitcoinMiner()");
    } catch (...) {
        vnThreadsRunning[THREAD_MINER]--;
        PrintException(NULL, "ThreadBitcoinMiner()");
    }
    nHPSTimerStart = 0;
    if (vnThreadsRunning[THREAD_MINER] == 0)
        dHashesPerSec = 0;
    printf("ThreadBitcoinMiner exiting, %d threads remaining\n", vnThreadsRunning[THREAD_MINER]);
}
// Starts or stops CPU mining for pwallet.  Reads the -genproclimit option
// (0 disables mining, -1 means unlimited) and spawns enough
// ThreadBitcoinMiner threads to reach the configured processor count.
void GenerateBitcoins(bool fGenerate, CWallet* pwallet)
{
    fGenerateBitcoins = fGenerate;
    nLimitProcessors = GetArg("-genproclimit", -1);
    if (nLimitProcessors == 0)
        fGenerateBitcoins = false;
    fLimitProcessors = (nLimitProcessors != -1);

    if (!fGenerate)
        return;

    // One thread per hardware core, clamped by the processor limit.
    int nProcessors = boost::thread::hardware_concurrency();
    printf("%d processors\n", nProcessors);
    if (nProcessors < 1)
        nProcessors = 1;
    if (fLimitProcessors && nProcessors > nLimitProcessors)
        nProcessors = nLimitProcessors;

    // Spawn only the difference between the target and what is already running.
    int nAddThreads = nProcessors - vnThreadsRunning[THREAD_MINER];
    printf("Starting %d BitcoinMiner threads\n", nAddThreads);
    for (int i = 0; i < nAddThreads; i++)
    {
        if (!CreateThread(ThreadBitcoinMiner, pwallet))
            printf("Error: CreateThread(ThreadBitcoinMiner) failed\n");
        Sleep(10);
    }
}
| {
"content_hash": "541167d6561c8bf44546712183d10062",
"timestamp": "",
"source": "github",
"line_count": 3909,
"max_line_length": 280,
"avg_line_length": 33.3983115886416,
"alnum_prop": 0.5728664001102992,
"repo_name": "piggiecoin/piggiecoin",
"id": "7731ec3b5e2c762241e00fb526e111cebfd390b7",
"size": "131160",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "78622"
},
{
"name": "C++",
"bytes": "1381958"
},
{
"name": "IDL",
"bytes": "10997"
},
{
"name": "Objective-C",
"bytes": "2463"
},
{
"name": "Python",
"bytes": "47538"
},
{
"name": "Shell",
"bytes": "1402"
},
{
"name": "TypeScript",
"bytes": "3810608"
}
],
"symlink_target": ""
} |
const ActionWindow = require('../../../server/game/gamesteps/actionwindow.js');
const Game = require('../../../server/game/game.js');
const Player = require('../../../server/game/player.js');
const Settings = require('../../../server/settings.js');
// Specs for ActionWindow, the game step that prompts each player in
// first-player order to take actions until all players have passed.
describe('ActionWindow', function() {
    beforeEach(function() {
        // Minimal two-player game fixture; gameService.save is stubbed out.
        this.gameService = jasmine.createSpyObj('gameService', ['save']);
        this.game = new Game({ owner: {} }, { gameService: this.gameService });
        this.player1 = new Player('1', Settings.getUserWithDefaultsSet({ username: 'Player 1' }), true, this.game);
        this.player2 = new Player('2', Settings.getUserWithDefaultsSet({ username: 'Player 2' }), false, this.game);
        // Player 2 is first player, so the window should prompt them first.
        this.player2.firstPlayer = true;
        this.game.playersAndSpectators[this.player1.name] = this.player1;
        this.game.playersAndSpectators[this.player2.name] = this.player2;
        // Opt player 1 in to the 'test' window; later suites override these
        // flags to exercise the disabled-window paths.
        this.player1.promptedActionWindows['test'] = true;
        this.prompt = new ActionWindow(this.game, 'Test Window', 'test');
    });

    it('should prompt in first player order', function() {
        expect(this.prompt.currentPlayer).toBe(this.player2);
    });

    describe('onMenuCommand()', function() {
        describe('when it is the current player', function() {
            beforeEach(function() {
                // The currently prompted player passes.
                this.prompt.onMenuCommand(this.player2);
            });

            it('should make the next player be the current player', function() {
                expect(this.prompt.currentPlayer).toBe(this.player1);
            });
        });

        describe('when it is not the current player', function() {
            beforeEach(function() {
                // A pass arriving out of turn should be ignored.
                this.prompt.onMenuCommand(this.player1);
            });

            it('should not change the current player', function() {
                expect(this.prompt.currentPlayer).toBe(this.player2);
            });
        });
    });

    describe('markActionAsTaken()', function() {
        describe('when a player takes an action', function() {
            beforeEach(function() {
                // Complete the window for player 2
                this.prompt.onMenuCommand(this.player2);
                // Player 1 takes an action
                this.prompt.markActionAsTaken(this.player1);
            });

            it('should rotate the current player', function() {
                expect(this.prompt.currentPlayer).toBe(this.player2);
            });

            it('should re-prompt other players once the current player is done', function() {
                this.prompt.onMenuCommand(this.player2);
                expect(this.prompt.currentPlayer).toBe(this.player1);
                expect(this.prompt.isComplete()).toBe(false);
            });

            it('should require two consecutive passes before completing', function() {
                // Complete without taking action
                this.prompt.onMenuCommand(this.player2);
                this.prompt.onMenuCommand(this.player1);
                expect(this.prompt.isComplete()).toBe(true);
            });
        });

        describe('when someone other than the current player takes an action', function() {
            beforeEach(function() {
                // Player 2 is first player, so player 1 takes their action out
                // of turn.
                this.prompt.markActionAsTaken(this.player1);
            });

            it('should rotate the current player', function() {
                // Since player 1 took their action out of turn, player 2 should
                // be prompted again for their action.
                expect(this.prompt.currentPlayer).toBe(this.player2);
            });
        });
    });

    describe('continue()', function() {
        describe('when not all players are done', function() {
            beforeEach(function() {
                this.prompt.onMenuCommand(this.player2);
            });

            it('should return false', function() {
                expect(this.prompt.continue()).toBe(false);
            });
        });

        describe('when all players are done', function() {
            beforeEach(function() {
                this.prompt.onMenuCommand(this.player2);
                this.prompt.onMenuCommand(this.player1);
            });

            it('should return true', function() {
                expect(this.prompt.continue()).toBe(true);
            });
        });

        describe('when only the second player has the window enabled', function() {
            beforeEach(function() {
                this.player1.promptedActionWindows['test'] = true;
                this.player2.promptedActionWindows['test'] = false;
            });

            it('should prompt the first player even though the window is off', function() {
                this.prompt.continue();
                expect(this.prompt.currentPlayer).toBe(this.player2);
            });

            it('should not complete the prompt', function() {
                expect(this.prompt.continue()).toBe(false);
            });
        });

        describe('when both players have the window disabled', function() {
            beforeEach(function() {
                this.player1.promptedActionWindows['test'] = false;
                this.player2.promptedActionWindows['test'] = false;
            });

            it('should complete the prompt', function() {
                expect(this.prompt.continue()).toBe(true);
            });
        });
    });
});
| {
"content_hash": "9cecd7bb0def66b887e8aa926f469502",
"timestamp": "",
"source": "github",
"line_count": 141,
"max_line_length": 116,
"avg_line_length": 38.95035460992908,
"alnum_prop": 0.5593590677348871,
"repo_name": "cryogen/throneteki",
"id": "7ad5c0445f8f185221d6785d4a4f92eea327a5f5",
"size": "5492",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/server/gamesteps/actionwindow.spec.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1182"
},
{
"name": "JavaScript",
"bytes": "2370647"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html>
<head>
<title> TFI test</title>
<link rel="stylesheet" type="text/css" href="css/test.css">
<link rel="stylesheet" href="//netdna.bootstrapcdn.com/bootstrap/3.1.1/css/bootstrap.min.css">
</head>
<body>
<div class="container">
<h1> Protocolos de Transferencia de Informacion - TFI</h1>
<div class='row'>
<div class='col-md-4'>
<div class=".form-control">
<strong>Server:</strong><input type='text' id='wsUri' placeholder='http://server:port'>
<br><br>
<button type='button' class='btn btn-success' id='connect' >Connect</button>
<button type='button' class='btn btn-danger' id='disconnect' >Disconnect</button><br>
<h4>
<span id='latency'>Latency Check Test </span> <br />
Latency msg: <span id="latency-msg"></span>ms <br />
Latency cursor: <span id="latency-cur"></span>ms
</h4>
</div>
</div>
<div class="col-md-8">
<div id='cont' style="border:4px solid black;width:600px;height:200px;overflow:scroll;" >
<table class='table' id='box'>
</table>
</div><br>
<div id='mydiv'></div>
</div>
</div>
<div class='row'>
<div class='col-md-6'>
<h3>WEBSOCKET TEST</h3>
<div id='chat'>
<strong>Message Test:</strong>
<input type="text" id="msg" placeholder='message'>
<button type='button' class='btn btn-success' id="sendMsgWs" >Send!</button>
</div>
<br>
<br><br>
<strong>Cursor Follow Test :</strong><br>
<input type='button' id='enableCursorWs'> Cursor follow (WS) </input> <input type='button' id='disableCursorWs'> Disable cursor</input>
</div>
<div class='col-md-6'>
<h3>AJAX-HTTP TEST</h3>
<strong>Message Test :</strong>
<input type="text" id="msgAjax" placeholder='message'>
<button type='button' class='btn btn-success' id="sendMsgAjax">Send!</button>
<br><br>
<strong>Cursor Follow Test :</strong><br>
<input type='button' id='enableCursorAjax'> Cursor follow (Ajax)</input>
<input type='button' id='disableCursorAjax'> Disable cursor</input>
<br><br>
<strong>GET loop (for msgs and positions):</strong>
<input type='text' id='getMsg' placeholder='get every ... (ms)'>
</div>
</div>
<script src="/socket.io/socket.io.js"></script>
<script type='text/javascript' src='wsClientHandler.js'></script>
<script type="text/javascript" src='httpClientHandler.js'></script>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.0/jquery.min.js"></script>
<script src="//netdna.bootstrapcdn.com/bootstrap/3.1.1/js/bootstrap.min.js"></script>
</body>
</html> | {
"content_hash": "90926db9cb6a26295b1c949e5038ef52",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 148,
"avg_line_length": 38.14666666666667,
"alnum_prop": 0.5784690667598742,
"repo_name": "lgargantini/tfi",
"id": "36bc6ef086ad63e2ee40d6f8ea4cf3da34dca504",
"size": "2861",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "views/index.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "7093"
},
{
"name": "HTML",
"bytes": "38118"
},
{
"name": "JavaScript",
"bytes": "19734"
},
{
"name": "Shell",
"bytes": "2548"
}
],
"symlink_target": ""
} |
package org.usfirst.frc.team339.HardwareInterfaces.transmission;
import edu.wpi.first.wpilibj.SpeedController;
public class TransmissionFourWheel extends Transmission
{
private final SpeedController rightRearSpeedController;
private MotorDirection rightRearMotorDirection =
MotorDirection.REVERSED;
private final SpeedController leftRearSpeedController;
private MotorDirection leftRearMotorDirection = MotorDirection.FORWARD;
/**
* Transmission object to control a four-wheel drive.
*
* @param rightFrontSpeedController
* @param rightRearSpeedController
* @param leftFrontSpeedController
* @param leftRearSpeedController
*
* @author Noah Golmant
* @written 23 July 2015
*/
public TransmissionFourWheel (SpeedController rightFrontSpeedController,
SpeedController rightRearSpeedController,
SpeedController leftFrontSpeedController,
SpeedController leftRearSpeedController)
{
super(rightFrontSpeedController, leftFrontSpeedController);
this.rightRearSpeedController = rightRearSpeedController;
this.leftRearSpeedController = leftRearSpeedController;
}
/**
* Drives the transmission in a four wheel drive .
* rightJoystickVal controls both right motors, and vice versa for the left.
* It scales it according to our deadband and the current gear, then
* makes sure we're not out of our allowed motor value ranges.
*
* @param rightJoystickVal
* joystick input for the right motor(s)
* @param leftJoystickVal
* joystick input for the left motor(s)
*
* @author Noah Golmant
* @written 9 July 2015
*/
@Override
public void drive (double rightJoystickVal, double leftJoystickVal)
{
// Get the scaled versions of our joystick values
double scaledRightVal = this.scaleJoystickValue(rightJoystickVal);
double scaledLeftVal = this.scaleJoystickValue(leftJoystickVal);
// Make sure they fit within our allowed motor ranges (just in case)
// make them a max/min of +1.0/-1.0 to send to the motor
scaledRightVal = this.limit(scaledRightVal);
scaledLeftVal = this.limit(scaledLeftVal);
// check if either joystick is reversed
if (this.isLeftJoystickReversed() == true)
{
scaledRightVal *= -1.0;
}
if (this.isRightJoystickReversed() == true)
{
scaledLeftVal *= -1.0;
}
if ((this.getDebugState() == DebugState.DEBUG_MOTOR_DATA) ||
(this.getDebugState() == DebugState.DEBUG_ALL))
{
System.out
.println("drive():\tRF: " + scaledRightVal + "\tLF: " +
scaledLeftVal);
}
// send the scaled values to the motors
this.driveRightMotor(scaledRightVal);
this.driveRightRearMotor(scaledRightVal);
this.driveLeftMotor(scaledLeftVal);
this.driveLeftRearMotor(scaledLeftVal);
}
/**
* Sets the left motor to the given value based on
* its given direction.
*
* @param motorValue
* The motor value we want to send
*
* @author Noah Golmant
* @date 9 July 2015
*/
//"Anything can be solved with a big enough hammer, if not elegantly." -Michael
protected void driveLeftRearMotor (double motorValue)
{
if (this.leftRearSpeedController == null)
{
if (this.getDebugState() == DebugState.DEBUG_MOTOR_DATA)
{
System.out
.println(
"Left rear motor is null in driveLeftRearMotor()");
}
return;
}
motorValue = this.limit(motorValue);
this.leftRearSpeedController.set(motorValue *
this.leftRearMotorDirection.val);
}
/**
* Sets the right motor to the given value based on
* its given direction.
*
* @param motorValue
* The motor value we want to send
*
* @author Noah Golmant
* @date 9 July 2015
*/
protected void driveRightRearMotor (double motorValue)
{
if (this.rightRearSpeedController == null)
{
if (this.getDebugState() == DebugState.DEBUG_MOTOR_DATA)
{
System.out
.println(
"Right rear motor is null in driveRightRearMotor()");
}
return;
}
motorValue = this.limit(motorValue);
this.rightRearSpeedController.set(motorValue *
this.rightRearMotorDirection.val);
}
    /**
     * Gets whether or not the left rear motor is reversed
     *
     * @return the direction of the left rear motor
     */
    public MotorDirection getLeftRearMotorDirection ()
    {
        return this.leftRearMotorDirection;
    }
    /**
     * Gets whether or not the right rear motor is reversed
     *
     * @return the direction of the right rear motor
     */
    public MotorDirection getRightRearMotorDirection ()
    {
        return this.rightRearMotorDirection;
    }
    /**
     * Sets whether or not the left rear motor is reversed
     *
     * @param direction
     *            new direction of the left rear motor
     */
    public void setLeftRearMotorDirection (MotorDirection direction)
    {
        this.leftRearMotorDirection = direction;
    }
    /**
     * Sets whether or not the right rear motor is reversed
     *
     * @param direction
     *            new direction of the right rear motor
     */
    public void setRightRearMotorDirection (MotorDirection direction)
    {
        this.rightRearMotorDirection = direction;
    }
}
| {
"content_hash": "42ce0a0a3d7ecc14c2542f0086b19004",
"timestamp": "",
"source": "github",
"line_count": 195,
"max_line_length": 81,
"avg_line_length": 28.087179487179487,
"alnum_prop": 0.6574767208325726,
"repo_name": "FIRST-Team-339/2016",
"id": "8ca50adf31abb341ce9e40f2953c7b6f567a37af",
"size": "5477",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/org/usfirst/frc/team339/HardwareInterfaces/transmission/TransmissionFourWheel.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "759043"
}
],
"symlink_target": ""
} |
import { resetStyle } from '../../style';
import { repaint } from './repaint';
/**
 * Maps each vendor-prefixed CSS transition property name to the name of the
 * transition-end event fired by that rendering engine.
 */
export const TRANSITION_END_EVENTS: { [key: string]: string } = {
    'WebkitTransition': 'webkitTransitionEnd',
    'MozTransition': 'transitionend',
    'OTransition': 'oTransitionEnd',
    'msTransition': 'MSTransitionEnd',
    'transition': 'transitionend'
};
/**
 * Ignore the transition lifecycle to perform a callback, then restore the element's original transitions.
 *
 * @param element The element whose transitions are temporarily suppressed.
 * @param transitionProperty The camel-cased style property holding the transition (e.g. 'WebkitTransition').
 * @param callback Invoked while transitions are disabled, so its style changes apply instantly.
 */
export function ignoreTransitions(element: HTMLElement, transitionProperty: string, callback: () => void): void {
    let style: any = element.style;
    // Disable the transition so the callback's style changes are not animated.
    style[transitionProperty] = 'initial';
    callback();
    // Force a reflow so the instant changes commit before transitions return.
    repaint(element);
    resetStyle(style, transitionProperty);
}
| {
"content_hash": "d934090d441c23c566a8056c1794355d",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 113,
"avg_line_length": 33.5,
"alnum_prop": 0.6974219810040706,
"repo_name": "MikeBull94/zoom.ts",
"id": "20cc3dbbeee5621b71a6da46c7941011130ecb5a",
"size": "737",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/dom/element/ignoreTransitions.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2210"
},
{
"name": "HTML",
"bytes": "6651"
},
{
"name": "JavaScript",
"bytes": "2347"
},
{
"name": "TypeScript",
"bytes": "21507"
}
],
"symlink_target": ""
} |
package com.chartsack.charts;
import android.content.ContentProvider;
import android.content.ContentResolver;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Context;
import android.content.UriMatcher;
import android.database.Cursor;
import android.database.SQLException;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.database.sqlite.SQLiteQueryBuilder;
import android.net.Uri;
import android.text.TextUtils;
/**
 * Content provider that stores geocoded addresses (address text plus
 * latitude/longitude) in a local SQLite database, exposed under
 * {@code content://com.chartsack.charts.addrinfo/address}.
 *
 * @author zkhan
 */
public class AddressProvider extends ContentProvider {

    // fields for my content provider
    static final String PROVIDER_NAME = "com.chartsack.charts.addrinfo";
    static final String URL = "content://" + PROVIDER_NAME + "/address";
    static final Uri CONTENT_URI = Uri.parse(URL);

    // fields for the database
    static final String ID = "id";
    static final String ADDRESS = "address";
    static final String LONGITUDE = "longitude";
    static final String LATITUDE = "latitude";

    DBHelper dbHelper;

    // database declarations
    private SQLiteDatabase mSqliteDatabase;
    static final String DATABASE_NAME = "dbAddress";
    static final String TABLE_NAME = "address";
    static final int DATABASE_VERSION = 1;
    static final String CREATE_TABLE =
            " CREATE TABLE " + TABLE_NAME +
            " (" + ID + " INTEGER PRIMARY KEY AUTOINCREMENT, " +
            " " + ADDRESS + " TEXT NOT NULL, " +
            " " + LATITUDE + " DOUBLE NOT NULL, " +
            " " + LONGITUDE + " DOUBLE NOT NULL);";

    // integer values used in content URI
    static final int ADDRESS_ALL = 1;
    static final int ADDRESS_SINGLE = 2;

    static final UriMatcher uriMatcher;
    static {
        uriMatcher = new UriMatcher(UriMatcher.NO_MATCH);
        uriMatcher.addURI(PROVIDER_NAME, "address", ADDRESS_ALL);
        uriMatcher.addURI(PROVIDER_NAME, "address/#", ADDRESS_SINGLE);
    }

    /**
     * Creates and manages the provider's database.
     */
    private static class DBHelper extends SQLiteOpenHelper {

        public DBHelper(Context context) {
            super(context, DATABASE_NAME, null, DATABASE_VERSION);
        }

        @Override
        public void onCreate(SQLiteDatabase db) {
            db.execSQL(CREATE_TABLE);
        }

        @Override
        public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
            // Destructive upgrade: drops all stored rows and recreates.
            db.execSQL("DROP TABLE IF EXISTS " + TABLE_NAME);
            onCreate(db);
        }
    }

    @Override
    public boolean onCreate() {
        Context context = getContext();
        dbHelper = new DBHelper(context);
        // permissions to be writable
        mSqliteDatabase = dbHelper.getWritableDatabase();
        return mSqliteDatabase != null;
    }

    /**
     * Queries either all addresses or a single address by row id, sorted by
     * the address column when no sort order is supplied.
     */
    @Override
    public Cursor query(Uri uri, String[] projection, String selection,
            String[] selectionArgs, String sortOrder) {

        SQLiteQueryBuilder queryBuilder = new SQLiteQueryBuilder();
        // the TABLE_NAME to query on
        queryBuilder.setTables(TABLE_NAME);

        switch(uriMatcher.match(uri)) {
        // maps all database column names
        case ADDRESS_ALL:
            break;
        case ADDRESS_SINGLE:
            queryBuilder.appendWhere(ID + "=" + uri.getLastPathSegment());
            break;
        default:
            throw new IllegalArgumentException("Unknown URI " + uri);
        }

        // BUGFIX: the previous check used reference equality (sortOrder == "")
        // which never matches a caller-supplied empty string; compare content.
        if (TextUtils.isEmpty(sortOrder)) {
            // No sorting -> sort on names by default
            sortOrder = ADDRESS;
        }

        Cursor cursor = queryBuilder.query(mSqliteDatabase, projection, selection,
                selectionArgs, null, null, sortOrder);
        /**
         * register to watch a content URI for changes
         */
        cursor.setNotificationUri(getContext().getContentResolver(), uri);
        return cursor;
    }

    /**
     * Inserts a new address row and notifies observers of the new URI.
     */
    @Override
    public Uri insert(Uri uri, ContentValues values) {
        long row = mSqliteDatabase.insert(TABLE_NAME, "", values);
        // If record is added successfully
        if(row > 0) {
            Uri newUri = ContentUris.withAppendedId(CONTENT_URI, row);
            getContext().getContentResolver().notifyChange(newUri, null);
            return newUri;
        }
        throw new SQLException("Fail to add a new record into " + uri);
    }

    /**
     * Updates all matching rows, or a single row when the URI carries an id.
     */
    @Override
    public int update(Uri uri, ContentValues values, String selection, String[] selectionArgs) {
        int count = 0;
        switch (uriMatcher.match(uri)){
        case ADDRESS_ALL:
            count = mSqliteDatabase.update(TABLE_NAME, values, selection, selectionArgs);
            break;
        case ADDRESS_SINGLE:
            count = mSqliteDatabase.update(TABLE_NAME, values, ID +
                    " = " + uri.getLastPathSegment() +
                    (!TextUtils.isEmpty(selection) ? " AND (" +
                            selection + ')' : ""), selectionArgs);
            break;
        default:
            throw new IllegalArgumentException("Unsupported URI " + uri );
        }
        getContext().getContentResolver().notifyChange(uri, null);
        return count;
    }

    /**
     * Deletes all matching rows, or a single row when the URI carries an id.
     */
    @Override
    public int delete(Uri uri, String selection, String[] selectionArgs) {
        int count = 0;
        switch(uriMatcher.match(uri)) {
        case ADDRESS_ALL:
            count = mSqliteDatabase.delete(TABLE_NAME, selection, selectionArgs);
            break;
        case ADDRESS_SINGLE:
            String id = uri.getLastPathSegment(); //gets the id
            count = mSqliteDatabase.delete(TABLE_NAME, ID + " = " + id +
                    (!TextUtils.isEmpty(selection) ? " AND (" + selection + ')' : ""), selectionArgs);
            break;
        default:
            throw new IllegalArgumentException("Unsupported URI " + uri);
        }
        getContext().getContentResolver().notifyChange(uri, null);
        return count;
    }

    @Override
    public String getType(Uri uri) {
        switch (uriMatcher.match(uri)){
        // Get all records
        case ADDRESS_ALL:
            return ContentResolver.CURSOR_DIR_BASE_TYPE + "/vnd.com.chartsack.charts.addrinfo.address";
        // Get a particular records
        case ADDRESS_SINGLE:
            return ContentResolver.CURSOR_ITEM_BASE_TYPE + "/vnd.com.chartsack.charts.addrinfo.address";
        default:
            throw new IllegalArgumentException("Unsupported URI: " + uri);
        }
    }
}
| {
"content_hash": "a31ff154b8727ac680d976dd0130009c",
"timestamp": "",
"source": "github",
"line_count": 193,
"max_line_length": 105,
"avg_line_length": 35.33160621761658,
"alnum_prop": 0.5931954832086817,
"repo_name": "apps4av/MyCharts",
"id": "8254264ff0f7fc344b0133af0e76582f7e9cff13",
"size": "8161",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/com/chartsack/charts/AddressProvider.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "223974"
}
],
"symlink_target": ""
} |
using Microsoft.AspNetCore.Mvc;
using Spark.Web.Services;
namespace Spark.Web.Controllers
{
public class ResourcesController : Controller
{
public IActionResult Index()
{
return View();
}
}
}
| {
"content_hash": "d14d7e885672caa39449a2938c7e67b4",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 49,
"avg_line_length": 16.2,
"alnum_prop": 0.6172839506172839,
"repo_name": "furore-fhir/spark",
"id": "d9c5ba00dc5accde9d60c5fed9637dd5f85a82bb",
"size": "243",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Spark.Web/Controllers/ResourcesController.cs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "99"
},
{
"name": "Batchfile",
"bytes": "49"
},
{
"name": "C#",
"bytes": "912293"
},
{
"name": "CSS",
"bytes": "5040"
},
{
"name": "HTML",
"bytes": "5069"
},
{
"name": "JavaScript",
"bytes": "136201"
},
{
"name": "XSLT",
"bytes": "10021"
}
],
"symlink_target": ""
} |
$packageName = 'kvrt'
$url = 'http://devbuilds.kaspersky-labs.com/devbuilds/KVRT/latest/full/KVRT.exe'
$checksum = '4b55b20802088c7fd952023646845ed60c6fa68f'
$checksumType = 'sha1'
$toolsPath = "$(Split-Path -parent $MyInvocation.MyCommand.Definition)"
$installFile = Join-Path $toolsPath "kvrt.exe"
try {
  # Download is over http, but integrity is verified against the pinned
  # checksum above before the file is used.
  Get-ChocolateyWebFile -PackageName "$packageName" `
                        -FileFullPath "$installFile" `
                        -Url "$url" `
                        -Checksum "$checksum" `
                        -ChecksumType "$checksumType"

  # create empty sidecars so shimgen only creates one shim
  Set-Content -Path ("$installFile.ignore") `
              -Value $null

  # create batch to start executable (always accepting the EULA)
  $batchStart = Join-Path $toolsPath "kvrt.bat"
  'start kvrt.exe -accepteula' | Out-File -FilePath $batchStart -Encoding ASCII
  Install-BinFile "kvrt" "$batchStart"
} catch {
  # FIX: rethrow the current error record instead of `throw $_.Exception`,
  # which discarded the original script stack trace and error context.
  throw
}
"content_hash": "60bb82da52ae3f80bbb35dd55a001ba5",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 80,
"avg_line_length": 38.541666666666664,
"alnum_prop": 0.6486486486486487,
"repo_name": "dtgm/chocolatey-packages",
"id": "3af6e82833c144da1507dd7c6c75806511364bbd",
"size": "925",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "automatic/_output/kvrt/2015.12.22.0005/tools/chocolateyInstall.ps1",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AutoHotkey",
"bytes": "347616"
},
{
"name": "AutoIt",
"bytes": "13530"
},
{
"name": "Batchfile",
"bytes": "1404"
},
{
"name": "C#",
"bytes": "8134"
},
{
"name": "HTML",
"bytes": "80818"
},
{
"name": "PowerShell",
"bytes": "13124493"
}
],
"symlink_target": ""
} |
import React, {Component} from 'react'
import {Link, withRouter} from 'react-router-dom'
import MenuItem from 'material-ui/Menu/MenuItem';
import Divider from 'material-ui/Divider';
class NavBar extends Component {
constructor(props) {
super(props);
this.state = {
expandedSection: ""
}
this.toggleSection = this.toggleSection.bind(this);
}
render() {
return (
//TODO Replace this with an appropriate Expandanble Nested List
<div>
<MenuItem style={{marginLeft: '-8px', marginTop: '10px'}} onClick={this.toggleSection}>
<b>TASKS</b>
</MenuItem>
{((this.state.expandedSection).includes("TASKS")) ?
(<div>
<MenuItem>
<Link name="application_creation" to="/tasks/application_creation">APPLICATION CREATION</Link>
</MenuItem>
<MenuItem>
<Link name="subscription_creation" to="/tasks/subscription_creation">SUBSCRIPTION CREATION</Link>
</MenuItem>
<MenuItem>
<Link name="application_registration" to="/tasks/application_update">APPLICATION UPDATE</Link>
</MenuItem>
<MenuItem>
<Link name="api_state_change" to="/tasks/api_state">API STATE CHANGE</Link>
</MenuItem>
</div>) : <div/>
}
<Divider inset={true} style={{margin: '5px'}}/>
<MenuItem style={{marginLeft: '-8px', marginTop: '10px'}} onClick={this.toggleSection}><b>THROTTLING_POLICIES</b></MenuItem>
{((this.state.expandedSection).includes("THROTTLING_POLICIES")) ?
(<div>
<MenuItem>
<Link name="advanced_throttling" to="/advanced_throttling">ADVANCED THROTTLING</Link>
</MenuItem>
<MenuItem>
<Link name="application_tiers" to="/application_tiers">APPLICATION TIERS</Link>
</MenuItem>
<MenuItem>
<Link name="subscription_tiers" to="/subscription_tiers">SUBSCRIPTION TIERS</Link>
</MenuItem>
<MenuItem>
<Link name="custom_rules" to="/custom_rules">CUSTOM RULES</Link>
</MenuItem>
<MenuItem>
<Link name="black_list" to="/black_list">BLACK LIST</Link>
</MenuItem>
</div>) : <div/>
}
<Divider inset={true} style={{margin: '5px'}}/>
<MenuItem style={{marginLeft: '-8px', marginTop: '10px'}} onClick={this.toggleSection}>
<b>SETTINGS</b>
</MenuItem>
<Divider inset={true} style={{margin: '5px'}}/>
<MenuItem style={{marginLeft: '-8px', marginTop: '10px'}} onClick={this.toggleSection}>
<b>LOG ANALYZER</b>
</MenuItem>
<Divider inset={true} style={{margin: '5px'}}/>
</div>
)
}
toggleSection(event) {
console.log("inner text:" + event.target.innerText);
this.setState({
expandedSection: event.target.innerText
});
}
}
export default withRouter(NavBar)
| {
"content_hash": "e5c20dc3a62e530c1e82af94095e6b58",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 140,
"avg_line_length": 42.16279069767442,
"alnum_prop": 0.47876447876447875,
"repo_name": "lakmali/carbon-apimgt",
"id": "0f69f349e5626523a1806f2bb348df8c0e014d8b",
"size": "4295",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "features/apimgt/org.wso2.carbon.apimgt.admin.feature/src/main/resources/admin/source/src/app/components/NavBar.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1100901"
},
{
"name": "Groovy",
"bytes": "3173"
},
{
"name": "HTML",
"bytes": "274853"
},
{
"name": "Java",
"bytes": "5174356"
},
{
"name": "JavaScript",
"bytes": "4720824"
},
{
"name": "PLSQL",
"bytes": "19363"
}
],
"symlink_target": ""
} |
<?php
namespace Google\AdsApi\AdWords\v201702\cm;
/**
 * A label whose contents are plain text, attachable to AdWords entities.
 *
 * This file was generated from WSDL. DO NOT EDIT.
 */
class TextLabel extends \Google\AdsApi\AdWords\v201702\cm\Label
{

    /**
     * All arguments are optional and are forwarded unchanged to the parent
     * Label constructor.
     *
     * @param int $id
     * @param string $name
     * @param string $status
     * @param \Google\AdsApi\AdWords\v201702\cm\LabelAttribute $attribute
     * @param string $LabelType
     */
    public function __construct($id = null, $name = null, $status = null, $attribute = null, $LabelType = null)
    {
        parent::__construct($id, $name, $status, $attribute, $LabelType);
    }

}
| {
"content_hash": "8a4590ec195548dc88509405aa6832ea",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 111,
"avg_line_length": 24.083333333333332,
"alnum_prop": 0.6349480968858131,
"repo_name": "jeraldfeller/jbenterprises",
"id": "46b3fd980b6773459ae6a82a815587532e14a592",
"size": "578",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "google-adwords/vendor/googleads/googleads-php-lib/src/Google/AdsApi/AdWords/v201702/cm/TextLabel.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "137"
},
{
"name": "CSS",
"bytes": "4465774"
},
{
"name": "CoffeeScript",
"bytes": "83631"
},
{
"name": "HTML",
"bytes": "2549782"
},
{
"name": "JavaScript",
"bytes": "17552996"
},
{
"name": "PHP",
"bytes": "3092947"
},
{
"name": "Shell",
"bytes": "444"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/switchfly.iml" filepath="$PROJECT_DIR$/switchfly.iml" />
<module fileurl="file://$PROJECT_DIR$/compress/switchfly-compress.iml" filepath="$PROJECT_DIR$/compress/switchfly-compress.iml" />
<module fileurl="file://$PROJECT_DIR$/inputvalidation/switchfly-inputvalidation.iml" filepath="$PROJECT_DIR$/inputvalidation/switchfly-inputvalidation.iml" />
</modules>
</component>
</project>
| {
"content_hash": "f3dfcedfcc67ce47bd79b1582d8f5792",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 164,
"avg_line_length": 51.09090909090909,
"alnum_prop": 0.7135231316725978,
"repo_name": "switchfly/switchfly-java",
"id": "220a85d2dc30ffb67ad192c71cd5efc350800f64",
"size": "562",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": ".idea/modules.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "179045"
},
{
"name": "JavaScript",
"bytes": "68280"
}
],
"symlink_target": ""
} |
/*
* This is a manifest file that'll automatically include all the stylesheets available in this directory
* and any sub-directories. You're free to add application-wide styles to this file and they'll appear at
* the top of the compiled file, but it's generally better to create a new file per style scope.
*= require_self
*= require_tree .
*= require chosen
*/
| {
"content_hash": "eba4c778e232baae005411cc9830f260",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 105,
"avg_line_length": 46.125,
"alnum_prop": 0.7479674796747967,
"repo_name": "lucianot/dealbook",
"id": "bb62629f138706474efc93614a0b720c179023ee",
"size": "369",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/assets/stylesheets/application.css",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "10007"
},
{
"name": "CoffeeScript",
"bytes": "4398"
},
{
"name": "HTML",
"bytes": "42565"
},
{
"name": "JavaScript",
"bytes": "27663"
},
{
"name": "Ruby",
"bytes": "145497"
}
],
"symlink_target": ""
} |
using MtgApiManager.Lib.Service;
using Xunit;
namespace MtgApiManager.Lib.Test.Service
{
public class SetQueryParameterTests
{
[Fact]
public void SetQueryParameter_BlockSet_Success()
{
// arrange
const string BLOCK = "block1";
var queryParams = new SetQueryParameter
{
// act
Block = BLOCK
};
// assert
Assert.Equal(BLOCK, queryParams.Block);
}
[Fact]
public void SetQueryParameter_NameSet_Success()
{
// arrange
const string NAME = "name1";
var queryParams = new SetQueryParameter
{
// act
Name = NAME
};
// assert
Assert.Equal(NAME, queryParams.Name);
}
}
} | {
"content_hash": "31507100506bc348be988e520c087cf8",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 56,
"avg_line_length": 22.94736842105263,
"alnum_prop": 0.481651376146789,
"repo_name": "MagicTheGathering/mtg-sdk-dotnet",
"id": "01e5c8682963b97b7eb855339acb2d11ee45caee",
"size": "874",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/MtgApiManager.Lib.Test/Service/SetQueryParameterTests.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "207"
},
{
"name": "C#",
"bytes": "172259"
},
{
"name": "PowerShell",
"bytes": "2958"
},
{
"name": "Shell",
"bytes": "2280"
}
],
"symlink_target": ""
} |
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<ListView
android:id="@+id/weatherListView"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_alignParentLeft="true"
android:layout_alignParentStart="true"
android:layout_alignParentTop="true" >
</ListView>
</RelativeLayout>
| {
"content_hash": "a1f05f36b462b4dcbb1b9dd8b7d1f7b7",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 74,
"avg_line_length": 32,
"alnum_prop": 0.6875,
"repo_name": "UO-CIS/CIS399AndroidDemos",
"id": "78537cdfb053dab57065caff2a55ecbc8eb041c7",
"size": "480",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "WeatherForecast-SAX+ListView/app/src/main/res/layout/activity_main.xml",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "140880"
}
],
"symlink_target": ""
} |
using CharacterGen.CharacterClasses;
using CharacterGen.Domain.Tables;
using CharacterGen.Feats;
using NUnit.Framework;
namespace CharacterGen.Tests.Integration.Tables.Feats.Requirements.Classes
{
[TestFixture]
public class SpellMasteryClassRequirementsTests : AdjustmentsTests
{
protected override string tableName
{
get
{
return string.Format(TableNameConstants.Formattable.Adjustments.FEATClassRequirements, FeatConstants.SpellMastery);
}
}
[Test]
public override void CollectionNames()
{
var classes = new[] { CharacterClassConstants.Wizard };
AssertCollectionNames(classes);
}
[TestCase(CharacterClassConstants.Wizard, 1)]
public override void Adjustment(string name, int adjustment)
{
base.Adjustment(name, adjustment);
}
}
}
| {
"content_hash": "dd32780975f514a3f3758ecde39c05e4",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 131,
"avg_line_length": 29,
"alnum_prop": 0.6519396551724138,
"repo_name": "DnDGen/CharacterGen",
"id": "ce18caacc572c8e8d74a7c02e7e809a3988d1e35",
"size": "930",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CharacterGen.Tests.Integration.Tables/Feats/Requirements/Classes/SpellMasteryClassRequirementsTests.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "4214731"
},
{
"name": "Shell",
"bytes": "689"
}
],
"symlink_target": ""
} |
package org.jsimpledb.kv.raft;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
 * Contains the information required to commit a new entry to the log.
 *
 * <p>Each instance pairs the in-memory {@link LogEntry.Data} with a temporary
 * file holding the serialized form of that data.
 */
class NewLogEntry {

    private final LogEntry.Data data;
    private final File tempFile;

    /**
     * Create an instance from a transaction and an existing temporary file.
     *
     * @param tx transaction whose writes and config change form the entry
     * @param tempFile temporary file already holding the serialized data
     * @throws IOException if an I/O error occurs
     */
    public NewLogEntry(RaftKVTransaction tx, File tempFile) throws IOException {
        this.data = new LogEntry.Data(tx.getMutableView().getWrites(), tx.getConfigChange());
        this.tempFile = tempFile;
    }

    /**
     * Create an instance from a transaction, serializing its mutations into a
     * fresh temporary file in the database's log directory.
     *
     * @param tx transaction whose writes and config change form the entry
     * @throws IOException if writing the temporary file fails
     */
    public NewLogEntry(RaftKVTransaction tx) throws IOException {
        this.data = new LogEntry.Data(tx.getMutableView().getWrites(), tx.getConfigChange());
        this.tempFile = File.createTempFile(RaftKVDatabase.TEMP_FILE_PREFIX,
          RaftKVDatabase.TEMP_FILE_SUFFIX, tx.getKVDatabase().logDir);
        try (FileWriter output = new FileWriter(this.tempFile)) {
            LogEntry.writeData(output, data);
        }
    }

    /**
     * Create an instance from a {@link LogEntry.Data} object, serializing it
     * into a fresh temporary file in the database's log directory.
     *
     * @param raft database whose log directory receives the temporary file
     * @param data mutation data
     * @throws IOException if writing the temporary file fails
     */
    public NewLogEntry(RaftKVDatabase raft, LogEntry.Data data) throws IOException {
        this.data = data;
        this.tempFile = File.createTempFile(RaftKVDatabase.TEMP_FILE_PREFIX, RaftKVDatabase.TEMP_FILE_SUFFIX, raft.logDir);
        try (FileWriter output = new FileWriter(this.tempFile)) {
            LogEntry.writeData(output, data);
        }
    }

    /**
     * Create an instance from serialized data in a {@link ByteBuffer}.
     *
     * @param raft database whose log directory receives the temporary file
     * @param dataBuf buffer containing serialized mutations
     * @throws IOException if writing or re-reading the temporary file fails
     */
    public NewLogEntry(RaftKVDatabase raft, ByteBuffer dataBuf) throws IOException {

        // Copy data to temporary file
        this.tempFile = File.createTempFile(RaftKVDatabase.TEMP_FILE_PREFIX,
          RaftKVDatabase.TEMP_FILE_SUFFIX, raft.logDir);
        try (FileWriter output = new FileWriter(this.tempFile)) {
            while (dataBuf.hasRemaining())
                output.getFileOutputStream().getChannel().write(dataBuf);
        }

        // Avoid having two copies of the data in memory at once
        dataBuf = null;

        // Deserialize data from file back into memory
        try (BufferedInputStream input = new BufferedInputStream(new FileInputStream(tempFile), 4096)) {
            this.data = LogEntry.readData(input);
        }
    }

    public LogEntry.Data getData() {
        return this.data;
    }

    public File getTempFile() {
        return this.tempFile;
    }

    // Best-effort cleanup of the temporary file; the delete() result is
    // intentionally ignored.
    public void cancel() {
        this.tempFile.delete();
    }
}
| {
"content_hash": "f7af04526e06cb9f04a0e0ef7c42dd1e",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 123,
"avg_line_length": 31.257731958762886,
"alnum_prop": 0.6553430079155673,
"repo_name": "tempbottle/jsimpledb",
"id": "d0f1fe049c9655d6c774446e89803eef031a1b45",
"size": "3098",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/java/org/jsimpledb/kv/raft/NewLogEntry.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "11303"
},
{
"name": "HTML",
"bytes": "29770969"
},
{
"name": "Java",
"bytes": "3695737"
},
{
"name": "JavaScript",
"bytes": "25330"
},
{
"name": "XSLT",
"bytes": "26413"
}
],
"symlink_target": ""
} |
package mechanisms_test
| {
"content_hash": "02136aad4b3f5d28ac823c3e88663241",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 23,
"avg_line_length": 24,
"alnum_prop": 0.875,
"repo_name": "goxmpp/goxmpp",
"id": "185143db15e2ab92b03d704e84d2426cde4cb25d",
"size": "24",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "extensions/features/auth/mechanisms/mechanisms_test.go",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Go",
"bytes": "52169"
}
],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace DtWorkshop.Plugin.ID3
{
public delegate void OnRootChanged();
public class DtTree
{
public event OnRootChanged RootChanged = delegate { };
private DtBranch root;
public DtBranch Root
{
get { return root; }
set {
root = value;
if(root != null)
RootChanged();
}
}
}
//
public struct DtAttribute
{
public string Name;
public int Index;
public DtAttribute(string name, int index)
{
Name = name;
Index = index;
}
public override string ToString()
{
return Name;
}
}
//
    // Discriminates the three kinds of elements that make up a decision tree.
    public enum DtElementKind
    {
        Branch, // internal node testing an attribute
        Edge,   // connection between a branch and a child node
        Leaf    // terminal node holding a value
    }
    // Common base for all tree elements; Kind is set by each subclass's
    // constructor to identify the concrete element type.
    public abstract class DtElement
    {
        public DtElementKind Kind;
    }
public class DtEdge : DtElement
{
public DtBranch From;
public DtNode To;
public object Condition;
//
public DtEdge(DtBranch from, object condition, DtNode to)
{
Kind = DtElementKind.Edge;
From = from;
Condition = condition;
To = to;
}
public override string ToString()
{
return Condition.ToString();
}
}
public abstract class DtNode : DtElement
{
public DtEdge Edge;
public DtBranch Parent
{
get
{
if (Edge == null)
return null;
//else
return Edge.From;
}
}
public bool IsBranch
{
get { return Kind == DtElementKind.Branch; }
}
public bool IsLeaf
{
get { return Kind == DtElementKind.Leaf; }
}
}
public class DtBranch : DtNode
{
public DtAttribute Attribute;
public List<DtEdge> Edges = new List<DtEdge>();
//
public DtBranch(DtAttribute attribute)
{
Kind = DtElementKind.Branch;
Attribute = attribute;
}
public void AddChild(object condition, DtNode node)
{
DtEdge edge = new DtEdge(this, condition, node);
node.Edge = edge;
Edges.Add(edge);
}
public void AddChild(DtNode node)
{
DtEdge edge = node.Edge;
edge.From = this;
Edges.Add(edge);
}
public void RemoveChild(DtNode node)
{
DtEdge edge = Edges.Find( (e) => e.To == node);
Edges.Remove(edge);
}
public IEnumerable<DtNode> Children
{
get
{
return new ChildEnumerable(this);
}
}
public bool HasSingleParent { get { return Parent != null; } }
public bool HasSingleChild
{
get
{
if (Edges.Count != 1)
return false;
/*try
{
DtBranch child = Children.Single() as DtBranch;
}
catch (System.InvalidOperationException)
{
//Console.WriteLine("The collection does not contain exactly one element.");
return false;
}*/
return true;
}
}
/*public bool HasLeafChild
{
get
{
if (!Edges.Any())
return false;
if (Children.First().Kind != DtElementKind.Leaf)
return false;
return true;
}
}*/
public bool HasAllLeafChildren
{
get
{
if (!Edges.Any())
return false;
foreach (DtNode child in Children)
if (!child.IsLeaf)
return false;
return true;
}
}
public override string ToString()
{
return Attribute.ToString();
}
}
public class ChildEnumerable : IEnumerable<DtNode>
{
DtBranch Parent;
public ChildEnumerable(DtBranch parent){
Parent = parent;
}
System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
{
return GetEnumerator();
}
public IEnumerator<DtNode> GetEnumerator()
{
foreach (DtEdge edge in Parent.Edges)
{
yield return edge.To;
}
}
}
public class DtLeaf : DtNode
{
public object Value;
//
public DtLeaf(object value)
{
Kind = DtElementKind.Leaf;
Value = value;
}
public override string ToString()
{
return Value.ToString();
}
}
}
| {
"content_hash": "e71870a19fcd7d2d8b662c815c51b63c",
"timestamp": "",
"source": "github",
"line_count": 204,
"max_line_length": 96,
"avg_line_length": 25.068627450980394,
"alnum_prop": 0.45815408682049275,
"repo_name": "kfields/decision-tree-workshop",
"id": "fcc5d7461aa1a3eb9a09a663734f2e98b68e95ae",
"size": "5116",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "DecisionTreeWorkshop/Plugin/ID3/DtTree.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "113640"
}
],
"symlink_target": ""
} |
package org.myrobotlab.kinematics;
import java.util.HashMap;
import org.myrobotlab.service.IntegratedMovement;
/**
* This class will compute the center of gravity of the links composing a robot
*
* @author chris
*
*/
/**
 * Computes the center of gravity (CoG) of the links composing a robot.
 *
 * Link masses and relative centers of mass are registered with
 * {@link #setLinkMass(String, double, double)}; {@link #computeCoG} then
 * combines them with the link geometry from a CollisionDectection snapshot
 * into a mass-weighted average position.
 *
 * @author chris
 */
public class GravityCenter extends Thread {

  /** Mass of each link, keyed by link name. */
  private HashMap<String, Double> masses = new HashMap<String, Double>();
  /** Relative CoG position along each link (0.0 - 1.0), keyed by link name. */
  private HashMap<String, Double> cogs = new HashMap<String, Double>();
  private transient IntegratedMovement service;
  /** Last computed center of gravity; null until computeCoG() has run. */
  private Point cog;
  private Point cogTarget = new Point(0, 0, 0, 0, 0, 0);
  private int maxDistanceToCog = 25;

  public GravityCenter(IntegratedMovement im) {
    super("GravityCenter");
    service = im;
  }

  /**
   * Set the mass and center of mass of a link
   *
   * @param name
   *          name
   * @param mass
   *          mass
   * @param centerOfMass
   *          (0.0 - 1.0) representing where the center of mass is located, from
   *          the origin point. If you don't know, it's safe to put 0.5
   */
  public void setLinkMass(String name, double mass, double centerOfMass) {
    masses.put(name, mass);
    cogs.put(name, centerOfMass);
  }

  /**
   * Computes the robot's center of gravity as the mass-weighted average of the
   * registered links' individual CoG positions.
   *
   * @param cd
   *          collision snapshot providing link geometry; when null the
   *          service's current collisionItems are used
   * @return the computed center of gravity (also cached in this.cog)
   */
  public synchronized Point computeCoG(CollisionDectection cd) {
    if (cd == null) {
      cd = service.collisionItems;
    }
    double totalMass = 0;
    for (double mass : masses.values()) {
      totalMass += mass;
    }
    cog = new Point(0, 0, 0, 0, 0, 0);
    // Guard: with no registered masses the weighting below would divide by
    // zero and yield NaN coordinates; return the origin instead.
    if (totalMass == 0) {
      return cog;
    }
    for (CollisionItem ci : cd.getItems().values()) {
      if (cogs.containsKey(ci.getName())) {
        // Link CoG position: origin + unit(end - origin) * relativeCog * length
        Point icog = ci.getEnd().subtract(ci.getOrigin()).unitVector(1).multiplyXYZ(cogs.get(ci.getName())).multiplyXYZ(ci.getLength()).add(ci.getOrigin());
        // Accumulate this link's contribution, weighted by its mass fraction.
        double weight = masses.get(ci.getName()) / totalMass;
        cog = cog.add(icog.multiplyXYZ(weight));
      }
    }
    return cog;
  }

  public Point getCoG() {
    return cog;
  }

  public Point getCoGTarget() {
    return cogTarget;
  }

  public double getMaxDistanceToCog() {
    return maxDistanceToCog;
  }

  public void setCoGTarget(double x, double y, double z) {
    cogTarget = new Point(x, y, z, 0, 0, 0);
  }
}
| {
"content_hash": "7ca6af973e5cc5d029926134b38ad4b6",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 156,
"avg_line_length": 27.11764705882353,
"alnum_prop": 0.6030368763557483,
"repo_name": "MyRobotLab/myrobotlab",
"id": "f7fa8e87917b0d0e6d4ab58ae8b32529f324b5bd",
"size": "2305",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/main/java/org/myrobotlab/kinematics/GravityCenter.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1542"
},
{
"name": "C",
"bytes": "6677"
},
{
"name": "C++",
"bytes": "274868"
},
{
"name": "CSS",
"bytes": "83744"
},
{
"name": "GLSL",
"bytes": "757"
},
{
"name": "HTML",
"bytes": "374401"
},
{
"name": "Java",
"bytes": "7100082"
},
{
"name": "JavaScript",
"bytes": "1536187"
},
{
"name": "Propeller Spin",
"bytes": "14406"
},
{
"name": "Python",
"bytes": "191671"
},
{
"name": "Shell",
"bytes": "3547"
}
],
"symlink_target": ""
} |
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>parse_options::allow_comments</title>
<link rel="stylesheet" href="../../../../../../../doc/src/boostbook.css" type="text/css">
<meta name="generator" content="DocBook XSL Stylesheets V1.79.1">
<link rel="home" href="../../../index.html" title="Chapter 1. Boost.JSON">
<link rel="up" href="../boost__json__parse_options.html" title="parse_options">
<link rel="prev" href="max_depth.html" title="parse_options::max_depth">
<link rel="next" href="allow_trailing_commas.html" title="parse_options::allow_trailing_commas">
</head>
<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
<table cellpadding="2" width="100%"><tr>
<td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../../../../boost.png"></td>
<td align="center"><a href="../../../../../../../index.html">Home</a></td>
<td align="center"><a href="../../../../../../../libs/libraries.htm">Libraries</a></td>
<td align="center"><a href="http://www.boost.org/users/people.html">People</a></td>
<td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td>
<td align="center"><a href="../../../../../../../more/index.htm">More</a></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="max_depth.html"><img src="../../../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../boost__json__parse_options.html"><img src="../../../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../index.html"><img src="../../../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="allow_trailing_commas.html"><img src="../../../../../../../doc/src/images/next.png" alt="Next"></a>
</div>
<div class="section">
<div class="titlepage"><div><div><h5 class="title">
<a name="json.ref.boost__json__parse_options.allow_comments"></a><a class="link" href="allow_comments.html" title="parse_options::allow_comments">parse_options::allow_comments</a>
</h5></div></div></div>
<p>
<a class="indexterm" name="idm27219"></a>
</p>
<p>
Non-standard extension option.
</p>
<h6>
<a name="json.ref.boost__json__parse_options.allow_comments.h0"></a>
<span class="phrase"><a name="json.ref.boost__json__parse_options.allow_comments.synopsis"></a></span><a class="link" href="allow_comments.html#json.ref.boost__json__parse_options.allow_comments.synopsis">Synopsis</a>
</h6>
<pre class="programlisting"><span class="keyword">bool</span> <span class="identifier">allow_comments</span> <span class="special">=</span> <span class="keyword">false</span><span class="special">;</span>
</pre>
<h6>
<a name="json.ref.boost__json__parse_options.allow_comments.h1"></a>
<span class="phrase"><a name="json.ref.boost__json__parse_options.allow_comments.description"></a></span><a class="link" href="allow_comments.html#json.ref.boost__json__parse_options.allow_comments.description">Description</a>
</h6>
<p>
Allow C and C++ style comments to appear anywhere that whitespace is permissible.
</p>
<h6>
<a name="json.ref.boost__json__parse_options.allow_comments.h2"></a>
<span class="phrase"><a name="json.ref.boost__json__parse_options.allow_comments.see_also"></a></span><a class="link" href="allow_comments.html#json.ref.boost__json__parse_options.allow_comments.see_also">See
Also</a>
</h6>
<p>
<a class="link" href="../boost__json__basic_parser.html" title="basic_parser"><code class="computeroutput"><span class="identifier">basic_parser</span></code></a>,
<a class="link" href="../boost__json__stream_parser.html" title="stream_parser"><code class="computeroutput"><span class="identifier">stream_parser</span></code></a>.
</p>
</div>
<table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr>
<td align="left"></td>
<td align="right"><div class="copyright-footer">Copyright © 2019, 2020 Vinnie Falco<br>Copyright © 2020 Krystian Stasiowski<p>
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>)
</p>
</div></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="max_depth.html"><img src="../../../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../boost__json__parse_options.html"><img src="../../../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../index.html"><img src="../../../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="allow_trailing_commas.html"><img src="../../../../../../../doc/src/images/next.png" alt="Next"></a>
</div>
</body>
</html>
| {
"content_hash": "d92c00698490c24fb1d1bc3be688f5a9",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 473,
"avg_line_length": 68.8169014084507,
"alnum_prop": 0.6295538272615636,
"repo_name": "arangodb/arangodb",
"id": "dc4bc0e40ffc963164d922d2bf8697f09d440e9a",
"size": "4890",
"binary": false,
"copies": "3",
"ref": "refs/heads/devel",
"path": "3rdParty/boost/1.78.0/libs/json/doc/html/json/ref/boost__json__parse_options/allow_comments.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "61827"
},
{
"name": "C",
"bytes": "311036"
},
{
"name": "C++",
"bytes": "35149373"
},
{
"name": "CMake",
"bytes": "387268"
},
{
"name": "CSS",
"bytes": "210549"
},
{
"name": "EJS",
"bytes": "232160"
},
{
"name": "HTML",
"bytes": "23114"
},
{
"name": "JavaScript",
"bytes": "33841256"
},
{
"name": "LLVM",
"bytes": "15003"
},
{
"name": "NASL",
"bytes": "381737"
},
{
"name": "NSIS",
"bytes": "47138"
},
{
"name": "Pascal",
"bytes": "75391"
},
{
"name": "Perl",
"bytes": "9811"
},
{
"name": "PowerShell",
"bytes": "6806"
},
{
"name": "Python",
"bytes": "190515"
},
{
"name": "SCSS",
"bytes": "255542"
},
{
"name": "Shell",
"bytes": "133576"
},
{
"name": "TypeScript",
"bytes": "179074"
},
{
"name": "Yacc",
"bytes": "79620"
}
],
"symlink_target": ""
} |
<?php
/** Tests for MediaWiki languages/LanguageBe.php */
class LanguageBeTest extends LanguageClassesTestCase {

	/**
	 * Checks that convertPlural() picks the expected Belarusian plural form.
	 *
	 * @dataProvider providePlural
	 * @covers Language::convertPlural
	 */
	public function testPlural( $result, $value ) {
		$pluralForms = array( 'one', 'few', 'many', 'other' );
		$converted = $this->getLang()->convertPlural( $value, $pluralForms );
		$this->assertEquals( $result, $converted );
	}

	/**
	 * Checks that getPluralRuleType() agrees with the expected form name.
	 *
	 * @dataProvider providePlural
	 * @covers Language::getPluralRuleType
	 */
	public function testGetPluralRuleType( $result, $value ) {
		$ruleType = $this->getLang()->getPluralRuleType( $value );
		$this->assertEquals( $result, $ruleType );
	}

	/**
	 * Pairs of ( expected plural form, number ) for Belarusian.
	 */
	public static function providePlural() {
		$cases = array(
			array( 'one', 1 ),
			array( 'many', 11 ),
			array( 'one', 91 ),
			array( 'one', 121 ),
			array( 'few', 2 ),
			array( 'few', 3 ),
			array( 'few', 4 ),
			array( 'few', 334 ),
			array( 'many', 5 ),
			array( 'many', 15 ),
			array( 'many', 120 ),
		);
		return $cases;
	}
}
| {
"content_hash": "843c29f03d2e04af4a514886a165d35e",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 84,
"avg_line_length": 24.526315789473685,
"alnum_prop": 0.6137339055793991,
"repo_name": "BRL-CAD/web",
"id": "7bd586aff71f912b59370aefb3b9ad1f65a26d4f",
"size": "1032",
"binary": false,
"copies": "100",
"ref": "refs/heads/master",
"path": "wiki/tests/phpunit/languages/LanguageBeTest.php",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "46"
},
{
"name": "CSS",
"bytes": "2791896"
},
{
"name": "Gherkin",
"bytes": "8953"
},
{
"name": "HTML",
"bytes": "3386533"
},
{
"name": "Hack",
"bytes": "8554"
},
{
"name": "JavaScript",
"bytes": "6034530"
},
{
"name": "Less",
"bytes": "228870"
},
{
"name": "Makefile",
"bytes": "7564"
},
{
"name": "PHP",
"bytes": "85649990"
},
{
"name": "PLSQL",
"bytes": "47123"
},
{
"name": "PLpgSQL",
"bytes": "31942"
},
{
"name": "Perl",
"bytes": "28087"
},
{
"name": "Pug",
"bytes": "1857"
},
{
"name": "Python",
"bytes": "54812"
},
{
"name": "Ruby",
"bytes": "30411"
},
{
"name": "SCSS",
"bytes": "27565"
},
{
"name": "Shell",
"bytes": "8990"
},
{
"name": "Smarty",
"bytes": "9612"
}
],
"symlink_target": ""
} |
# for python 3.3:
'''
from mpc33 import Check
print(Check('MinecraftNameInHere'))
'''
# for python 2.7:
'''
from mpc27 import Check
print(Check('MinecraftNameInHere'))
''' | {
"content_hash": "154f6e049dfd40cdfa2ad82c95663a51",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 35,
"avg_line_length": 15.4,
"alnum_prop": 0.7077922077922078,
"repo_name": "Woolworths/MinecraftPremiumChecker",
"id": "fca69214a03fdc555c1f7adeaba4bb1e1806ec6d",
"size": "226",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1941"
}
],
"symlink_target": ""
} |
<?php
namespace ApiBundle\Exception;
use Symfony\Component\HttpKernel\Exception\HttpExceptionInterface;
/**
 * APIException interface.
 *
 * Marker interface for API-level exceptions; extends Symfony's
 * HttpExceptionInterface, so implementations expose an HTTP status code
 * and response headers to the kernel's exception handling.
 *
 * @author Roman Belousov <romanandreyvich@gmail.com>
 */
interface APIExceptionInterface extends HttpExceptionInterface
{
}
| {
"content_hash": "f7e6336c4639cca3913924804781a89a",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 66,
"avg_line_length": 18,
"alnum_prop": 0.7851851851851852,
"repo_name": "romanandreyvich/FamilyBudgetManagementSystem",
"id": "e3eaa6be4ed55605312c9958f0e704ce1b1fa757",
"size": "279",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/ApiBundle/Exception/APIExceptionInterface.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "593"
},
{
"name": "CSS",
"bytes": "1338"
},
{
"name": "HTML",
"bytes": "24800"
},
{
"name": "JavaScript",
"bytes": "9734"
},
{
"name": "PHP",
"bytes": "107854"
}
],
"symlink_target": ""
} |
<?php
use yii\db\Schema;
use yii\db\Migration;
class m150807_111927_books extends Migration
{
    /**
     * Creates the `books` table and links it to `authors` through a
     * cascading foreign key on `author_id`.
     */
    public function safeUp()
    {
        $this->createTable('books', [
            'id' => $this->primaryKey(),
            'name' => $this->string()->notNull(),
            'date_create' => $this->integer()->notNull()->defaultValue(0),
            'date_update' => $this->integer()->notNull()->defaultValue(0),
            'preview' => $this->string()->notNull()->defaultValue(''),
            'date' => $this->date()->notNull(),
            'author_id' => $this->integer(),
        ]);

        $this->addForeignKey(
            'fk_books_author_id',
            'books',
            'author_id',
            'authors',
            'id',
            'cascade',
            'cascade'
        );
    }

    /**
     * Reverts the migration by dropping the `books` table.
     */
    public function safeDown()
    {
        $this->dropTable('books');
    }
}
| {
"content_hash": "b9e0ccd0c04381c489f9eb5fa09cc696",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 112,
"avg_line_length": 30.23076923076923,
"alnum_prop": 0.5267175572519084,
"repo_name": "liverenemy/rgktest",
"id": "816a1361798ab948d3671ddeb3a20c9721860721",
"size": "786",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "console/migrations/m150807_111927_books.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1541"
},
{
"name": "CSS",
"bytes": "21178"
},
{
"name": "HTML",
"bytes": "28154"
},
{
"name": "JavaScript",
"bytes": "47027"
},
{
"name": "PHP",
"bytes": "179900"
}
],
"symlink_target": ""
} |
// Copyright (c) 2014 The Trident Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
//
#include <trident/timeout_manager_impl.h>
namespace trident {
// Constructs a stopped manager. _epoch_time anchors all tick arithmetic in
// timer_run(); ids start at 1. NOTE(review): _rectify_ticks (one timer
// granularity expressed in ticks) is initialized but not referenced in the
// code visible in this file — presumably used by calc_expiration().
TimeoutManagerImpl::TimeoutManagerImpl()
    : _is_running(false)
    , _epoch_time(ptime_now())
    , _last_ticks(0)
    , _rectify_ticks(time_duration_milliseconds(kTimerGranularity).ticks())
    , _next_id(1)
{
}
// Tears down the timer machinery if still running; stop() is idempotent and
// also clears any pending events (their callbacks run with status CLEARED).
TimeoutManagerImpl::~TimeoutManagerImpl()
{
    stop();
}
// Returns the running flag. NOTE(review): _is_running is a plain bool read
// without holding _start_stop_lock, so the value may be momentarily stale
// when observed from another thread.
bool TimeoutManagerImpl::is_running()
{
    return _is_running;
}
// Starts the timer machinery: a thread group of kThreadCount threads plus a
// TimerWorker that invokes timer_run() every kTimerGranularity milliseconds.
// Idempotent under _start_stop_lock. Binding shared_from_this() keeps this
// object alive for as long as the worker holds the work routine.
void TimeoutManagerImpl::start()
{
    ScopedLocker<MutexLock> _(_start_stop_lock);
    if (_is_running)
        return;
    _is_running = true;
    _thread_group.reset(new ThreadGroupImpl(kThreadCount));
    _thread_group->start();
    _timer_worker.reset(new TimerWorker(_thread_group->io_service()));
    _timer_worker->set_time_duration(time_duration_milliseconds(kTimerGranularity));
    _timer_worker->set_work_routine(boost::bind(
                &TimeoutManagerImpl::timer_run, shared_from_this(), _1));
    _timer_worker->start();
}
// Stops and tears down the timer machinery. Order matters: the worker is
// stopped before the thread group so no further timer_run() ticks fire, then
// pending events are cleared (callbacks run with status CLEARED) before the
// worker and thread group are released. Idempotent under _start_stop_lock.
void TimeoutManagerImpl::stop()
{
    ScopedLocker<MutexLock> _(_start_stop_lock);
    if (!_is_running) return;
    _is_running = false;
    _timer_worker->stop();
    _thread_group->stop();
    clear();
    _timer_worker.reset();
    _thread_group.reset();
}
// Removes every pending event and runs each callback with status CLEARED.
// The events are snapshotted under _timeouts_lock and the callbacks invoked
// only after the lock is released, so a callback may safely re-enter this
// manager. Non-self-delete callbacks (repeating events) are deleted here;
// self-delete ones are expected to free themselves inside Run().
void TimeoutManagerImpl::clear()
{
    EventVec cleared;
    {
        ScopedLocker<MutexLock> _(_timeouts_lock);
        if (!_timeouts.empty()) {
            IdIndex& by_id = _timeouts.get<BY_ID>();
            cleared.insert(cleared.end(), by_id.begin(), by_id.end());
            _timeouts.clear();
        }
    }
    for (EventVec::iterator it = cleared.begin(); it != cleared.end(); ++it) {
        Callback* callback = it->callback;
        // Must read IsSelfDelete() before Run(): a self-delete callback may
        // destroy itself during Run().
        bool should_delete = !callback->IsSelfDelete();
        callback->Run(it->id, TimeoutManager::CLEARED);
        if (should_delete) delete callback;
    }
}
// Schedules a one-shot timeout after the given interval and returns its id.
// The callback must be self-deleting: it is run exactly once (with status
// TIMEOUTED, ERASED or CLEARED) and frees itself inside Run().
TimeoutManagerImpl::Id TimeoutManagerImpl::add(int64 interval, Callback* callback)
{
    SCHECK_GE(interval, 0);
    SCHECK(callback->IsSelfDelete());
    ScopedLocker<MutexLock> _(_timeouts_lock);
    const Id new_id = _next_id++;
    // repeat_interval == -1 marks the event as one-shot.
    Event event(new_id, calc_expiration(interval), -1, callback);
    _timeouts.insert(event);
    return new_id;
}
// Schedules a repeating timeout and returns its id. The callback fires every
// interval until erase() is called; it must NOT be self-deleting, because the
// manager reuses it across firings and deletes it itself on erase/clear.
TimeoutManagerImpl::Id TimeoutManagerImpl::add_repeating(int64 interval, Callback* callback)
{
    SCHECK_GE(interval, 0);
    SCHECK(!callback->IsSelfDelete());
    ScopedLocker<MutexLock> _(_timeouts_lock);
    const Id new_id = _next_id++;
    // A non-negative repeat_interval marks the event as repeating.
    Event event(new_id, calc_expiration(interval), interval, callback);
    _timeouts.insert(event);
    return new_id;
}
// Cancels the event with the given id. Returns false when no such event is
// pending. The event is removed from the container under _timeouts_lock,
// then its callback runs with status ERASED outside the lock (so it may
// re-enter the manager); non-self-delete callbacks are deleted afterwards.
bool TimeoutManagerImpl::erase(Id id)
{
    Callback* callback = NULL;
    {
        ScopedLocker<MutexLock> _(_timeouts_lock);
        IdIndex& by_id = _timeouts.get<BY_ID>();
        IdIndex::iterator find = by_id.find(id);
        if (find == by_id.end()) return false;
        callback = find->callback;
        by_id.erase(find);
    }
    // Must read IsSelfDelete() before Run(): a self-delete callback may
    // destroy itself during Run().
    bool should_delete = !callback->IsSelfDelete();
    callback->Run(id, TimeoutManager::ERASED);
    if (should_delete) delete callback;
    return true;
}
// Periodic tick body invoked by the TimerWorker. Extracts every event whose
// expiration tick is <= now, re-inserts the repeating ones, then runs each
// expired callback with status TIMEOUTED (outside _timeouts_lock).
void TimeoutManagerImpl::timer_run(const PTime& now)
{
    // NOTE(review): unsynchronized read of _is_running; worst case is one
    // extra tick racing with stop().
    if (!_is_running) return;
    int64 now_ticks = (now - _epoch_time).ticks();
    _last_ticks = now_ticks;
    EventVec expired;
    {
        ScopedLocker<MutexLock> _(_timeouts_lock);
        if (!_timeouts.empty()) {
            // Events are ordered by expiration tick; take everything due.
            ExpirationIndex& by_expiration = _timeouts.get<BY_EXPIRATION>();
            ExpirationIndex::iterator exp_end = by_expiration.upper_bound(now_ticks);
            expired.insert(expired.end(), by_expiration.begin(), exp_end);
            by_expiration.erase(by_expiration.begin(), exp_end);
        }
    }
    if (expired.empty()) return;
    // Reinsert if repeating, do this before executing callbacks
    // so the callbacks have a chance to call erase
    EventVec repeated;
    for (EventVec::iterator it = expired.begin(); it != expired.end(); ++it) {
        if (it->repeat_interval >= 0) {
            repeated.push_back(Event(it->id, calc_expiration(it->repeat_interval),
                        it->repeat_interval, it->callback));
        }
    }
    if (!repeated.empty()) {
        ScopedLocker<MutexLock> _(_timeouts_lock);
        _timeouts.insert(repeated.begin(), repeated.end());
    }
    // Execute callbacks
    for (EventVec::iterator it = expired.begin(); it != expired.end(); ++it) {
        it->callback->Run(it->id, TimeoutManager::TIMEOUTED);
    }
}
} // namespace trident
/* vim: set ts=4 sw=4 sts=4 tw=100 */
| {
"content_hash": "497d8c0b6be6f7b03bb6fa813feb65b7",
"timestamp": "",
"source": "github",
"line_count": 161,
"max_line_length": 92,
"avg_line_length": 28.459627329192546,
"alnum_prop": 0.6268005237887385,
"repo_name": "anqin/trident",
"id": "c40c3c504146bec48eccf4d2a5b35df49edea02e",
"size": "4582",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "timeout_manager_impl.cc",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "4559"
},
{
"name": "C++",
"bytes": "14704394"
},
{
"name": "Makefile",
"bytes": "7265"
},
{
"name": "Perl",
"bytes": "22654"
},
{
"name": "Python",
"bytes": "3714"
},
{
"name": "Shell",
"bytes": "3744"
}
],
"symlink_target": ""
} |
package com.oneliang.tools.builder.android.base;
import java.util.List;
import com.oneliang.tools.builder.android.base.AndroidProject;
/**
 * An AndroidProject variant carrying gradle-specific build information:
 * a list of gradle dependency declarations and a list of BuildConfig
 * constants.
 */
public class AndroidProjectForGradle extends AndroidProject {

    /** Gradle dependency declarations for this project; null until set. */
    private List<String> gradleDependencyList = null;
    /** BuildConfig constant definitions for this project; null until set. */
    private List<BuildConfig> buildConfigList = null;

    public AndroidProjectForGradle(String workspace, String name, String outputHome) {
        super(workspace, name, outputHome, BUILD_TYPE_DEFAULT);
    }

    /** Returns the gradle dependency declarations, or null if unset. */
    public List<String> getGradleDependencyList() {
        return this.gradleDependencyList;
    }

    /** Sets the gradle dependency declarations. */
    public void setGradleDependencyList(List<String> gradleDependencyList) {
        this.gradleDependencyList = gradleDependencyList;
    }

    /** Returns the BuildConfig constant definitions, or null if unset. */
    public List<BuildConfig> getBuildConfigList() {
        return this.buildConfigList;
    }

    /** Sets the BuildConfig constant definitions. */
    public void setBuildConfigList(List<BuildConfig> buildConfigList) {
        this.buildConfigList = buildConfigList;
    }

    /**
     * One BuildConfig constant; toString() renders the Java field
     * declaration {@code public static final <type> <name> = <value>;}.
     */
    public static class BuildConfig {

        public final String type;
        public final String name;
        public final String value;

        public BuildConfig(String type, String name, String value) {
            this.type = type;
            this.name = name;
            this.value = value;
        }

        @Override
        public String toString() {
            return "public static final " + this.type + " " + this.name + " = " + this.value + ";";
        }
    }
}
| {
"content_hash": "f182eb43028d96774c65c9f75d6740ab",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 99,
"avg_line_length": 27.704918032786885,
"alnum_prop": 0.6408284023668639,
"repo_name": "oneliang/builder-android",
"id": "f15a59edc3302f67abedf04053098f56f172e24e",
"size": "1690",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/com/oneliang/tools/builder/android/base/AndroidProjectForGradle.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "284472"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_45) on Tue Apr 14 10:14:45 CEST 2015 -->
<TITLE>
R.bool
</TITLE>
<META NAME="date" CONTENT="2015-04-14">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="R.bool";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="class-use/R.bool.html"><FONT CLASS="NavBarFont1"><B>Use</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../index-files/index-1.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../com/lowsbroadcast/R.attr.html" title="class in com.lowsbroadcast"><B>PREV CLASS</B></A>
<A HREF="../../com/lowsbroadcast/R.color.html" title="class in com.lowsbroadcast"><B>NEXT CLASS</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../index.html?com/lowsbroadcast/R.bool.html" target="_top"><B>FRAMES</B></A>
<A HREF="R.bool.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
<TR>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
SUMMARY: NESTED | <A HREF="#field_summary">FIELD</A> | <A HREF="#constructor_summary">CONSTR</A> | <A HREF="#methods_inherited_from_class_java.lang.Object">METHOD</A></FONT></TD>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
DETAIL: <A HREF="#field_detail">FIELD</A> | <A HREF="#constructor_detail">CONSTR</A> | METHOD</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<!-- ======== START OF CLASS DATA ======== -->
<H2>
<FONT SIZE="-1">
com.lowsbroadcast</FONT>
<BR>
Class R.bool</H2>
<PRE>
java.lang.Object
<IMG SRC="../../resources/inherit.gif" ALT="extended by "><B>com.lowsbroadcast.R.bool</B>
</PRE>
<DL>
<DT><B>Enclosing class:</B><DD><A HREF="../../com/lowsbroadcast/R.html" title="class in com.lowsbroadcast">R</A></DD>
</DL>
<HR>
<DL>
<DT><PRE>public static final class <B>R.bool</B><DT>extends java.lang.Object</DL>
</PRE>
<P>
<HR>
<P>
<!-- =========== FIELD SUMMARY =========== -->
<A NAME="field_summary"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<B>Field Summary</B></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>static int</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../com/lowsbroadcast/R.bool.html#abc_action_bar_embed_tabs_pre_jb">abc_action_bar_embed_tabs_pre_jb</A></B></CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>static int</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../com/lowsbroadcast/R.bool.html#abc_action_bar_expanded_action_views_exclusive">abc_action_bar_expanded_action_views_exclusive</A></B></CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>static int</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../com/lowsbroadcast/R.bool.html#abc_config_actionMenuItemAllCaps">abc_config_actionMenuItemAllCaps</A></B></CODE>
<BR>
Whether action menu items should be displayed in ALLCAPS or not.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>static int</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../com/lowsbroadcast/R.bool.html#abc_config_allowActionMenuItemTextWithIcon">abc_config_allowActionMenuItemTextWithIcon</A></B></CODE>
<BR>
Whether action menu items should obey the "withText" showAsAction
flag.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>static int</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../com/lowsbroadcast/R.bool.html#abc_config_showMenuShortcutsWhenKeyboardPresent">abc_config_showMenuShortcutsWhenKeyboardPresent</A></B></CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>static int</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../com/lowsbroadcast/R.bool.html#abc_split_action_bar_is_narrow">abc_split_action_bar_is_narrow</A></B></CODE>
<BR>
</TD>
</TR>
</TABLE>
<!-- ======== CONSTRUCTOR SUMMARY ======== -->
<A NAME="constructor_summary"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<B>Constructor Summary</B></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE><B><A HREF="../../com/lowsbroadcast/R.bool.html#R.bool()">R.bool</A></B>()</CODE>
<BR>
</TD>
</TR>
</TABLE>
<!-- ========== METHOD SUMMARY =========== -->
<A NAME="method_summary"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<B>Method Summary</B></FONT></TH>
</TR>
</TABLE>
<A NAME="methods_inherited_from_class_java.lang.Object"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#EEEEFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left"><B>Methods inherited from class java.lang.Object</B></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE>clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait</CODE></TD>
</TR>
</TABLE>
<P>
<!-- ============ FIELD DETAIL =========== -->
<A NAME="field_detail"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2">
<B>Field Detail</B></FONT></TH>
</TR>
</TABLE>
<A NAME="abc_action_bar_embed_tabs_pre_jb"><!-- --></A><H3>
abc_action_bar_embed_tabs_pre_jb</H3>
<PRE>
public static final int <B>abc_action_bar_embed_tabs_pre_jb</B></PRE>
<DL>
<DL>
<DT><B>See Also:</B><DD><A HREF="../../constant-values.html#com.lowsbroadcast.R.bool.abc_action_bar_embed_tabs_pre_jb">Constant Field Values</A></DL>
</DL>
<HR>
<A NAME="abc_action_bar_expanded_action_views_exclusive"><!-- --></A><H3>
abc_action_bar_expanded_action_views_exclusive</H3>
<PRE>
public static final int <B>abc_action_bar_expanded_action_views_exclusive</B></PRE>
<DL>
<DL>
<DT><B>See Also:</B><DD><A HREF="../../constant-values.html#com.lowsbroadcast.R.bool.abc_action_bar_expanded_action_views_exclusive">Constant Field Values</A></DL>
</DL>
<HR>
<A NAME="abc_config_actionMenuItemAllCaps"><!-- --></A><H3>
abc_config_actionMenuItemAllCaps</H3>
<PRE>
public static final int <B>abc_config_actionMenuItemAllCaps</B></PRE>
<DL>
<DD>Whether action menu items should be displayed in ALLCAPS or not.
Defaults to true. If this is not appropriate for specific locales
it should be disabled in that locale's resources.
<P>
<DL>
<DT><B>See Also:</B><DD><A HREF="../../constant-values.html#com.lowsbroadcast.R.bool.abc_config_actionMenuItemAllCaps">Constant Field Values</A></DL>
</DL>
<HR>
<A NAME="abc_config_allowActionMenuItemTextWithIcon"><!-- --></A><H3>
abc_config_allowActionMenuItemTextWithIcon</H3>
<PRE>
public static final int <B>abc_config_allowActionMenuItemTextWithIcon</B></PRE>
<DL>
<DD>Whether action menu items should obey the "withText" showAsAction
 flag. This may be set to false for situations where space is
 extremely limited.
<P>
<DL>
<DT><B>See Also:</B><DD><A HREF="../../constant-values.html#com.lowsbroadcast.R.bool.abc_config_allowActionMenuItemTextWithIcon">Constant Field Values</A></DL>
</DL>
<HR>
<A NAME="abc_config_showMenuShortcutsWhenKeyboardPresent"><!-- --></A><H3>
abc_config_showMenuShortcutsWhenKeyboardPresent</H3>
<PRE>
public static final int <B>abc_config_showMenuShortcutsWhenKeyboardPresent</B></PRE>
<DL>
<DL>
<DT><B>See Also:</B><DD><A HREF="../../constant-values.html#com.lowsbroadcast.R.bool.abc_config_showMenuShortcutsWhenKeyboardPresent">Constant Field Values</A></DL>
</DL>
<HR>
<A NAME="abc_split_action_bar_is_narrow"><!-- --></A><H3>
abc_split_action_bar_is_narrow</H3>
<PRE>
public static final int <B>abc_split_action_bar_is_narrow</B></PRE>
<DL>
<DL>
<DT><B>See Also:</B><DD><A HREF="../../constant-values.html#com.lowsbroadcast.R.bool.abc_split_action_bar_is_narrow">Constant Field Values</A></DL>
</DL>
<!-- ========= CONSTRUCTOR DETAIL ======== -->
<A NAME="constructor_detail"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2">
<B>Constructor Detail</B></FONT></TH>
</TR>
</TABLE>
<A NAME="R.bool()"><!-- --></A><H3>
R.bool</H3>
<PRE>
public <B>R.bool</B>()</PRE>
<DL>
</DL>
<!-- ========= END OF CLASS DATA ========= -->
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="class-use/R.bool.html"><FONT CLASS="NavBarFont1"><B>Use</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../index-files/index-1.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../com/lowsbroadcast/R.attr.html" title="class in com.lowsbroadcast"><B>PREV CLASS</B></A>
<A HREF="../../com/lowsbroadcast/R.color.html" title="class in com.lowsbroadcast"><B>NEXT CLASS</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../index.html?com/lowsbroadcast/R.bool.html" target="_top"><B>FRAMES</B></A>
<A HREF="R.bool.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
<TR>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
SUMMARY: NESTED | <A HREF="#field_summary">FIELD</A> | <A HREF="#constructor_summary">CONSTR</A> | <A HREF="#methods_inherited_from_class_java.lang.Object">METHOD</A></FONT></TD>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
DETAIL: <A HREF="#field_detail">FIELD</A> | <A HREF="#constructor_detail">CONSTR</A> | METHOD</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
</BODY>
</HTML>
| {
"content_hash": "eaee615072c4bc446f5ba828180b6c0c",
"timestamp": "",
"source": "github",
"line_count": 365,
"max_line_length": 215,
"avg_line_length": 40.15890410958904,
"alnum_prop": 0.6483831354891527,
"repo_name": "lows/lows",
"id": "c26716690e1f7e269e71be20fa83077d098bc27e",
"size": "14658",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "LoWS-Receiver-Application/LoWS-javadoc/com/lowsbroadcast/R.bool.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "7784771"
},
{
"name": "C++",
"bytes": "7297"
},
{
"name": "CSS",
"bytes": "5006"
},
{
"name": "Groff",
"bytes": "3367"
},
{
"name": "HTML",
"bytes": "3717349"
},
{
"name": "Java",
"bytes": "679991"
},
{
"name": "Makefile",
"bytes": "172082"
},
{
"name": "Objective-C",
"bytes": "585"
},
{
"name": "PHP",
"bytes": "1709305"
},
{
"name": "PLpgSQL",
"bytes": "2529"
},
{
"name": "Perl",
"bytes": "1575"
},
{
"name": "Python",
"bytes": "17934"
},
{
"name": "Shell",
"bytes": "217"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<!--
#%L
Wildfly Camel Patch
%%
Copyright (C) 2013 - 2014 RedHat
%%
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
#L%
-->
<modules xmlns="http://smartics.de/ns/jboss-modules-descriptor/2">
<module name="org.apache.camel.component.jclouds">
<include artifact="org.apache.camel:camel-jclouds"/>
<apply-to-dependencies skip="true">
<include module="org.apache.camel.apt"/>
<include module="org.springframework.boot"/>
</apply-to-dependencies>
<dependencies>
<module name="javax.api"/>
<module name="com.google.guava" slot="16.0"/>
<module name="org.apache.jclouds" export="true" services="export">
<exports>
<exclude path="org/jclouds/apis/internal"/>
<exclude path="org/jclouds/blobstore/internal"/>
<exclude path="org/jclouds/blobstore/domain/internal"/>
<exclude path="org/jclouds/blobstore/strategy/internal"/>
<exclude path="org/jclouds/blobstore/util/internal"/>
<exclude path="org/jclouds/collect/internal"/>
<exclude path="org/jclouds/compute/internal"/>
<exclude path="org/jclouds/compute/domain/internal"/>
<exclude path="org/jclouds/compute/extensions/internal"/>
<exclude path="org/jclouds/compute/predicates/internal"/>
<exclude path="org/jclouds/date/internal"/>
<exclude path="org/jclouds/domain/internal"/>
<exclude path="org/jclouds/encryption/internal"/>
<exclude path="org/jclouds/http/internal"/>
<exclude path="org/jclouds/internal"/>
<exclude path="org/jclouds/io/internal"/>
<exclude path="org/jclouds/json/internal"/>
<exclude path="org/jclouds/logging/internal"/>
<exclude path="org/jclouds/ovf/internal"/>
<exclude path="org/jclouds/ovf/xml/internal"/>
<exclude path="org/jclouds/predicates/internal"/>
<exclude path="org/jclouds/providers/internal"/>
<exclude path="org/jclouds/proxy/internal"/>
<exclude path="org/jclouds/rest/internal"/>
<exclude path="org/jclouds/ssh/internal"/>
<exclude path="org/jclouds/strategy/internal"/>
<exclude path="org/jclouds/util/internal"/>
<exclude path="org/jclouds/xml/internal"/>
</exports>
</module>
<module name="org.slf4j"/>
</dependencies>
</module>
</modules>
| {
"content_hash": "25759ecb675cade2cec5c15374a18283",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 78,
"avg_line_length": 48.5735294117647,
"alnum_prop": 0.5821980018165305,
"repo_name": "tadayosi/wildfly-camel",
"id": "a0c5e81ea2e1e643a34563a7ffc6f46afabbb8d3",
"size": "3303",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "feature/extrasC/etc/smartics/camel-modules.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "257"
},
{
"name": "Dockerfile",
"bytes": "602"
},
{
"name": "FreeMarker",
"bytes": "675"
},
{
"name": "Groovy",
"bytes": "25235"
},
{
"name": "HTML",
"bytes": "418995"
},
{
"name": "Hack",
"bytes": "305938"
},
{
"name": "Java",
"bytes": "2926410"
},
{
"name": "JavaScript",
"bytes": "1447"
},
{
"name": "Python",
"bytes": "60"
},
{
"name": "Ruby",
"bytes": "61"
},
{
"name": "Shell",
"bytes": "4829"
},
{
"name": "Tcl",
"bytes": "34"
},
{
"name": "XSLT",
"bytes": "8098"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="ISO-8859-1"?>
<LEOSimulation version="2.5.0">
<STELAVersion>2.5.1</STELAVersion>
<SpaceObject>
<mass unit="kg">2.0</mass>
<dragArea>0.045</dragArea>
<reflectingArea>0.18</reflectingArea>
<reflectivityCoefficient>1.5</reflectivityCoefficient>
<orbitType>LEO</orbitType>
<ConstantDragCoef>
<cstDragCoef>2.1</cstDragCoef>
</ConstantDragCoef>
<name>KG1WYORI</name>
</SpaceObject>
<EphemerisManager version="2.5.0">
<initState>
<bulletin version="2.5.0">
<date>2014-07-14T14:50:18.000</date>
<Type2PosVel>
<frame>CELESTIAL_MEAN_OF_DATE</frame>
<nature>MEAN</nature>
<semiMajorAxis unit="m">7000000.0</semiMajorAxis>
<eccentricity>0.001</eccentricity>
<inclination unit="rad">1.65806278939</inclination>
<rAAN unit="rad">0.0</rAAN>
<argOfPerigee unit="rad">0.0</argOfPerigee>
<meanAnomaly unit="rad">0.0</meanAnomaly>
</Type2PosVel>
</bulletin>
</initState>
<finalState>
<bulletin version="2.5.0">
<date>2024-02-03T02:47:32.333</date>
<Type2PosVel>
<frame>CELESTIAL_MEAN_OF_DATE</frame>
<nature>MEAN</nature>
<semiMajorAxis unit="m">6515004.81817825</semiMajorAxis>
<eccentricity>5.839690019038693E-4</eccentricity>
<inclination unit="rad">1.6568207376715889</inclination>
<rAAN unit="rad">1.1383050192083237</rAAN>
<argOfPerigee unit="rad">1.2909575912162676</argOfPerigee>
<meanAnomaly unit="rad">0.7571689129406591</meanAnomaly>
</Type2PosVel>
</bulletin>
</finalState>
</EphemerisManager>
<author>CNES</author>
<comment>LEO example simulation</comment>
<simulationDuration unit="years">100.0</simulationDuration>
<ephemerisStep unit="s">86400.0</ephemerisStep>
<ttMinusUT1 unit="s">67.184</ttMinusUT1>
<srpSwitch>true</srpSwitch>
<sunSwitch>true</sunSwitch>
<moonSwitch>true</moonSwitch>
<extrapolationDuration>10718</extrapolationDuration>
<warningFlag>false</warningFlag>
<iterativeMode>false</iterativeMode>
<GTOIntegrator>
<Integrator>
<effective_duration unit="years">9.557831848239408</effective_duration>
<LOSCriteria1 version="2.5.0">
<status_criteria1>1</status_criteria1>
<comment_criteria1>Lifetime :9.56 years</comment_criteria1>
</LOSCriteria1>
<LOSCriteria2 version="2.5.0">
<status_criteria2>3</status_criteria2>
</LOSCriteria2>
<LOSCriteria3 version="2.5.0">
<status_criteria3>3</status_criteria3>
</LOSCriteria3>
<LOSCriteria4 version="2.5.0">
<status_criteria4>3</status_criteria4>
</LOSCriteria4>
</Integrator>
</GTOIntegrator>
<modelType>GTO</modelType>
<atmosModel>NRLMSISE-00</atmosModel>
<VariableSolarActivity>
<solActType>VARIABLE</solActType>
</VariableSolarActivity>
<integrationStep unit="s">86400.0</integrationStep>
<dragSwitch>true</dragSwitch>
<dragQuadPoints>33</dragQuadPoints>
<atmosDragRecomputeStep>1</atmosDragRecomputeStep>
<srpQuadPoints>11</srpQuadPoints>
<reentryAltitude unit="m">120000.0</reentryAltitude>
<iterationData>
<funcValueAccuracy unit="days">10.0</funcValueAccuracy>
<expDuration unit="years">24.75</expDuration>
<simMinusExpDuration unit="years">75.25</simMinusExpDuration>
<iterationMethod>FrozenOrbit</iterationMethod>
</iterationData>
<nbIntegrationStepTesseral>5.0</nbIntegrationStepTesseral>
<zonalOrder>7</zonalOrder>
</LEOSimulation> | {
"content_hash": "5316b4ac57192bc6baa9ead210d11c93",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 74,
"avg_line_length": 35.36842105263158,
"alnum_prop": 0.7267857142857143,
"repo_name": "pouyana/satgen",
"id": "f31fa1bcc9c314d70b46632c61ef8385295d4c2c",
"size": "3360",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sim/KG1WYORI_a_sim.xml_out_sim.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "552816"
},
{
"name": "Matlab",
"bytes": "1015"
},
{
"name": "Python",
"bytes": "227028"
}
],
"symlink_target": ""
} |
# Django middleware stack (legacy MIDDLEWARE_CLASSES setting).
# Order is significant: Django applies these top-down on the request and
# bottom-up on the response, so SessionMiddleware must come before the
# auth middlewares that read the session.
MIDDLEWARE_CLASSES = (
# CORS handling is intentionally disabled here.
# NOTE(review): class name is spelled "Coors" upstream — confirm before re-enabling.
# 'pangolin.core.middleware.CoorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
| {
"content_hash": "6138bd11ef95c35e081a6bfc406e543b",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 69,
"avg_line_length": 49.166666666666664,
"alnum_prop": 0.7898305084745763,
"repo_name": "skylifewww/pangolin_new",
"id": "e43baa6c97f0b002d65570050552c36e21dbfe23",
"size": "590",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "settings/middleware.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "116074"
},
{
"name": "HTML",
"bytes": "220326"
},
{
"name": "JavaScript",
"bytes": "48383"
},
{
"name": "Makefile",
"bytes": "1477"
},
{
"name": "Nginx",
"bytes": "626"
},
{
"name": "Python",
"bytes": "115145"
}
],
"symlink_target": ""
} |
'use strict';
/* jshint -W098 */
// The Package is past automatically as first parameter
module.exports = function(Langs, app, auth, database) {
app.get('/langs/example/anyone', function(req, res, next) {
res.send('Anyone can access this');
});
app.get('/langs/example/auth', auth.requiresLogin, function(req, res, next) {
res.send('Only authenticated users can access this');
});
app.get('/langs/example/admin', auth.requiresAdmin, function(req, res, next) {
res.send('Only users with Admin role can access this');
});
app.get('/langs/example/render', function(req, res, next) {
Langs.render('index', {
package: 'langs'
}, function(err, html) {
//Rendering a view from the Package server/views
res.send(html);
});
});
};
| {
"content_hash": "ae7f6fb81cda3d9f243a94bdcf0f58d6",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 80,
"avg_line_length": 29.074074074074073,
"alnum_prop": 0.6458598726114649,
"repo_name": "zwhitchcox/lex",
"id": "17fdfff2a98030de531b5d16e799f0ed47386b83",
"size": "785",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/custom/langs/server/routes/langs.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "7397"
},
{
"name": "CoffeeScript",
"bytes": "35896"
},
{
"name": "HTML",
"bytes": "49086"
},
{
"name": "JavaScript",
"bytes": "139006"
},
{
"name": "Shell",
"bytes": "712"
}
],
"symlink_target": ""
} |
<package>
<name>usb_cam_jpeg</name>
<version>0.3.4</version>
<description>A ROS Driver for V4L USB Cameras (raw jpeg version)</description>
<maintainer email="rctoris@wpi.edu">Russell Toris</maintainer>
<author email="benjamin.pitzer@bosch.com">Benjamin Pitzer</author>
<license>BSD</license>
<url type="website">http://wiki.ros.org/usb_cam</url>
<url type="bugtracker">https://github.com/bosch-ros-pkg/usb_cam/issues</url>
<url type="repository">https://github.com/bosch-ros-pkg/usb_cam</url>
<buildtool_depend>catkin</buildtool_depend>
<build_depend>image_transport</build_depend>
<build_depend>roscpp</build_depend>
<build_depend>std_msgs</build_depend>
<build_depend>std_srvs</build_depend>
<build_depend>sensor_msgs</build_depend>
<build_depend>ffmpeg</build_depend>
<build_depend>camera_info_manager</build_depend>
<run_depend>image_transport</run_depend>
<run_depend>roscpp</run_depend>
<run_depend>std_msgs</run_depend>
<run_depend>std_srvs</run_depend>
<run_depend>sensor_msgs</run_depend>
<run_depend>ffmpeg</run_depend>
<run_depend>camera_info_manager</run_depend>
<run_depend>v4l-utils</run_depend>
</package>
| {
"content_hash": "adedb66314c03f79ca51b47345ad7766",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 80,
"avg_line_length": 36,
"alnum_prop": 0.7171717171717171,
"repo_name": "AIR-K-Tohoku/usb_cam_jpeg",
"id": "53b0feeafbbf1d0af8acbf830f4622b1262083df",
"size": "1188",
"binary": false,
"copies": "1",
"ref": "refs/heads/raw-jpeg",
"path": "package.xml",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "34567"
},
{
"name": "CMake",
"bytes": "1805"
}
],
"symlink_target": ""
} |
NSModuleLoader.add([ 'ns.components.dataTypes.AbstractTime' ], function () {
    ns.components = ns.components || {};
    ns.components.dataTypes = ns.components.dataTypes || {};

    var dataTypes = ns.components.dataTypes;

    /**
     * Data record for a single log entry: the message text plus an optional
     * array of stack-trace lines. Time handling is inherited from AbstractTime.
     *
     * @public
     * @class ns.components.dataTypes.LogInfo
     * @extends ns.components.dataTypes.AbstractTime
     */
    dataTypes.LogInfo = dataTypes.AbstractTime.createChild( /** @lends {ns.components.dataTypes.LogInfo} */ {

        /**
         * @constructs
         * @param {string} text - log message
         * @param {Array.<string>|null} stackTraceStringsArray - captured stack trace, if any
         */
        init : function (text, stackTraceStringsArray) {
            this.text = text;
            this.stackTraceStringsArray = stackTraceStringsArray;
        },

        /**
         * Identifies the concrete data type of this instance.
         * @public
         * @returns {Function}
         */
        getType : function () {
            return ns.components.dataTypes.LogInfo;
        }
    });
});
| {
"content_hash": "16762e2191046a755179290d1ef533c0",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 134,
"avg_line_length": 25.125,
"alnum_prop": 0.6753731343283582,
"repo_name": "igor-bezkrovny/js-rdt",
"id": "204d19caeb425cbc1340eb84d640b4774bc7d85d",
"size": "804",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/server/ui/src/components/dataTypes/LogInfo.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "114173"
}
],
"symlink_target": ""
} |
define(["jquery", "backbone", "models/Model", "views/View", "collections/colors"],
    function($, Backbone, UserModel, View, Collection) {

        // Router for the mobile site. A single root route is defined; hitting
        // it instantiates the default View, which renders itself.
        var MobileRouter = Backbone.Router.extend({

            // Map of URL fragments to handler method names.
            routes: {
                // No hash fragment -> home page.
                "": "index"
            },

            // Start watching hashchange events as soon as the router exists.
            initialize: function() {
                Backbone.history.start();
            },

            // Home route handler.
            index: function() {
                new View();
            }
        });

        // Hand the router class back to the module loader.
        return MobileRouter;
    }
);
"content_hash": "ab674456c32023beb8ad6d6116a85ab5",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 88,
"avg_line_length": 23.88888888888889,
"alnum_prop": 0.49883720930232556,
"repo_name": "pituki/backbone-require_listenTo-Example",
"id": "937f5cd5336db093e8583ce849df0ac0ea132bc8",
"size": "898",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "public/js/app/routers/MobileRouter.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "488524"
},
{
"name": "JavaScript",
"bytes": "1892130"
}
],
"symlink_target": ""
} |
-- Recreate the Distamben receipts view (SKPD id 17) as a filtered
-- projection of the kabupaten-wide receipts view.
DROP VIEW IF EXISTS view_daftar_penerimaan_distamben;

CREATE VIEW view_daftar_penerimaan_distamben AS
SELECT *
FROM view_daftar_penerimaan_kabupaten
WHERE id_skpd = 17;

-- The report user may read the view and the lookup table, but never write.
GRANT ALL PRIVILEGES ON view_daftar_penerimaan_distamben, sub_skpd TO lap_distamben;
REVOKE INSERT, UPDATE, DELETE ON view_daftar_penerimaan_distamben, sub_skpd FROM lap_distamben;
| {
"content_hash": "958afd8786c49f23dd846d84465569b5",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 95,
"avg_line_length": 22.875,
"alnum_prop": 0.7950819672131147,
"repo_name": "muntaza/Open_Persediaan",
"id": "49e9ce7ead1ff7ca5ca83541ad122c68b146f781",
"size": "366",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sql1_persediaan/daftar_penerimaan_barang_sql/daftar_penerimaan_distamben.sql",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "46518"
},
{
"name": "HTML",
"bytes": "51187"
},
{
"name": "JavaScript",
"bytes": "97398"
},
{
"name": "Python",
"bytes": "67394"
},
{
"name": "Shell",
"bytes": "459021"
},
{
"name": "TSQL",
"bytes": "1744300"
}
],
"symlink_target": ""
} |
require 'helper'
require 'write_xlsx/workbook'
require 'write_xlsx/worksheet'
require 'stringio'
# Regression test: Worksheet#convert_date_time must treat its string
# argument as read-only — the caller's string (including surrounding
# whitespace) is left untouched.
class TestConvertDateTime04 < Test::Unit::TestCase
def setup
# In-memory workbook so no file is written during the test.
@workbook = WriteXLSX.new(StringIO.new)
@worksheet = @workbook.add_worksheet('')
end
def test_convert_date_time_should_not_change_date_time_string
date_time = ' 2000-01-23T00:00:00.000Z '
@worksheet.convert_date_time(date_time)
# The original object must compare equal to its initial value.
assert_equal(' 2000-01-23T00:00:00.000Z ', date_time)
end
end
| {
"content_hash": "039103dc221b717323692a08ac1df92d",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 63,
"avg_line_length": 26.61111111111111,
"alnum_prop": 0.7181628392484343,
"repo_name": "Orphist/write_xlsx",
"id": "75ee390011e1f3d4d6621b9a02e1de257a4cb12e",
"size": "503",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/worksheet/test_convert_date_time_04.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "2254278"
}
],
"symlink_target": ""
} |
<html>
<head>
<title>dr33m | View size Smoke Tests</title>
<link rel="stylesheet" href="../examples/css/example.css"/>
<script type="text/javascript" src="../lib/jquery-1.9.1.js"></script>
<script type="text/javascript" src="../lib/acorn.js"></script>
<script type="text/javascript" src="../lib/coffee-script.js"></script>
<script type="text/javascript" src="../core/layout.js"></script>
<script type="text/javascript" src="../lib/chai.js"></script>
<script type="text/javascript" src="/lib/smoke_helper.js"></script>
</head>
<body>
<view width="auto" bgcolor="#cccccc" padding="5">
<class name="block" width="100" height="50" bgcolor="#999999"></class>
<class name="item" width="25" height="25" bgcolor="#666666"></class>
<spacedlayout axis="y" spacing="5" updateparent="true"></spacedlayout>
<!-- Absolute Sizing -->
<block name="basic">
<handler event="oninit">
assert.equal(this.width, 100);
assert.equal(this.height, 50);
</handler>
</block>
<!-- Constrained Sizing -->
<block name="constrained">
<item name="inner" width="${this.parent.width - this.foo}" height="${this.parent.height - this.foo}">
<attribute name="foo" type="number" value="10"></attribute>
</item>
<handler event="oninit">
assert.equal(this.inner.width, 90);
assert.equal(this.inner.height, 40);
</handler>
</block>
<!-- Percentage Sizing -->
<block name="percentage">
<item name="inner" width="100%" height="100%"></item>
<handler event="oninit">
assert.equal(this.inner.width, 100);
assert.equal(this.inner.height, 50);
</handler>
</block>
<block>
<item name="inner" width="80%" height="70%"></item>
<handler event="oninit">
assert.equal(this.inner.width, 80);
assert.equal(this.inner.height, 35);
assert.equal($(this.inner.sprite.el).css('marginLeft'), '0px');
assert.equal($(this.inner.sprite.el).css('marginTop'), '0px');
</handler>
</block>
<!-- Auto Sizing -->
<block name="auto1" width="auto" height="auto">
<handler event="oninit">
assert.equal(this.width, 0);
assert.equal(this.height, 0);
</handler>
</block>
<block name="auto2" width="auto" height="auto">
<item name="v1" width="100" height="40"></item>
<item name="v2" x="200"></item>
<item name="v3" y="50"></item>
<handler event="oninit">
assert.equal(this.width, 225);
assert.equal(this.height, 75);
</handler>
</block>
<block name="auto3" width="auto" height="auto" border="1" bordercolor="black" padding="2">
<item name="inner" width="100" height="50"></item>
<handler event="oninit">
assert.equal(this.width, 106);
assert.equal(this.height, 56);
</handler>
</block>
<!-- Nested auto sizes -->
<block name="auto4" width="auto" height="auto" border="1" bordercolor="black" padding="2">
<item name="inner" x="5" y="10" width="auto" height="auto">
<item name="inner" x="50" y="25" bgcolor="#ffffff"></item>
</item>
<handler event="oninit">
assert.equal(this.width, 86);
assert.equal(this.height, 66);
</handler>
</block>
<!-- Auto size with percent children -->
<block name="auto5" width="auto" height="auto">
<item name="v1" x="50%" y="50%" width="50%" height="50%" bgcolor="#ff0000"></item>
<item name="v2" x="205"></item>
<item name="v3" y="55"></item>
<handler event="oninit">
assert.equal(this.width, 230);
assert.equal(this.height, 80);
</handler>
</block>
<!-- Auto size with parent constrained children -->
<block name="auto6a" width="auto" height="auto" bgcolor="green">
<item name="v1"
x="${this.parent.width / 2}" y="${this.parent.height / 2}"
width="${this.parent.width / 2}" height="${this.parent.height / 2}"
bgcolor="#ff0000"
></item>
<item name="v2" x="105"></item>
<item name="v3" y="55"></item>
<handler event="oninit">
assert.equal(this.width, 130);
assert.equal(this.height, 80);
</handler>
</block>
<!-- Auto size with a single parent constrained child should result
in a view with 0 size. -->
<block name="auto6b" width="auto" height="auto" bgcolor="orange">
<item name="v1"
width="${this.parent.width}" height="${this.parent.height}"
bgcolor="#ff0000"
></item>
<handler event="oninit">
assert.equal(this.width, 0);
assert.equal(this.height, 0);
</handler>
</block>
<!-- Auto size with right/center or middle/bottom aligned children -->
<block name="auto7" width="auto" height="auto">
<item name="v1" x="right" y="bottom" bgcolor="#ff0000"></item>
<item name="v2" x="205"></item>
<item name="v3" y="55"></item>
<handler event="oninit">
assert.equal(this.width, 230);
assert.equal(this.height, 80);
</handler>
</block>
<handler event="oninit">
//// Verify Basic Sizing ////
var block = this.basic, v1, v2, v3, v4, v5;
// Set width to a valid value
block.setAttribute('width', 1);
assert.equal(block.width, 1);
// Set width to an invalid value
block.setAttribute('width', -1);
assert.equal(block.width, 0);
block.setAttribute('width', 'foo');
assert.equal(block.width, 0);
// Set height to a valid value
block.setAttribute('height', 1);
assert.equal(block.height, 1);
// Set height to an invalid value
block.setAttribute('height', -1);
assert.equal(block.height, 0);
block.setAttribute('height', 'foo');
assert.equal(block.height, 0);
// Give it a size that we can see just to make the smoketest pretty for visual inspection
block.setAttribute('width', 100);
block.setAttribute('height', 5);
//// Verify Constrained Sizing ////
block = this.constrained;
v1 = block.inner;
// Change constraint target value
v1.setAttribute('foo', 5);
assert.equal(v1.width, 95);
assert.equal(v1.height, 45);
// Set to absolute to verify the constraint is removed
v1.setAttribute('width', 100);
assert.equal(v1.width, 100);
v1.setAttribute('foo', 15);
assert.equal(v1.width, 100);
assert.equal(v1.height, 35);
//// Verify Percentage Sizing ////
block = this.percentage;
v1 = block.inner;
// Change percentage
v1.setAttribute('width', '50%');
assert.equal(v1.width, 50);
// Change to absolute
v1.setAttribute('width', 70);
assert.equal(v1.width, 70);
block.setAttribute('width', 200);
assert.equal(v1.width, 70);
// Change back to percent
v1.setAttribute('width', '50%');
assert.equal(v1.width, 100);
//// Verify Auto Sizing ////
block = this.auto1;
// Add a view
v1 = this.createChild({class: 'item', parent:block});
assert.equal(block.width, 25);
assert.equal(block.height, 25);
// Remove a view
v1.destroy();
assert.equal(block.width, 0);
assert.equal(block.height, 0);
// Destroy a view with auto size just to make sure it doesn't throw an error
block.destroy();
// Hide a view
block = this.auto2;
v1 = block.v1;
v2 = block.v2;
v3 = block.v3;
v3.setAttribute('visible', false);
assert.equal(block.width, 225);
assert.equal(block.height, 40);
// Show a view
v3.setAttribute('visible', true);
assert.equal(block.width, 225);
assert.equal(block.height, 75);
// Move a view
v3.setAttribute('y', 60);
assert.equal(block.width, 225);
assert.equal(block.height, 85);
// Resize a view
v3.setAttribute('height', 20);
assert.equal(block.width, 225);
assert.equal(block.height, 80);
// Turn off auto
block.setAttribute('width', 100);
assert.equal(block.width, 100);
assert.equal(block.height, 80);
v2.setAttribute('x', 150);
assert.equal(block.width, 100);
assert.equal(block.height, 80);
// Turn on auto
block.setAttribute('width', 'auto');
assert.equal(block.width, 175);
assert.equal(block.height, 80);
// Change position of child of an auto sized view with percent children
// The percent children should be ignored.
block = this.auto5;
v1 = block.v1;
v2 = block.v2;
v3 = block.v3;
v2.setAttribute('x', 105);
assert.equal(block.width, 130);
assert.equal(block.height, 80);
assert.equal(v1.x, 65);
assert.equal(v1.y, 40);
assert.equal(v1.width, 65);
assert.equal(v1.height, 40);
v1.setAttribute('width', '100%');
assert.equal(block.width, 130);
assert.equal(block.height, 80);
assert.equal(v1.x, 65);
assert.equal(v1.y, 40);
assert.equal(v1.width, 130);
assert.equal(v1.height, 40);
v1.setAttribute('width', 130);
v1.setAttribute('x', 70); // Triggers an event since the value is different thus causing the AutoLayout to update
assert.equal(block.width, 200);
assert.equal(block.height, 80);
v1.setAttribute('height', 50);
v1.setAttribute('y', 45); // Triggers an event since the value is different thus causing the AutoLayout to update
assert.equal(block.width, 200);
assert.equal(block.height, 95);
// Change position of child of an auto sized view with a child
// that is constrained to the parent width/height. Because constraints
// prevent loops the constained child view will behave as expected even
// though the actual relationship is a circular reference.
block = this.auto6a;
v1 = block.v1;
v2 = block.v2;
v3 = block.v3;
v2.setAttribute('x', 50);
assert.equal(v2.x, 50);
assert.equal(v1.x, 65);
assert.equal(v1.width, 65);
assert.equal(block.width, 130);
assert.equal(block.height, 80);
v2.setAttribute('x', 155);
assert.equal(v2.x, 155);
assert.equal(v1.x, 90);
assert.equal(v1.width, 90);
assert.equal(block.width, 180);
assert.equal(block.height, 80);
// Add a view to the empty block and we should see the constrained
// view resize itself.
block = this.auto6b;
v1 = block.v1;
v2 = block.createChild({class: 'item', x:45, y:45});
assert.equal(v2.x, 45);
assert.equal(v2.y, 45);
assert.equal(v1.width, 70);
assert.equal(v1.height, 70);
assert.equal(block.width, 70);
assert.equal(block.height, 70);
// Nothing should change after view destruction
v2.destroy();
assert.equal(v2.destroyed, true);
assert.equal(v1.width, 70);
assert.equal(v1.height, 70);
assert.equal(block.width, 70);
assert.equal(block.height, 70);
// Change position of child of an auto sized view with aligned children
// The aligned children should be ignored.
block = this.auto7;
v1 = block.v1;
v2 = block.v2;
v3 = block.v3;
v2.setAttribute('x', 105);
assert.equal(block.width, 130);
assert.equal(block.height, 80);
assert.equal(v1.x, 105);
assert.equal(v1.y, 55);
v1.setAttribute('x', 'center');
assert.equal(v1.x, 52.5);
assert.equal(v1.y, 55);
v1.setAttribute('width', 130);
v1.setAttribute('x', 65);
assert.equal(block.width, 195);
assert.equal(block.height, 80);
v1.setAttribute('height', 50);
v1.setAttribute('y', 40);
assert.equal(block.width, 195);
assert.equal(block.height, 90);
// Finally, verify root view since we gave it a width of auto
assert.equal(this.width, 210);
assert.equal(this.height, 752);
</handler>
</view>
<!-- The MIT License (MIT)
Copyright ( c ) 2014 Teem2 LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE. -->
</body>
</html>
| {
"content_hash": "0b4684cf4c44cca299eb39337c8043e6",
"timestamp": "",
"source": "github",
"line_count": 396,
"max_line_length": 119,
"avg_line_length": 33.97222222222222,
"alnum_prop": 0.5934735746673604,
"repo_name": "gnovos/gnovos.github.io",
"id": "beeb265e30e2e2938e5117bb397376b0bc88c659",
"size": "13453",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "smoke/view_size.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "18214"
},
{
"name": "CoffeeScript",
"bytes": "190645"
},
{
"name": "HTML",
"bytes": "964526"
},
{
"name": "JavaScript",
"bytes": "11716205"
},
{
"name": "Ruby",
"bytes": "7258"
},
{
"name": "Shell",
"bytes": "1078"
}
],
"symlink_target": ""
} |
<?php
namespace CategoryBundle\Helpers;
use CategoryBundle\Entity\Category;
use Doctrine\Common\Collections\ArrayCollection;
/**
 * Groups flat lists of categories into parent-keyed buckets.
 *
 * Class GroupingHelper
 * @package CategoryBundle\Helpers
 */
class GroupingHelper
{
    /**
     * Groups categories by their parent, keyed by the parent's ID.
     * Categories without a parent are collected under key 0, which is
     * always present in the result (possibly empty).
     *
     * @param iterable|Category[] $categories
     * @return ArrayCollection keys are parent IDs (0 = no parent),
     *                         values are ArrayCollections of categories
     */
    public function groupByParent($categories)
    {
        $orderedCategories = new ArrayCollection();
        $orderedCategories->set(0, new ArrayCollection());

        foreach ($categories as $category) {
            /** @var $category Category */
            $parent = $category->getParent();
            $groupId = $parent ? $parent->getId() : 0;

            // Lazily create the bucket. containsKey() is used rather than a
            // truthiness check on get(), and the previously duplicated add()
            // branches are collapsed into a single call.
            if (!$orderedCategories->containsKey($groupId)) {
                $orderedCategories->set($groupId, new ArrayCollection());
            }
            $orderedCategories->get($groupId)->add($category);
        }

        return $orderedCategories;
    }
}
"content_hash": "598bee2b6379f18033a490aa5ab37100",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 85,
"avg_line_length": 30.886363636363637,
"alnum_prop": 0.5864606328182487,
"repo_name": "Lezas/myBudget",
"id": "ef2f873c9de6e42dcc524c71de18d1678fef13cc",
"size": "1359",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/CategoryBundle/Helpers/GroupingHelper.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3829"
},
{
"name": "HTML",
"bytes": "58920"
},
{
"name": "JavaScript",
"bytes": "97010"
},
{
"name": "PHP",
"bytes": "181750"
}
],
"symlink_target": ""
} |
use chrono::{offset::Local, DateTime};
use hyper::{header, Body, Method, StatusCode};
use std::{fs, io, net::SocketAddr, path::Path};
use crate::base::ctx::ctxs;
use crate::base::{http, response, HeaderGetStr, Request, Response};
use crate::config::Route;
use crate::consts::CONTENT_TYPE_HTML;
use crate::service::GlobalState;
use crate::views::{EntryMetadata, EntryOrder};
use super::{
compress::compress_handler,
exception::{exception_handler_sync, io_exception_handler_sync},
};
pub async fn index_handler<'a>(
route: &'a Route,
reqpath: &'a str,
path: &'a Path,
meta: &'a fs::Metadata,
req: Request,
addr: &'a SocketAddr,
state: GlobalState,
) -> Result<Response, http::Error> {
if route.disable_index {
return exception_handler_sync(403, Some("index(directory) view is closed"), &req, addr);
}
match index_handler2(route, reqpath, path, meta, &req, addr, state).await {
Ok(resp) => resp,
Err(e) => {
error!("index_handler2 faield: {:?}", e);
io_exception_handler_sync(e, &req, addr)
}
}
}
/// Builds the directory-listing response: handles conditional requests
/// (`ETag` / `If-Modified-Since`), renders the listing HTML and hands GET
/// responses to the compression layer.
///
/// The inner `Result` is the HTTP outcome; the outer `io::Result` lets the
/// caller turn filesystem failures (e.g. unreadable directory) into an
/// error page.
pub async fn index_handler2<'a>(
    route: &'a Route,
    reqpath: &'a str,
    path: &'a Path,
    meta: &'a fs::Metadata,
    req: &'a Request,
    addr: &'a SocketAddr,
    state: ctxs::State,
) -> io::Result<Result<Response, http::Error>> {
    let mut resp = response();
    // The listing's sort order is taken from the URL query string.
    let entry_order = EntryOrder::new(req.uri().query());
    let cache_secs = state.config().cache_secs;

    if cache_secs > 0 {
        let last_modified = meta.modified()?;
        let last_modified: DateTime<Local> = last_modified.into();
        let http_last_modified = last_modified.to_rfc2822();

        // Weak ETag from dir size, mtime and sort order, e.g.:
        // W/"80-5d564a70.3797f8b1@Empty"
        let etag = format!(
            "W/\"{:x}-{:x}.{:x}@{}\"",
            meta.len(),
            last_modified.timestamp_millis(),
            last_modified.timestamp_subsec_nanos(),
            entry_order
        );

        let http_etag = req.headers().get_str(header::IF_NONE_MATCH);
        let if_modified_since = req
            .headers()
            .get_str_option(header::IF_MODIFIED_SINCE)
            .and_then(|v| DateTime::parse_from_rfc2822(v).ok())
            .map(|v| v.with_timezone(&Local));

        // The client is up to date when the ETag matches and, if an
        // If-Modified-Since header was sent, it agrees to the second
        // (an absent header counts as agreement).
        if etag.as_str() == http_etag
            && if_modified_since
                .map(|v| v.timestamp() == last_modified.timestamp())
                .unwrap_or(true)
        {
            // 304
            return Ok(resp.status(StatusCode::NOT_MODIFIED).body(Body::empty()));
        }

        resp = resp.header(header::CACHE_CONTROL, format!("public, max-age={}", cache_secs).as_str());
        resp = resp.header(header::LAST_MODIFIED, http_last_modified);
        resp = resp.header(header::ETAG, etag);
    }

    let html = render_html(addr, reqpath, path, &req, &entry_order, route)?;
    resp = resp.header(header::CONTENT_TYPE, CONTENT_TYPE_HTML);
    resp = resp.header(header::CONTENT_LENGTH, html.len());

    match *req.method() {
        Method::GET => {
            let compress_level = state.config().compress_level;
            Ok(compress_handler(req, addr, resp, html, compress_level).await)
        }
        // 204: curl -Lv -X HEAD "0.0.0.0:8000/src/main.rs"
        Method::HEAD => Ok(resp.status(StatusCode::NO_CONTENT).body(Body::empty())),
        // Routing upstream only dispatches GET/HEAD here — TODO confirm.
        _ => unreachable!(),
    }
}
use crate::tools::url_for_parent;
use crate::views::IndexTemplate;
use askama::Template;
/// Renders the directory-listing page for `index` as an HTML string.
///
/// `title` is used both as the page title and heading; entries are read and
/// sorted per `order`, honoring the route's `follow_links`/`show_hider`
/// settings.
pub fn render_html(
    remote_addr: &SocketAddr,
    title: &str,
    index: &Path,
    req: &Request,
    order: &EntryOrder,
    config: &Route,
) -> io::Result<String> {
    let entries =
        EntryMetadata::read_dir(index, config.follow_links, config.show_hider, order)?;
    // Clicking the sort header toggles to the next ordering.
    let next_order = order.next();
    let parent_link = url_for_parent(req.uri().path());

    let template = IndexTemplate::new(
        title,
        title,
        &parent_link,
        &remote_addr,
        next_order,
        entries.iter(),
        config.upload,
        config.mkdir,
    );

    Ok(template.render().unwrap())
}
| {
"content_hash": "c285c88c630dc0a6c6942ef5dc14932f",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 102,
"avg_line_length": 30.706766917293233,
"alnum_prop": 0.5815377081292851,
"repo_name": "biluohc/fht2p",
"id": "653cdb1801cb4cea565b788f025e4e4fb58220c3",
"size": "4086",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/handlers/index.rs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "863"
},
{
"name": "HTML",
"bytes": "2932"
},
{
"name": "Makefile",
"bytes": "1483"
},
{
"name": "PowerShell",
"bytes": "920"
},
{
"name": "Rust",
"bytes": "146997"
}
],
"symlink_target": ""
} |
package org.koenighotze.vavrplayground.demo.demo4;
import static java.lang.String.format;
import static java.lang.System.out;
/**
 * Minimal calculator used by the demo package; traces each operation to
 * standard output before returning the result.
 *
 * @author David Schmitz
 */
public class Calculator {

    /**
     * Adds two integers, printing a trace line of the operation first.
     *
     * @param a the first addend
     * @param b the second addend
     * @return the sum of {@code a} and {@code b}
     */
    public int add(int a, int b) {
        System.out.println(String.format("Adding %d and %d", a, b));
        return a + b;
    }
}
| {
"content_hash": "17b0adfbb0c2b56240491362225193d5",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 54,
"avg_line_length": 21.928571428571427,
"alnum_prop": 0.6547231270358306,
"repo_name": "koenighotze/javaslang-playground",
"id": "74286deca9007a92e0d34759e96a5d827e6d41ff",
"size": "307",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/org/koenighotze/vavrplayground/demo/demo4/Calculator.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "5006"
},
{
"name": "Java",
"bytes": "91485"
},
{
"name": "Shell",
"bytes": "7058"
}
],
"symlink_target": ""
} |
#include "curl_setup.h"
#if !defined(CURL_DISABLE_CRYPTO_AUTH)
#include <curl/curl.h>
#include "vauth/vauth.h"
#include "vauth/digest.h"
#include "urldata.h"
#include "curl_base64.h"
#include "curl_hmac.h"
#include "curl_md5.h"
#include "curl_sha256.h"
#include "vtls/vtls.h"
#include "warnless.h"
#include "strtok.h"
#include "strcase.h"
#include "non-ascii.h" /* included for Curl_convert_... prototypes */
#include "curl_printf.h"
#include "rand.h"
/* The last #include files should be: */
#include "curl_memory.h"
#include "memdebug.h"
#if !defined(USE_WINDOWS_SSPI)
#define DIGEST_QOP_VALUE_AUTH (1 << 0)
#define DIGEST_QOP_VALUE_AUTH_INT (1 << 1)
#define DIGEST_QOP_VALUE_AUTH_CONF (1 << 2)
#define DIGEST_QOP_VALUE_STRING_AUTH "auth"
#define DIGEST_QOP_VALUE_STRING_AUTH_INT "auth-int"
#define DIGEST_QOP_VALUE_STRING_AUTH_CONF "auth-conf"
/* The CURL_OUTPUT_DIGEST_CONV macro below is for non-ASCII machines.
It converts digest text to ASCII so the MD5 will be correct for
what ultimately goes over the network.
*/
#define CURL_OUTPUT_DIGEST_CONV(a, b) \
result = Curl_convert_to_network(a, (char *)b, strlen((const char *)b)); \
if(result) { \
free(b); \
return result; \
}
#endif /* !USE_WINDOWS_SSPI */
/* Extract one "name=value" or name="quoted value" pair from a Digest
   challenge at 'str'. 'value' receives the attribute name (at most
   DIGEST_MAX_VALUE_LENGTH-1 chars), 'content' the attribute value (at most
   DIGEST_MAX_CONTENT_LENGTH-1 chars, backslash escapes retained verbatim)
   and *endptr is left at the first unconsumed character.
   Returns TRUE when a pair was extracted, FALSE when no '=' follows the
   name. */
bool Curl_auth_digest_get_pair(const char *str, char *value, char *content,
                               const char **endptr)
{
  int c;
  bool starts_with_quote = FALSE;
  bool escape = FALSE;

  /* Copy the attribute name up to (but not including) the '=' */
  for(c = DIGEST_MAX_VALUE_LENGTH - 1; (*str && (*str != '=') && c--);)
    *value++ = *str++;
  *value = 0;

  if('=' != *str++)
    /* eek, no match */
    return FALSE;

  if('\"' == *str) {
    /* This starts with a quote so it must end with one as well! */
    str++;
    starts_with_quote = TRUE;
  }

  for(c = DIGEST_MAX_CONTENT_LENGTH - 1; *str && c--; str++) {
    switch(*str) {
    case '\\':
      if(!escape) {
        /* possibly the start of an escaped quote */
        escape = TRUE;
        *content++ = '\\'; /* Even though this is an escape character, we still
                              store it as-is in the target buffer */
        continue;
      }
      break;

    case ',':
      if(!starts_with_quote) {
        /* This signals the end of the content if we didn't get a starting
           quote and then we do "sloppy" parsing */
        c = 0; /* the end */
        continue;
      }
      break;

    case '\r':
    case '\n':
      /* end of string */
      c = 0;
      continue;

    case '\"':
      if(!escape && starts_with_quote) {
        /* end of string */
        c = 0;
        continue;
      }
      break;
    }

    /* Any other character resets the escape state and is copied through */
    escape = FALSE;
    *content++ = *str;
  }

  *content = 0;
  *endptr = str;

  return TRUE;
}
#if !defined(USE_WINDOWS_SSPI)
/* Convert an MD5 digest chunk to an RFC2617 (section 3.1.3) -suitable
   lowercase hex string. Each snprintf() writes two digits plus a NUL; the
   next iteration overwrites the previous terminator. */
static void auth_digest_md5_to_ascii(unsigned char *source, /* 16 bytes */
                                     unsigned char *dest) /* 33 bytes */
{
  int i = 0;
  while(i < 16) {
    snprintf((char *) &dest[2 * i], 3, "%02x", source[i]);
    i++;
  }
}
/* Convert a SHA-256 digest chunk to an RFC7616 -suitable lowercase hex
   string. Each snprintf() writes two digits plus a NUL; the next iteration
   overwrites the previous terminator. */
static void auth_digest_sha256_to_ascii(unsigned char *source, /* 32 bytes */
                                        unsigned char *dest) /* 65 bytes */
{
  int i = 0;
  while(i < 32) {
    snprintf((char *) &dest[2 * i], 3, "%02x", source[i]);
    i++;
  }
}
/* Escape '"' and '\' in 'source' for use inside an HTTP quoted-string, as
   described in RFC2616 and its errata. Returns a newly malloc()ed string,
   or NULL on allocation failure. */
static char *auth_digest_string_quoted(const char *source)
{
  const char *p;
  char *out;
  char *w;
  size_t needed = 1; /* room for the terminating NUL */

  /* First pass: measure the escaped length */
  for(p = source; *p; p++)
    needed += (*p == '"' || *p == '\\') ? 2 : 1;

  out = malloc(needed);
  if(out) {
    /* Second pass: copy, inserting a backslash before each special char */
    w = out;
    for(p = source; *p; p++) {
      if(*p == '"' || *p == '\\')
        *w++ = '\\';
      *w++ = *p;
    }
    *w = 0;
  }

  return out;
}
/* Look up 'key' in the challenge string 'chlg' and copy the text that
   follows it — up to 'end_char', end of input, or the buffer limit — into
   'value'. Returns TRUE when the key was found, FALSE otherwise. */
static bool auth_digest_get_key_value(const char *chlg,
                                      const char *key,
                                      char *value,
                                      size_t max_val_len,
                                      char end_char)
{
  char *src = strstr(chlg, key);
  size_t n = 0;

  if(!src)
    return FALSE;

  /* The value starts immediately after the key text */
  src += strlen(key);

  while(*src && *src != end_char && n < max_val_len - 1) {
    value[n] = *src++;
    n++;
  }
  value[n] = '\0';

  return TRUE;
}
/* Parse a comma-separated qop-options list into a bitmask of
   DIGEST_QOP_VALUE_* flags stored in *value. Unrecognized tokens are
   ignored. Returns CURLE_OK, or CURLE_OUT_OF_MEMORY if the working copy
   cannot be allocated. */
static CURLcode auth_digest_get_qop_values(const char *options, int *value)
{
  char *copy;
  char *item;
  char *state = NULL;

  /* Initialise the output */
  *value = 0;

  /* strtok_r() mutates its input, so tokenize a private duplicate */
  copy = strdup(options);
  if(!copy)
    return CURLE_OUT_OF_MEMORY;

  for(item = strtok_r(copy, ",", &state); item;
      item = strtok_r(NULL, ",", &state)) {
    if(strcasecompare(item, DIGEST_QOP_VALUE_STRING_AUTH))
      *value |= DIGEST_QOP_VALUE_AUTH;
    else if(strcasecompare(item, DIGEST_QOP_VALUE_STRING_AUTH_INT))
      *value |= DIGEST_QOP_VALUE_AUTH_INT;
    else if(strcasecompare(item, DIGEST_QOP_VALUE_STRING_AUTH_CONF))
      *value |= DIGEST_QOP_VALUE_AUTH_CONF;
  }

  free(copy);

  return CURLE_OK;
}
/*
 * auth_decode_digest_md5_message()
 *
 * This is used internally to decode an already encoded DIGEST-MD5 challenge
 * message into the separate attributes.
 *
 * Parameters:
 *
 * chlg64  [in]     - The base64 encoded challenge message.
 * nonce   [in/out] - The buffer where the nonce will be stored.
 * nlen    [in]     - The length of the nonce buffer.
 * realm   [in/out] - The buffer where the realm will be stored.
 * rlen    [in]     - The length of the realm buffer.
 * alg     [in/out] - The buffer where the algorithm will be stored.
 * alen    [in]     - The length of the algorithm buffer.
 * qop     [in/out] - The buffer where the qop-options will be stored.
 * qlen    [in]     - The length of the qop buffer.
 *
 * Returns CURLE_OK on success.
 */
static CURLcode auth_decode_digest_md5_message(const char *chlg64,
                                               char *nonce, size_t nlen,
                                               char *realm, size_t rlen,
                                               char *alg, size_t alen,
                                               char *qop, size_t qlen)
{
  CURLcode result = CURLE_OK;
  unsigned char *chlg = NULL;
  size_t chlglen = 0;
  size_t chlg64len = strlen(chlg64);

  /* Decode the base-64 encoded challenge message (input starting with '='
     cannot be valid base64 data, so it is treated as no challenge) */
  if(chlg64len && *chlg64 != '=') {
    result = Curl_base64_decode(chlg64, &chlg, &chlglen);
    if(result)
      return result;
  }

  /* Ensure we have a valid challenge message */
  if(!chlg)
    return CURLE_BAD_CONTENT_ENCODING;

  /* Retrieve nonce string from the challenge */
  if(!auth_digest_get_key_value((char *) chlg, "nonce=\"", nonce, nlen,
                                '\"')) {
    free(chlg);
    return CURLE_BAD_CONTENT_ENCODING;
  }

  /* Retrieve realm string from the challenge */
  if(!auth_digest_get_key_value((char *) chlg, "realm=\"", realm, rlen,
                                '\"')) {
    /* Challenge does not have a realm, set empty string [RFC2831] page 6 */
    strcpy(realm, "");
  }

  /* Retrieve algorithm string from the challenge (unquoted, comma ends it) */
  if(!auth_digest_get_key_value((char *) chlg, "algorithm=", alg, alen, ',')) {
    free(chlg);
    return CURLE_BAD_CONTENT_ENCODING;
  }

  /* Retrieve qop-options string from the challenge */
  if(!auth_digest_get_key_value((char *) chlg, "qop=\"", qop, qlen, '\"')) {
    free(chlg);
    return CURLE_BAD_CONTENT_ENCODING;
  }

  free(chlg);

  return CURLE_OK;
}
/*
 * Curl_auth_is_digest_supported()
 *
 * Reports whether DIGEST authentication is available in this build.
 *
 * Parameters: None
 *
 * Returns TRUE: this native implementation handles DIGEST unconditionally.
 */
bool Curl_auth_is_digest_supported(void)
{
  return TRUE;
}
/*
 * Curl_auth_create_digest_md5_message()
 *
 * This is used to generate an already encoded DIGEST-MD5 response message
 * ready for sending to the recipient.
 *
 * Parameters:
 *
 * data    [in]     - The session handle.
 * chlg64  [in]     - The base64 encoded challenge message.
 * userp   [in]     - The user name.
 * passdwp [in]     - The user's password.
 * service [in]     - The service type such as http, smtp, pop or imap.
 * outptr  [in/out] - The address where a pointer to newly allocated memory
 *                    holding the result will be stored upon completion.
 * outlen  [out]    - The length of the output message.
 *
 * Returns CURLE_OK on success.
 */
CURLcode Curl_auth_create_digest_md5_message(struct Curl_easy *data,
                                             const char *chlg64,
                                             const char *userp,
                                             const char *passwdp,
                                             const char *service,
                                             char **outptr, size_t *outlen)
{
  CURLcode result = CURLE_OK;
  size_t i;
  MD5_context *ctxt;
  char *response = NULL;
  unsigned char digest[MD5_DIGEST_LEN];
  char HA1_hex[2 * MD5_DIGEST_LEN + 1];
  char HA2_hex[2 * MD5_DIGEST_LEN + 1];
  char resp_hash_hex[2 * MD5_DIGEST_LEN + 1];
  char nonce[64];
  char realm[128];
  char algorithm[64];
  char qop_options[64];
  int qop_values;
  char cnonce[33];
  char nonceCount[] = "00000001";
  char method[] = "AUTHENTICATE";
  char qop[] = DIGEST_QOP_VALUE_STRING_AUTH;
  char *spn = NULL;

  /* Decode the challenge message */
  result = auth_decode_digest_md5_message(chlg64, nonce, sizeof(nonce),
                                          realm, sizeof(realm),
                                          algorithm, sizeof(algorithm),
                                          qop_options, sizeof(qop_options));
  if(result)
    return result;

  /* We only support md5 sessions */
  if(strcmp(algorithm, "md5-sess") != 0)
    return CURLE_BAD_CONTENT_ENCODING;

  /* Get the qop-values from the qop-options */
  result = auth_digest_get_qop_values(qop_options, &qop_values);
  if(result)
    return result;

  /* We only support auth quality-of-protection */
  if(!(qop_values & DIGEST_QOP_VALUE_AUTH))
    return CURLE_BAD_CONTENT_ENCODING;

  /* Generate 32 random hex chars, 32 bytes + 1 zero termination */
  result = Curl_rand_hex(data, (unsigned char *)cnonce, sizeof(cnonce));
  if(result)
    return result;

  /* So far so good, now calculate A1 and H(A1) according to RFC 2831 */
  /* First hash the user credentials: H(username:realm:password) */
  ctxt = Curl_MD5_init(Curl_DIGEST_MD5);
  if(!ctxt)
    return CURLE_OUT_OF_MEMORY;

  Curl_MD5_update(ctxt, (const unsigned char *) userp,
                  curlx_uztoui(strlen(userp)));
  Curl_MD5_update(ctxt, (const unsigned char *) ":", 1);
  Curl_MD5_update(ctxt, (const unsigned char *) realm,
                  curlx_uztoui(strlen(realm)));
  Curl_MD5_update(ctxt, (const unsigned char *) ":", 1);
  Curl_MD5_update(ctxt, (const unsigned char *) passwdp,
                  curlx_uztoui(strlen(passwdp)));
  Curl_MD5_final(ctxt, digest);

  /* Then combine that digest with the nonce and client nonce */
  ctxt = Curl_MD5_init(Curl_DIGEST_MD5);
  if(!ctxt)
    return CURLE_OUT_OF_MEMORY;

  Curl_MD5_update(ctxt, (const unsigned char *) digest, MD5_DIGEST_LEN);
  Curl_MD5_update(ctxt, (const unsigned char *) ":", 1);
  Curl_MD5_update(ctxt, (const unsigned char *) nonce,
                  curlx_uztoui(strlen(nonce)));
  Curl_MD5_update(ctxt, (const unsigned char *) ":", 1);
  Curl_MD5_update(ctxt, (const unsigned char *) cnonce,
                  curlx_uztoui(strlen(cnonce)));
  Curl_MD5_final(ctxt, digest);

  /* Convert calculated 16 octet hex into 32 bytes string */
  for(i = 0; i < MD5_DIGEST_LEN; i++)
    snprintf(&HA1_hex[2 * i], 3, "%02x", digest[i]);

  /* Generate our SPN */
  spn = Curl_auth_build_spn(service, realm, NULL);
  if(!spn)
    return CURLE_OUT_OF_MEMORY;

  /* Calculate H(A2) */
  ctxt = Curl_MD5_init(Curl_DIGEST_MD5);
  if(!ctxt) {
    free(spn);
    return CURLE_OUT_OF_MEMORY;
  }

  Curl_MD5_update(ctxt, (const unsigned char *) method,
                  curlx_uztoui(strlen(method)));
  Curl_MD5_update(ctxt, (const unsigned char *) ":", 1);
  Curl_MD5_update(ctxt, (const unsigned char *) spn,
                  curlx_uztoui(strlen(spn)));
  Curl_MD5_final(ctxt, digest);

  for(i = 0; i < MD5_DIGEST_LEN; i++)
    snprintf(&HA2_hex[2 * i], 3, "%02x", digest[i]);

  /* Now calculate the response hash:
     H(HA1:nonce:nc:cnonce:qop:HA2) */
  ctxt = Curl_MD5_init(Curl_DIGEST_MD5);
  if(!ctxt) {
    free(spn);
    return CURLE_OUT_OF_MEMORY;
  }

  Curl_MD5_update(ctxt, (const unsigned char *) HA1_hex, 2 * MD5_DIGEST_LEN);
  Curl_MD5_update(ctxt, (const unsigned char *) ":", 1);
  Curl_MD5_update(ctxt, (const unsigned char *) nonce,
                  curlx_uztoui(strlen(nonce)));
  Curl_MD5_update(ctxt, (const unsigned char *) ":", 1);

  Curl_MD5_update(ctxt, (const unsigned char *) nonceCount,
                  curlx_uztoui(strlen(nonceCount)));
  Curl_MD5_update(ctxt, (const unsigned char *) ":", 1);
  Curl_MD5_update(ctxt, (const unsigned char *) cnonce,
                  curlx_uztoui(strlen(cnonce)));
  Curl_MD5_update(ctxt, (const unsigned char *) ":", 1);
  Curl_MD5_update(ctxt, (const unsigned char *) qop,
                  curlx_uztoui(strlen(qop)));
  Curl_MD5_update(ctxt, (const unsigned char *) ":", 1);

  Curl_MD5_update(ctxt, (const unsigned char *) HA2_hex, 2 * MD5_DIGEST_LEN);
  Curl_MD5_final(ctxt, digest);

  for(i = 0; i < MD5_DIGEST_LEN; i++)
    snprintf(&resp_hash_hex[2 * i], 3, "%02x", digest[i]);

  /* Generate the response */
  response = aprintf("username=\"%s\",realm=\"%s\",nonce=\"%s\","
                     "cnonce=\"%s\",nc=\"%s\",digest-uri=\"%s\",response=%s,"
                     "qop=%s",
                     userp, realm, nonce,
                     cnonce, nonceCount, spn, resp_hash_hex, qop);
  free(spn);

  if(!response)
    return CURLE_OUT_OF_MEMORY;

  /* Base64 encode the response */
  result = Curl_base64_encode(data, response, 0, outptr, outlen);

  free(response);

  return result;
}
/*
 * Curl_auth_decode_digest_http_message()
 *
 * This is used to decode a HTTP DIGEST challenge message into the separate
 * attributes.
 *
 * Parameters:
 *
 * chlg    [in]     - The challenge message.
 * digest  [in/out] - The digest data struct being used and modified.
 *
 * Returns CURLE_OK on success.
 */
CURLcode Curl_auth_decode_digest_http_message(const char *chlg,
                                              struct digestdata *digest)
{
  bool before = FALSE; /* got a nonce before */
  bool foundAuth = FALSE;
  bool foundAuthInt = FALSE;
  char *token = NULL;
  char *tmp = NULL;

  /* If we already have received a nonce, keep that in mind */
  if(digest->nonce)
    before = TRUE;

  /* Clean up any former leftovers and initialise to defaults */
  Curl_auth_digest_cleanup(digest);

  /* Walk the comma-separated attribute list, one name=value pair at a
     time, copying each recognized attribute into the digest struct */
  for(;;) {
    char value[DIGEST_MAX_VALUE_LENGTH];
    char content[DIGEST_MAX_CONTENT_LENGTH];

    /* Pass all additional spaces here */
    while(*chlg && ISSPACE(*chlg))
      chlg++;

    /* Extract a value=content pair */
    if(Curl_auth_digest_get_pair(chlg, value, content, &chlg)) {
      if(strcasecompare(value, "nonce")) {
        free(digest->nonce);
        digest->nonce = strdup(content);
        if(!digest->nonce)
          return CURLE_OUT_OF_MEMORY;
      }
      else if(strcasecompare(value, "stale")) {
        if(strcasecompare(content, "true")) {
          digest->stale = TRUE;
          digest->nc = 1; /* we make a new nonce now */
        }
      }
      else if(strcasecompare(value, "realm")) {
        free(digest->realm);
        digest->realm = strdup(content);
        if(!digest->realm)
          return CURLE_OUT_OF_MEMORY;
      }
      else if(strcasecompare(value, "opaque")) {
        free(digest->opaque);
        digest->opaque = strdup(content);
        if(!digest->opaque)
          return CURLE_OUT_OF_MEMORY;
      }
      else if(strcasecompare(value, "qop")) {
        char *tok_buf = NULL;
        /* Tokenize the list and choose auth if possible, use a temporary
           clone of the buffer since strtok_r() ruins it */
        tmp = strdup(content);
        if(!tmp)
          return CURLE_OUT_OF_MEMORY;

        token = strtok_r(tmp, ",", &tok_buf);
        while(token != NULL) {
          if(strcasecompare(token, DIGEST_QOP_VALUE_STRING_AUTH)) {
            foundAuth = TRUE;
          }
          else if(strcasecompare(token, DIGEST_QOP_VALUE_STRING_AUTH_INT)) {
            foundAuthInt = TRUE;
          }
          token = strtok_r(NULL, ",", &tok_buf);
        }

        free(tmp);

        /* Select only auth or auth-int. Otherwise, ignore */
        if(foundAuth) {
          free(digest->qop);
          digest->qop = strdup(DIGEST_QOP_VALUE_STRING_AUTH);
          if(!digest->qop)
            return CURLE_OUT_OF_MEMORY;
        }
        else if(foundAuthInt) {
          free(digest->qop);
          digest->qop = strdup(DIGEST_QOP_VALUE_STRING_AUTH_INT);
          if(!digest->qop)
            return CURLE_OUT_OF_MEMORY;
        }
      }
      else if(strcasecompare(value, "algorithm")) {
        free(digest->algorithm);
        digest->algorithm = strdup(content);
        if(!digest->algorithm)
          return CURLE_OUT_OF_MEMORY;

        /* Map the algorithm name onto the internal enum; anything else is
           rejected outright */
        if(strcasecompare(content, "MD5-sess"))
          digest->algo = CURLDIGESTALGO_MD5SESS;
        else if(strcasecompare(content, "MD5"))
          digest->algo = CURLDIGESTALGO_MD5;
        else if(strcasecompare(content, "SHA-256"))
          digest->algo = CURLDIGESTALGO_SHA256;
        else if(strcasecompare(content, "SHA-256-SESS"))
          digest->algo = CURLDIGESTALGO_SHA256SESS;
        else if(strcasecompare(content, "SHA-512-256"))
          digest->algo = CURLDIGESTALGO_SHA512_256;
        else if(strcasecompare(content, "SHA-512-256-SESS"))
          digest->algo = CURLDIGESTALGO_SHA512_256SESS;
        else
          return CURLE_BAD_CONTENT_ENCODING;
      }
      else if(strcasecompare(value, "userhash")) {
        if(strcasecompare(content, "true")) {
          digest->userhash = TRUE;
        }
      }
      else {
        /* Unknown specifier, ignore it! */
      }
    }
    else
      break; /* We're done here */

    /* Pass all additional spaces here */
    while(*chlg && ISSPACE(*chlg))
      chlg++;

    /* Allow the list to be comma-separated */
    if(',' == *chlg)
      chlg++;
  }

  /* We had a nonce since before, and we got another one now without
     'stale=true'. This means we provided bad credentials in the previous
     request */
  if(before && !digest->stale)
    return CURLE_BAD_CONTENT_ENCODING;

  /* We got this header without a nonce, that's a bad Digest line! */
  if(!digest->nonce)
    return CURLE_BAD_CONTENT_ENCODING;

  return CURLE_OK;
}
/*
 * _Curl_auth_create_digest_http_message()
 *
 * This is used to generate a HTTP DIGEST response message ready for sending
 * to the recipient. The caller supplies the hash function and the
 * digest-to-hex converter so the same logic serves MD5 and the SHA-2
 * families.
 *
 * Parameters:
 *
 * data    [in]     - The session handle.
 * userp   [in]     - The user name.
 * passdwp [in]     - The user's password.
 * request [in]     - The HTTP request.
 * uripath [in]     - The path of the HTTP uri.
 * digest  [in/out] - The digest data struct being used and modified.
 * outptr  [in/out] - The address where a pointer to newly allocated memory
 *                    holding the result will be stored upon completion.
 * outlen  [out]    - The length of the output message.
 *
 * Returns CURLE_OK on success.
 */
static CURLcode _Curl_auth_create_digest_http_message(
                  struct Curl_easy *data,
                  const char *userp,
                  const char *passwdp,
                  const unsigned char *request,
                  const unsigned char *uripath,
                  struct digestdata *digest,
                  char **outptr, size_t *outlen,
                  void (*convert_to_ascii)(unsigned char *, unsigned char *),
                  void (*hash)(unsigned char *, const unsigned char *))
{
  CURLcode result;
  unsigned char hashbuf[32]; /* 32 bytes/256 bits */
  unsigned char request_digest[65];
  unsigned char *hashthis;
  unsigned char ha1[65];    /* 64 digits and 1 zero byte */
  unsigned char ha2[65];    /* 64 digits and 1 zero byte */
  char userh[65];
  char cnoncebuf[33];
  char *cnonce = NULL;
  size_t cnonce_sz = 0;
  char *userp_quoted;
  char *response = NULL;
  char *tmp = NULL;

  if(!digest->nc)
    digest->nc = 1;

  /* Generate a client nonce on the first use of this challenge */
  if(!digest->cnonce) {
    result = Curl_rand_hex(data, (unsigned char *)cnoncebuf,
                           sizeof(cnoncebuf));
    if(result)
      return result;

    result = Curl_base64_encode(data, cnoncebuf, strlen(cnoncebuf),
                                &cnonce, &cnonce_sz);
    if(result)
      return result;

    digest->cnonce = cnonce;
  }

  /* RFC7616 userhash: the username sent is H(username:realm) */
  if(digest->userhash) {
    hashthis = (unsigned char *) aprintf("%s:%s", userp, digest->realm);
    if(!hashthis)
      return CURLE_OUT_OF_MEMORY;

    CURL_OUTPUT_DIGEST_CONV(data, hashthis);
    hash(hashbuf, hashthis);
    free(hashthis);
    convert_to_ascii(hashbuf, (unsigned char *)userh);
  }

  /*
    If the algorithm is "MD5" or unspecified (which then defaults to MD5):

      A1 = unq(username-value) ":" unq(realm-value) ":" passwd

    If the algorithm is "MD5-sess" then:

      A1 = H(unq(username-value) ":" unq(realm-value) ":" passwd) ":"
           unq(nonce-value) ":" unq(cnonce-value)
  */

  hashthis = (unsigned char *)
    aprintf("%s:%s:%s", digest->userhash ? userh : userp,
            digest->realm, passwdp);
  if(!hashthis)
    return CURLE_OUT_OF_MEMORY;

  CURL_OUTPUT_DIGEST_CONV(data, hashthis); /* convert on non-ASCII machines */
  hash(hashbuf, hashthis);
  free(hashthis);
  convert_to_ascii(hashbuf, ha1);

  if(digest->algo == CURLDIGESTALGO_MD5SESS ||
     digest->algo == CURLDIGESTALGO_SHA256SESS ||
     digest->algo == CURLDIGESTALGO_SHA512_256SESS) {
    /* nonce and cnonce are OUTSIDE the hash */
    tmp = aprintf("%s:%s:%s", ha1, digest->nonce, digest->cnonce);
    if(!tmp)
      return CURLE_OUT_OF_MEMORY;

    CURL_OUTPUT_DIGEST_CONV(data, tmp); /* Convert on non-ASCII machines */
    hash(hashbuf, (unsigned char *) tmp);
    free(tmp);
    convert_to_ascii(hashbuf, ha1);
  }

  /*
    If the "qop" directive's value is "auth" or is unspecified, then A2 is:

      A2 = Method ":" digest-uri-value

    If the "qop" value is "auth-int", then A2 is:

      A2 = Method ":" digest-uri-value ":" H(entity-body)

    (The "Method" value is the HTTP request method as specified in section
    5.1.1 of RFC 2616)
  */

  hashthis = (unsigned char *) aprintf("%s:%s", request, uripath);

  if(digest->qop && strcasecompare(digest->qop, "auth-int")) {
    /* We don't support auth-int for PUT or POST at the moment.
       TODO: replace hash of empty string with entity-body for PUT/POST */
    char hashed[65];
    unsigned char *hashthis2;

    hash(hashbuf, (const unsigned char *)"");
    convert_to_ascii(hashbuf, (unsigned char *)hashed);

    hashthis2 = (unsigned char *)aprintf("%s:%s", hashthis, hashed);
    free(hashthis);
    hashthis = hashthis2;
  }

  /* (hashthis may also be NULL here when the first aprintf() failed) */
  if(!hashthis)
    return CURLE_OUT_OF_MEMORY;

  CURL_OUTPUT_DIGEST_CONV(data, hashthis); /* convert on non-ASCII machines */
  hash(hashbuf, hashthis);
  free(hashthis);
  convert_to_ascii(hashbuf, ha2);

  /* response = H(HA1 ":" nonce [":" nc ":" cnonce ":" qop] ":" HA2) */
  if(digest->qop) {
    hashthis = (unsigned char *) aprintf("%s:%s:%08x:%s:%s:%s",
                                         ha1,
                                         digest->nonce,
                                         digest->nc,
                                         digest->cnonce,
                                         digest->qop,
                                         ha2);
  }
  else {
    hashthis = (unsigned char *) aprintf("%s:%s:%s",
                                         ha1,
                                         digest->nonce,
                                         ha2);
  }

  if(!hashthis)
    return CURLE_OUT_OF_MEMORY;

  CURL_OUTPUT_DIGEST_CONV(data, hashthis); /* convert on non-ASCII machines */
  hash(hashbuf, hashthis);
  free(hashthis);
  convert_to_ascii(hashbuf, request_digest);

  /* For test case 64 (snooped from a Mozilla 1.3a request)

     Authorization: Digest username="testuser", realm="testrealm", \
     nonce="1053604145", uri="/64", response="c55f7f30d83d774a3d2dcacf725abaca"

     Digest parameters are all quoted strings. Username which is provided by
     the user will need double quotes and backslashes within it escaped. For
     the other fields, this shouldn't be an issue. realm, nonce, and opaque
     are copied as is from the server, escapes and all. cnonce is generated
     with web-safe characters. uri is already percent encoded. nc is 8 hex
     characters. algorithm and qop with standard values only contain web-safe
     characters.
  */
  userp_quoted = auth_digest_string_quoted(digest->userhash ? userh : userp);
  if(!userp_quoted)
    return CURLE_OUT_OF_MEMORY;

  if(digest->qop) {
    response = aprintf("username=\"%s\", "
                       "realm=\"%s\", "
                       "nonce=\"%s\", "
                       "uri=\"%s\", "
                       "cnonce=\"%s\", "
                       "nc=%08x, "
                       "qop=%s, "
                       "response=\"%s\"",
                       userp_quoted,
                       digest->realm,
                       digest->nonce,
                       uripath,
                       digest->cnonce,
                       digest->nc,
                       digest->qop,
                       request_digest);

    if(strcasecompare(digest->qop, "auth"))
      digest->nc++; /* The nc (from RFC) has to be a 8 hex digit number 0
                       padded which tells to the server how many times you are
                       using the same nonce in the qop=auth mode */
  }
  else {
    response = aprintf("username=\"%s\", "
                       "realm=\"%s\", "
                       "nonce=\"%s\", "
                       "uri=\"%s\", "
                       "response=\"%s\"",
                       userp_quoted,
                       digest->realm,
                       digest->nonce,
                       uripath,
                       request_digest);
  }
  free(userp_quoted);
  if(!response)
    return CURLE_OUT_OF_MEMORY;

  /* Add the optional fields */
  if(digest->opaque) {
    /* Append the opaque */
    tmp = aprintf("%s, opaque=\"%s\"", response, digest->opaque);
    free(response);
    if(!tmp)
      return CURLE_OUT_OF_MEMORY;

    response = tmp;
  }

  if(digest->algorithm) {
    /* Append the algorithm */
    tmp = aprintf("%s, algorithm=\"%s\"", response, digest->algorithm);
    free(response);
    if(!tmp)
      return CURLE_OUT_OF_MEMORY;

    response = tmp;
  }

  if(digest->userhash) {
    /* Append the userhash */
    tmp = aprintf("%s, userhash=true", response);
    free(response);
    if(!tmp)
      return CURLE_OUT_OF_MEMORY;

    response = tmp;
  }

  /* Return the output */
  *outptr = response;
  *outlen = strlen(response);

  return CURLE_OK;
}
/*
 * Curl_auth_create_digest_http_message()
 *
 * Generates a HTTP DIGEST response message ready for sending to the
 * recipient, dispatching to the shared builder with the hash routines that
 * match the negotiated algorithm family.
 *
 * Parameters:
 *
 * data    [in]     - The session handle.
 * userp   [in]     - The user name.
 * passdwp [in]     - The user's password.
 * request [in]     - The HTTP request.
 * uripath [in]     - The path of the HTTP uri.
 * digest  [in/out] - The digest data struct being used and modified.
 * outptr  [in/out] - The address where a pointer to newly allocated memory
 *                    holding the result will be stored upon completion.
 * outlen  [out]    - The length of the output message.
 *
 * Returns CURLE_OK on success.
 */
CURLcode Curl_auth_create_digest_http_message(struct Curl_easy *data,
                                              const char *userp,
                                              const char *passwdp,
                                              const unsigned char *request,
                                              const unsigned char *uripath,
                                              struct digestdata *digest,
                                              char **outptr, size_t *outlen)
{
  /* MD5 family */
  if(digest->algo == CURLDIGESTALGO_MD5 ||
     digest->algo == CURLDIGESTALGO_MD5SESS)
    return _Curl_auth_create_digest_http_message(data, userp, passwdp,
                                                 request, uripath, digest,
                                                 outptr, outlen,
                                                 auth_digest_md5_to_ascii,
                                                 Curl_md5it);

  /* SHA-256 / SHA-512-256 family */
  if(digest->algo == CURLDIGESTALGO_SHA256 ||
     digest->algo == CURLDIGESTALGO_SHA256SESS ||
     digest->algo == CURLDIGESTALGO_SHA512_256 ||
     digest->algo == CURLDIGESTALGO_SHA512_256SESS)
    return _Curl_auth_create_digest_http_message(data, userp, passwdp,
                                                 request, uripath, digest,
                                                 outptr, outlen,
                                                 auth_digest_sha256_to_ascii,
                                                 Curl_sha256it);

  return CURLE_UNSUPPORTED_PROTOCOL;
}
/*
 * Curl_auth_digest_cleanup()
 *
 * Releases all digest state and restores the struct to its defaults so it
 * can be reused for a fresh challenge.
 *
 * Parameters:
 *
 * digest  [in/out] - The digest data struct being cleaned up.
 *
 */
void Curl_auth_digest_cleanup(struct digestdata *digest)
{
  /* Drop every owned string; Curl_safefree() also NULLs the pointers */
  Curl_safefree(digest->algorithm);
  Curl_safefree(digest->qop);
  Curl_safefree(digest->opaque);
  Curl_safefree(digest->realm);
  Curl_safefree(digest->cnonce);
  Curl_safefree(digest->nonce);

  /* Reset the scalar state to its defaults */
  digest->nc = 0;
  digest->algo = CURLDIGESTALGO_MD5; /* default algorithm */
  digest->stale = FALSE;             /* default means normal, not stale */
  digest->userhash = FALSE;
}
#endif /* !USE_WINDOWS_SSPI */
#endif /* CURL_DISABLE_CRYPTO_AUTH */
| {
"content_hash": "c5422a0e519cd7aa2890b41acfb45b3f",
"timestamp": "",
"source": "github",
"line_count": 980,
"max_line_length": 79,
"avg_line_length": 30.84285714285714,
"alnum_prop": 0.5640177330774829,
"repo_name": "draede/cx",
"id": "131d9da8c45a299d4202050f40bfd7462a3d77db",
"size": "31350",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "Contrib/cURL/Src/vauth/digest.c",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "4691"
},
{
"name": "C",
"bytes": "15739837"
},
{
"name": "C++",
"bytes": "4586671"
},
{
"name": "Makefile",
"bytes": "11584"
}
],
"symlink_target": ""
} |
"use strict";
// TypeScript-emitted helper for downlevel `extends`: links both the static
// side (via setPrototypeOf, __proto__ assignment, or own-property copy as a
// last resort) and the instance prototype chain of subclass d to base b.
var __extends = (this && this.__extends) || (function () {
    var extendStatics = function (d, b) {
        // Pick the best mechanism available in this runtime, then cache it
        // by reassigning extendStatics for subsequent calls.
        extendStatics = Object.setPrototypeOf ||
            ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
            function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
        return extendStatics(d, b);
    };
    return function (d, b) {
        extendStatics(d, b);
        // Temporary constructor so instances of d inherit from b.prototype
        // while keeping d as their .constructor.
        function __() { this.constructor = d; }
        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
    };
})();
// TypeScript-emitted helper: applies an array of decorators to a class
// (c < 3 args), or to a method/property described by `desc` (c > 3),
// preferring the Reflect.decorate metadata API when present and otherwise
// folding the decorators right-to-left by hand.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
Object.defineProperty(exports, "__esModule", { value: true });
var core_1 = require("@ag-grid-community/core");
var core_2 = require("./files/ooxml/core");
var contentTypes_1 = require("./files/ooxml/contentTypes");
var office_1 = require("./files/ooxml/themes/office");
var sharedStrings_1 = require("./files/ooxml/sharedStrings");
var stylesheet_1 = require("./files/ooxml/styles/stylesheet");
var workbook_1 = require("./files/ooxml/workbook");
var worksheet_1 = require("./files/ooxml/worksheet");
var relationships_1 = require("./files/ooxml/relationships");
/**
* See https://www.ecma-international.org/news/TC45_current_work/OpenXML%20White%20Paper.pdf
*/
var ExcelXlsxFactory = /** @class */ (function (_super) {
    __extends(ExcelXlsxFactory, _super);
    function ExcelXlsxFactory() {
        // Inherit BeanStub construction; sharedStrings accumulates the strings
        // later serialized into sharedStrings.xml.
        var _this = _super !== null && _super.apply(this, arguments) || this;
        _this.sharedStrings = [];
        return _this;
    }
    // Builds the sharedStrings.xml part from the strings stored by createExcel().
    ExcelXlsxFactory.prototype.createSharedStrings = function () {
        return this.createXmlPart(sharedStrings_1.default.getTemplate(this.sharedStrings));
    };
    // Serializes an XML body and prepends a standalone UTF-8 XML declaration.
    ExcelXlsxFactory.prototype.createXmlPart = function (body) {
        var header = this.xmlFactory.createHeader({
            encoding: 'UTF-8',
            standalone: 'yes'
        });
        var xmlBody = this.xmlFactory.createXml(body);
        return "" + header + xmlBody;
    };
    // Entry point: records shared strings and sheet names, registers the cell
    // styles, and returns the worksheet XML part.
    ExcelXlsxFactory.prototype.createExcel = function (styles, worksheets, sharedStrings) {
        if (sharedStrings === void 0) { sharedStrings = []; }
        this.sharedStrings = sharedStrings;
        this.sheetNames = worksheets.map(function (worksheet) { return worksheet.name; });
        stylesheet_1.registerStyles(styles);
        return this.createWorksheet(worksheets);
    };
    // docProps/core.xml (document properties).
    ExcelXlsxFactory.prototype.createCore = function () {
        return this.createXmlPart(core_2.default.getTemplate());
    };
    // [Content_Types].xml for the OOXML package.
    ExcelXlsxFactory.prototype.createContentTypes = function () {
        return this.createXmlPart(contentTypes_1.default.getTemplate());
    };
    // Package-level _rels/.rels: points at the workbook and core properties.
    ExcelXlsxFactory.prototype.createRels = function () {
        var rs = relationships_1.default.getTemplate([{
                Id: 'rId1',
                Type: 'http://schemas.openxmlformats.org/officeDocument/2006/relationships/officeDocument',
                Target: 'xl/workbook.xml'
            }, {
                Id: 'rId2',
                Type: 'http://schemas.openxmlformats.org/package/2006/relationships/metadata/core-properties',
                Target: 'docProps/core.xml'
            }]);
        return this.createXmlPart(rs);
    };
    // xl/styles.xml from the styles registered in createExcel().
    ExcelXlsxFactory.prototype.createStylesheet = function () {
        return this.createXmlPart(stylesheet_1.default.getTemplate());
    };
    // xl/theme/theme1.xml (Office default theme).
    ExcelXlsxFactory.prototype.createTheme = function () {
        return this.createXmlPart(office_1.default.getTemplate());
    };
    // xl/workbook.xml listing the sheet names captured in createExcel().
    ExcelXlsxFactory.prototype.createWorkbook = function () {
        return this.createXmlPart(workbook_1.default.getTemplate(this.sheetNames));
    };
    // Workbook-level rels: worksheet, theme, styles and shared strings.
    // NOTE(review): only sheet1.xml is referenced here, matching createWorksheet
    // below which renders just the first worksheet.
    ExcelXlsxFactory.prototype.createWorkbookRels = function () {
        var rs = relationships_1.default.getTemplate([{
                Id: 'rId1',
                Type: 'http://schemas.openxmlformats.org/officeDocument/2006/relationships/worksheet',
                Target: 'worksheets/sheet1.xml'
            }, {
                Id: 'rId2',
                Type: 'http://schemas.openxmlformats.org/officeDocument/2006/relationships/theme',
                Target: 'theme/theme1.xml'
            }, {
                Id: 'rId3',
                Type: 'http://schemas.openxmlformats.org/officeDocument/2006/relationships/styles',
                Target: 'styles.xml'
            }, {
                Id: 'rId4',
                Type: 'http://schemas.openxmlformats.org/officeDocument/2006/relationships/sharedStrings',
                Target: 'sharedStrings.xml'
            }]);
        return this.createXmlPart(rs);
    };
    // Renders only worksheets[0]; additional worksheets are ignored here.
    ExcelXlsxFactory.prototype.createWorksheet = function (worksheets) {
        return this.createXmlPart(worksheet_1.default.getTemplate(worksheets[0]));
    };
    __decorate([
        core_1.Autowired('xmlFactory')
    ], ExcelXlsxFactory.prototype, "xmlFactory", void 0);
    ExcelXlsxFactory = __decorate([
        core_1.Bean('excelXlsxFactory')
    ], ExcelXlsxFactory);
    return ExcelXlsxFactory;
}(core_1.BeanStub));
exports.ExcelXlsxFactory = ExcelXlsxFactory;
//# sourceMappingURL=excelXlsxFactory.js.map | {
"content_hash": "5f7761fa89ff04236d8cd80ad977d38e",
"timestamp": "",
"source": "github",
"line_count": 118,
"max_line_length": 150,
"avg_line_length": 47.983050847457626,
"alnum_prop": 0.6232779936418227,
"repo_name": "ceolter/ag-grid",
"id": "25ccdfab491ac04d7d046457b54e72827153e891",
"size": "5662",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "enterprise-modules/excel-export/dist/cjs/excelExport/excelXlsxFactory.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "37765"
},
{
"name": "JavaScript",
"bytes": "22118"
},
{
"name": "TypeScript",
"bytes": "1267988"
}
],
"symlink_target": ""
} |
using System;
namespace NServiceBus.SagaPersisters.RavenDB.Attributes
{
    /// <summary>
    /// Empty marker attribute duplicated for the RavenDB saga persister.
    /// Carries no members of its own; it exists only so RavenDB saga types can
    /// be annotated without referencing the NHibernate namespace (see TODO).
    /// </summary>
    public class UniqueAttribute : Attribute
    {
        //TODO: remove this class when the original Unique attribute is moved from NHibernate namespace
    }
}
"content_hash": "ac2e52f2b11b0ca4a2c3e57f11ae0d07",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 103,
"avg_line_length": 26.444444444444443,
"alnum_prop": 0.7394957983193278,
"repo_name": "ianbattersby/NServiceBus-Contrib",
"id": "61328bf304cfd77f72df9e77cfc2262d25fc906f",
"size": "238",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/SagaPersisters/RavenDB/NServiceBus.SagaPersisters.RavenDB/UniqueAttribute.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "76669"
}
],
"symlink_target": ""
} |
package org.apereo.cas.ticket;
import org.apereo.cas.config.CasCoreAuthenticationMetadataConfiguration;
import org.apereo.cas.config.CasCoreHttpConfiguration;
import org.apereo.cas.config.CasCoreNotificationsConfiguration;
import org.apereo.cas.config.CasCoreServicesAuthenticationConfiguration;
import org.apereo.cas.config.CasCoreServicesConfiguration;
import org.apereo.cas.config.CasCoreTicketCatalogConfiguration;
import org.apereo.cas.config.CasCoreTicketIdGeneratorsConfiguration;
import org.apereo.cas.config.CasCoreTicketsConfiguration;
import org.apereo.cas.config.CasCoreTicketsSerializationConfiguration;
import org.apereo.cas.config.CasCoreUtilConfiguration;
import org.apereo.cas.config.CasDefaultServiceTicketIdGeneratorsConfiguration;
import org.apereo.cas.config.support.CasWebApplicationServiceFactoryConfiguration;
import org.apereo.cas.services.RegisteredServiceTestUtils;
import org.apereo.cas.ticket.proxy.ProxyGrantingTicket;
import org.apereo.cas.ticket.proxy.ProxyGrantingTicketFactory;
import org.apereo.cas.ticket.proxy.ProxyTicket;
import org.apereo.cas.ticket.proxy.ProxyTicketFactory;
import org.apereo.cas.ticket.serialization.TicketSerializationManager;
import org.apereo.cas.util.CollectionUtils;
import lombok.val;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.cloud.autoconfigure.RefreshAutoConfiguration;
import static org.junit.jupiter.api.Assertions.*;
/**
 * This is {@link TicketSerializersTests}: verifies that every CAS ticket type
 * survives a serialize/deserialize round trip through the
 * {@link TicketSerializationManager} unchanged.
 *
 * @author Misagh Moayyed
 * @since 6.0.0
 */
@SpringBootTest(classes = {
    RefreshAutoConfiguration.class,
    CasCoreUtilConfiguration.class,
    CasCoreTicketsConfiguration.class,
    CasCoreTicketIdGeneratorsConfiguration.class,
    CasDefaultServiceTicketIdGeneratorsConfiguration.class,
    CasCoreHttpConfiguration.class,
    CasCoreNotificationsConfiguration.class,
    CasCoreAuthenticationMetadataConfiguration.class,
    CasCoreTicketCatalogConfiguration.class,
    CasCoreTicketsSerializationConfiguration.class,
    CasWebApplicationServiceFactoryConfiguration.class,
    CasCoreServicesConfiguration.class,
    CasCoreServicesAuthenticationConfiguration.class
})
@Tag("Tickets")
public class TicketSerializersTests {
    @Autowired
    @Qualifier("defaultTicketFactory")
    private TicketFactory defaultTicketFactory;

    @Autowired
    @Qualifier("ticketSerializationManager")
    private TicketSerializationManager ticketSerializationManager;

    @Test
    public void verifyTicketGrantingTicketSerialization() {
        verifySerialization(newTicketGrantingTicket());
    }

    @Test
    public void verifyTransientSessionTicketSerialization() {
        val factory = (TransientSessionTicketFactory) this.defaultTicketFactory.get(TransientSessionTicket.class);
        val ticket = factory.create(RegisteredServiceTestUtils.getService(), CollectionUtils.wrap("key", "value"));
        verifySerialization(ticket);
    }

    @Test
    public void verifyServiceTicketSerialization() {
        verifySerialization(newServiceTicket(newTicketGrantingTicket()));
    }

    @Test
    public void verifyProxyGrantingTicketSerialization() {
        val tgt = newTicketGrantingTicket();
        verifySerialization(newProxyGrantingTicket(tgt, newServiceTicket(tgt)));
    }

    @Test
    public void verifyProxyTicketSerialization() {
        val tgt = newTicketGrantingTicket();
        val st = newServiceTicket(tgt);
        val pgt = newProxyGrantingTicket(tgt, st);
        val ptFactory = (ProxyTicketFactory) this.defaultTicketFactory.get(ProxyTicket.class);
        val pt = ptFactory.create(pgt, st.getService(), ProxyTicket.class);
        verifySerialization(pt);
    }

    /** Creates a fresh ticket-granting ticket for a stock test authentication/service. */
    private TicketGrantingTicket newTicketGrantingTicket() {
        val factory = (TicketGrantingTicketFactory) this.defaultTicketFactory.get(TicketGrantingTicket.class);
        return factory.create(RegisteredServiceTestUtils.getAuthentication(),
            RegisteredServiceTestUtils.getService(), TicketGrantingTicket.class);
    }

    /** Issues a service ticket (credentials-provided=true) off the given TGT. */
    private ServiceTicket newServiceTicket(final TicketGrantingTicket tgt) {
        val factory = (ServiceTicketFactory) this.defaultTicketFactory.get(ServiceTicket.class);
        return factory.create(tgt, RegisteredServiceTestUtils.getService(), true, ServiceTicket.class);
    }

    /** Issues a proxy-granting ticket off the given service ticket and TGT authentication. */
    private ProxyGrantingTicket newProxyGrantingTicket(final TicketGrantingTicket tgt, final ServiceTicket st) {
        val factory = (ProxyGrantingTicketFactory) this.defaultTicketFactory.get(ProxyGrantingTicket.class);
        return factory.create(st, tgt.getAuthentication(), ProxyGrantingTicket.class);
    }

    /**
     * Round-trips the ticket through the serialization manager and asserts the
     * deserialized copy equals the original (expected first, per JUnit convention).
     */
    private void verifySerialization(final Ticket ticket) {
        val serialized = ticketSerializationManager.serializeTicket(ticket);
        assertNotNull(serialized);
        val deserialized = ticketSerializationManager.deserializeTicket(serialized, ticket.getClass());
        assertNotNull(deserialized);
        assertEquals(ticket, deserialized);
    }
}
| {
"content_hash": "e9c4c6717c7acbd9debe1de79a63d1b6",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 115,
"avg_line_length": 46.70229007633588,
"alnum_prop": 0.7904543968617195,
"repo_name": "pdrados/cas",
"id": "0c0b45fffd712b855f628f2064b228b3b31af70c",
"size": "6118",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "core/cas-server-core-tickets/src/test/java/org/apereo/cas/ticket/TicketSerializersTests.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13992"
},
{
"name": "Dockerfile",
"bytes": "75"
},
{
"name": "Groovy",
"bytes": "31399"
},
{
"name": "HTML",
"bytes": "195237"
},
{
"name": "Java",
"bytes": "12509257"
},
{
"name": "JavaScript",
"bytes": "85879"
},
{
"name": "Python",
"bytes": "26699"
},
{
"name": "Ruby",
"bytes": "1323"
},
{
"name": "Shell",
"bytes": "177491"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
Index Fungorum
#### Published in
Bot. Mag., Tokyo 37: 60 (1923)
#### Original name
Stereum japonicum Yasuda
### Remarks
null | {
"content_hash": "946c15fff7cb743689cebf18de4d4ccd",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 12,
"alnum_prop": 0.6858974358974359,
"repo_name": "mdoering/backbone",
"id": "afa91228f81401e70410c9fcb6119da816c268e8",
"size": "204",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Fungi/Basidiomycota/Agaricomycetes/Russulales/Stereaceae/Stereum/Stereum japonicum/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
import { ITranslation } from './work.model';
/**
 * A skill entry in the portfolio data model.
 * NOTE(review): member meanings below are inferred from names — confirm
 * against the components that consume ISkill.
 */
export interface ISkill {
  /** Display name of the skill. */
  name: string;
  /** Sort position within the skill list (presumably ascending). */
  order: number;
  /** Whether the entry is shown/enabled. */
  active: boolean;
  /** CSS class used when rendering. */
  class: string;
  /** Icon identifier or icon class. */
  icon: string;
  /** Localized short label (ITranslation from work.model). */
  legend: ITranslation;
  /** Localized long description. */
  description: ITranslation;
  /** Numeric proficiency level; scale not specified here. */
  proficiency: number;
  /** Years of experience with the skill. */
  years: number;
  // TODO(review): `any` defeats type checking — determine the real slug type.
  slug: any;
}
| {
"content_hash": "be18bfb561cfb9e72b8b15f28442245d",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 44,
"avg_line_length": 18.928571428571427,
"alnum_prop": 0.690566037735849,
"repo_name": "plastikaweb/plastikaweb2017",
"id": "4d087c539ec03a8af8f6161173e27ac4f96f8df2",
"size": "265",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/app/models/skill.model.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "8501"
},
{
"name": "HTML",
"bytes": "20061"
},
{
"name": "JavaScript",
"bytes": "3110"
},
{
"name": "Shell",
"bytes": "604"
},
{
"name": "TypeScript",
"bytes": "96803"
}
],
"symlink_target": ""
} |
export { default } from 'ember-dynamic-table/components/ember-dynamic-table-header-tr';
| {
"content_hash": "93142ebcff3c755ccc038324756210f7",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 87,
"avg_line_length": 88,
"alnum_prop": 0.7840909090909091,
"repo_name": "phll2/ember-dynamic-table",
"id": "7fdf3b29fade68fba49905cbf6664b18c322ae06",
"size": "88",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/components/ember-dynamic-table-header-tr.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "109"
},
{
"name": "HTML",
"bytes": "5358"
},
{
"name": "JavaScript",
"bytes": "68542"
}
],
"symlink_target": ""
} |
module OS
  module Linux
    # Helpers for querying the glibc version of the running Linux system.
    module Glibc
      module_function

      # Returns the system glibc version as reported by `ldd --version`.
      #
      # The result is memoised — including the not-found case — so the `ldd`
      # subprocess is spawned at most once per process. (The previous code
      # only cached successful lookups, re-running `ldd` on every call when
      # no version could be parsed.)
      #
      # @return [Version] the parsed version, or Version::NULL when it
      #   cannot be determined.
      def system_version
        return @system_version if defined?(@system_version)

        version = Utils.popen_read("/usr/bin/ldd", "--version")[/ (\d+\.\d+)/, 1]
        @system_version = version ? Version.new(version) : Version::NULL
      end
    end
  end
end
| {
"content_hash": "98567720f1ebc184cd5a3759bcaeb3c8",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 81,
"avg_line_length": 21.375,
"alnum_prop": 0.6023391812865497,
"repo_name": "maxim-belkin/brew",
"id": "7be42f20432bec75180fadc08082fb2458f3a1e0",
"size": "342",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Library/Homebrew/os/linux/glibc.rb",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "HTML",
"bytes": "10080"
},
{
"name": "PostScript",
"bytes": "485"
},
{
"name": "Roff",
"bytes": "61924"
},
{
"name": "Ruby",
"bytes": "1958261"
},
{
"name": "Shell",
"bytes": "71086"
}
],
"symlink_target": ""
} |
/**
* @fileoverview A fake web worker.
*/
goog.provide("trapeze.FauxWorker");
goog.require("trapeze.TrapezeWorker");
/**
 * Drop-in stand-in for a Web Worker. Used on browsers without worker support,
 * and handy for debugging since devtools often cannot step into real worker
 * threads: the TrapezeWorker runs synchronously on the main thread instead.
 * @constructor
 */
trapeze.FauxWorker = function() {
  var self = this;
  // Messages the worker emits are funneled back through this.onmessage with
  // the same {data: ...} envelope a real Worker would deliver.
  this.worker = new trapeze.TrapezeWorker(function(data) {
    self.onmessage({'data': data});
  });
};

/**
 * Mirrors Worker#postMessage: hands the payload straight to the wrapped
 * TrapezeWorker's onmessage handler.
 * @param {Array} data The data to send to TrapezeWorker
 */
trapeze.FauxWorker.prototype.postMessage = function(data) {
  this.worker.onmessage({'data': data});
};
| {
"content_hash": "07ff822b767c94b6435e9a9981fd1d6c",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 80,
"avg_line_length": 24.392857142857142,
"alnum_prop": 0.705710102489019,
"repo_name": "AKamanjha/trapeze-reader",
"id": "7cf1feec3d57e536f6602595594e554b5f6cabef",
"size": "683",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "src/FauxWorker.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2571"
},
{
"name": "HTML",
"bytes": "8176"
},
{
"name": "JavaScript",
"bytes": "333536"
},
{
"name": "PHP",
"bytes": "5564"
}
],
"symlink_target": ""
} |
<!doctype html>
<html lang="en">
<head>
<title>Code coverage report for features/common</title>
<meta charset="utf-8" />
<link rel="stylesheet" href="../../prettify.css" />
<link rel="stylesheet" href="../../base.css" />
<meta name="viewport" content="width=device-width, initial-scale=1">
<style type='text/css'>
.coverage-summary .sorter {
background-image: url(../../sort-arrow-sprite.png);
}
</style>
</head>
<body>
<div class='wrapper'>
<div class='pad1'>
<h1>
<a href="../../index.html">All files</a> features/common
</h1>
<div class='clearfix'>
<div class='fl pad1y space-right2'>
<span class="strong">0% </span>
<span class="quiet">Statements</span>
<span class='fraction'>0/19</span>
</div>
<div class='fl pad1y space-right2'>
<span class="strong">0% </span>
<span class="quiet">Branches</span>
<span class='fraction'>0/12</span>
</div>
<div class='fl pad1y space-right2'>
<span class="strong">0% </span>
<span class="quiet">Functions</span>
<span class='fraction'>0/7</span>
</div>
<div class='fl pad1y space-right2'>
<span class="strong">0% </span>
<span class="quiet">Lines</span>
<span class='fraction'>0/18</span>
</div>
</div>
</div>
<div class='status-line low'></div>
<div class="pad1">
<table class="coverage-summary">
<thead>
<tr>
<th data-col="file" data-fmt="html" data-html="true" class="file">File</th>
<th data-col="pic" data-type="number" data-fmt="html" data-html="true" class="pic"></th>
<th data-col="statements" data-type="number" data-fmt="pct" class="pct">Statements</th>
<th data-col="statements_raw" data-type="number" data-fmt="html" class="abs"></th>
<th data-col="branches" data-type="number" data-fmt="pct" class="pct">Branches</th>
<th data-col="branches_raw" data-type="number" data-fmt="html" class="abs"></th>
<th data-col="functions" data-type="number" data-fmt="pct" class="pct">Functions</th>
<th data-col="functions_raw" data-type="number" data-fmt="html" class="abs"></th>
<th data-col="lines" data-type="number" data-fmt="pct" class="pct">Lines</th>
<th data-col="lines_raw" data-type="number" data-fmt="html" class="abs"></th>
</tr>
</thead>
<tbody><tr>
<td class="file low" data-value="PageNotFound.js"><a href="PageNotFound.js.html">PageNotFound.js</a></td>
<td data-value="0" class="pic low"><div class="chart"><div class="cover-fill" style="width: 0%;"></div><div class="cover-empty" style="width:100%;"></div></div></td>
<td data-value="0" class="pct low">0%</td>
<td data-value="1" class="abs low">0/1</td>
<td data-value="100" class="pct high">100%</td>
<td data-value="0" class="abs high">0/0</td>
<td data-value="0" class="pct low">0%</td>
<td data-value="1" class="abs low">0/1</td>
<td data-value="0" class="pct low">0%</td>
<td data-value="1" class="abs low">0/1</td>
</tr>
<tr>
<td class="file low" data-value="SearchInput.js"><a href="SearchInput.js.html">SearchInput.js</a></td>
<td data-value="0" class="pic low"><div class="chart"><div class="cover-fill" style="width: 0%;"></div><div class="cover-empty" style="width:100%;"></div></div></td>
<td data-value="0" class="pct low">0%</td>
<td data-value="7" class="abs low">0/7</td>
<td data-value="0" class="pct low">0%</td>
<td data-value="2" class="abs low">0/2</td>
<td data-value="0" class="pct low">0%</td>
<td data-value="3" class="abs low">0/3</td>
<td data-value="0" class="pct low">0%</td>
<td data-value="6" class="abs low">0/6</td>
</tr>
<tr>
<td class="file low" data-value="SimpleNav.js"><a href="SimpleNav.js.html">SimpleNav.js</a></td>
<td data-value="0" class="pic low"><div class="chart"><div class="cover-fill" style="width: 0%;"></div><div class="cover-empty" style="width:100%;"></div></div></td>
<td data-value="0" class="pct low">0%</td>
<td data-value="11" class="abs low">0/11</td>
<td data-value="0" class="pct low">0%</td>
<td data-value="10" class="abs low">0/10</td>
<td data-value="0" class="pct low">0%</td>
<td data-value="3" class="abs low">0/3</td>
<td data-value="0" class="pct low">0%</td>
<td data-value="11" class="abs low">0/11</td>
</tr>
<tr>
<td class="file high" data-value="colors.js"><a href="colors.js.html">colors.js</a></td>
<td data-value="100" class="pic high"><div class="chart"><div class="cover-fill cover-full" style="width: 100%;"></div><div class="cover-empty" style="width:0%;"></div></div></td>
<td data-value="100" class="pct high">100%</td>
<td data-value="0" class="abs high">0/0</td>
<td data-value="100" class="pct high">100%</td>
<td data-value="0" class="abs high">0/0</td>
<td data-value="100" class="pct high">100%</td>
<td data-value="0" class="abs high">0/0</td>
<td data-value="100" class="pct high">100%</td>
<td data-value="0" class="abs high">0/0</td>
</tr>
<tr>
<td class="file high" data-value="index.js"><a href="index.js.html">index.js</a></td>
<td data-value="100" class="pic high"><div class="chart"><div class="cover-fill cover-full" style="width: 100%;"></div><div class="cover-empty" style="width:0%;"></div></div></td>
<td data-value="100" class="pct high">100%</td>
<td data-value="0" class="abs high">0/0</td>
<td data-value="100" class="pct high">100%</td>
<td data-value="0" class="abs high">0/0</td>
<td data-value="100" class="pct high">100%</td>
<td data-value="0" class="abs high">0/0</td>
<td data-value="100" class="pct high">100%</td>
<td data-value="0" class="abs high">0/0</td>
</tr>
</tbody>
</table>
</div><div class='push'></div><!-- for sticky footer -->
</div><!-- /wrapper -->
<div class='footer quiet pad2 space-top1 center small'>
Code coverage
generated by <a href="https://istanbul.js.org/" target="_blank">istanbul</a> at Mon Jan 22 2018 12:15:56 GMT+0800 (CST)
</div>
</div>
<script src="../../prettify.js"></script>
<script>
window.onload = function () {
if (typeof prettyPrint === 'function') {
prettyPrint();
}
};
</script>
<script src="../../sorter.js"></script>
</body>
</html>
| {
"content_hash": "5a92c11519ce1f0758a10c838a87bb5e",
"timestamp": "",
"source": "github",
"line_count": 145,
"max_line_length": 180,
"avg_line_length": 42.42758620689655,
"alnum_prop": 0.6124837451235371,
"repo_name": "supnate/rekit-portal",
"id": "a9c5f72aa45d485579bb015960688192ff69505b",
"size": "6152",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "coverage/lcov-report/features/common/index.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "44979"
},
{
"name": "HTML",
"bytes": "1590994"
},
{
"name": "JavaScript",
"bytes": "262516"
}
],
"symlink_target": ""
} |
require 'rails'
module Less
  module Rails
    module Bootstrap
      # Rails engine that exposes this gem's bundled LESS sources to the host
      # application's asset pipeline.
      class Engine < ::Rails::Engine
        # Registered to run after less-rails has created its config (so
        # `app.config.less.paths` exists); :group => :all makes it run in
        # every environment group.
        initializer 'less-rails-bootstrap.setup', :after => 'less-rails.before.load_config_initializers', :group => :all do |app|
          # Add the gem's app/frameworks directory to the LESS load path.
          app.config.less.paths << File.join(config.root, 'app', 'frameworks')
        end
      end
    end
  end
end
| {
"content_hash": "66bceb83808863a590e1fff5905322f0",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 129,
"avg_line_length": 24.6,
"alnum_prop": 0.6043360433604336,
"repo_name": "watanabe-kazunori/New_EC-Site",
"id": "9a58f280624e3be8abcb166572ab8e03bd9cbc8e",
"size": "369",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vendor/bundle/ruby/2.1.0/gems/less-rails-bootstrap-3.3.5.0/lib/less/rails/bootstrap/engine.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "7469"
},
{
"name": "CoffeeScript",
"bytes": "6471"
},
{
"name": "Groff",
"bytes": "104"
},
{
"name": "HTML",
"bytes": "245873"
},
{
"name": "JavaScript",
"bytes": "1019"
},
{
"name": "Ruby",
"bytes": "199532"
}
],
"symlink_target": ""
} |
/**
 * @file mali_kbase_gpuprops.h
 * Base kernel property query APIs: populating GPU properties from hardware
 * registers and copying them out to userspace via UKU.
 */

#ifndef _KBASE_GPUPROPS_H_
#define _KBASE_GPUPROPS_H_

#include "mali_kbase_gpuprops_types.h"

/* Forward definition - see mali_kbase.h */
struct kbase_device;

/**
 * @brief Set up Kbase GPU properties.
 *
 * Set up Kbase GPU properties with information from the GPU registers
 *
 * @param kbdev The struct kbase_device structure for the device
 */
void kbase_gpuprops_set(struct kbase_device *kbdev);

/**
 * @brief Provide GPU properties to userside through UKU call.
 *
 * Fill the struct kbase_uk_gpuprops with values from GPU configuration registers.
 *
 * @param kctx The struct kbase_context structure
 * @param kbase_props A copy of the struct kbase_uk_gpuprops structure from userspace
 *
 * @return MALI_ERROR_NONE on success. Any other value indicates failure.
 */
mali_error kbase_gpuprops_uk_get_props(struct kbase_context *kctx, struct kbase_uk_gpuprops * const kbase_props);

#endif /* _KBASE_GPUPROPS_H_ */
| {
"content_hash": "5997de3e9ff881b402d15f0f5c63c5dd",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 113,
"avg_line_length": 24.634146341463413,
"alnum_prop": 0.7257425742574257,
"repo_name": "ghostkim-sc/SMG920T_profiling_enabled",
"id": "fe2676cd9b998089cf49f051dec2702519a4ab95",
"size": "1515",
"binary": false,
"copies": "608",
"ref": "refs/heads/master",
"path": "drivers/gpu/arm/t7xx/r5p0/mali_kbase_gpuprops.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "4528"
},
{
"name": "Assembly",
"bytes": "9791460"
},
{
"name": "Awk",
"bytes": "18681"
},
{
"name": "C",
"bytes": "518034272"
},
{
"name": "C++",
"bytes": "13105745"
},
{
"name": "GDB",
"bytes": "18113"
},
{
"name": "Lex",
"bytes": "40805"
},
{
"name": "M4",
"bytes": "3388"
},
{
"name": "Makefile",
"bytes": "1522326"
},
{
"name": "Objective-C",
"bytes": "1278363"
},
{
"name": "Perl",
"bytes": "372361"
},
{
"name": "Python",
"bytes": "22590"
},
{
"name": "Roff",
"bytes": "22012"
},
{
"name": "Scilab",
"bytes": "21433"
},
{
"name": "Shell",
"bytes": "218756"
},
{
"name": "SourcePawn",
"bytes": "2711"
},
{
"name": "Stata",
"bytes": "4176"
},
{
"name": "UnrealScript",
"bytes": "6113"
},
{
"name": "Yacc",
"bytes": "83091"
}
],
"symlink_target": ""
} |
package coyote.commons.network.http.wsd;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Map;
import java.util.logging.Logger;
import coyote.commons.network.MimeType;
import coyote.commons.network.http.HTTPD;
import coyote.commons.network.http.HTTPSession;
import coyote.commons.network.http.Response;
import coyote.commons.network.http.SecurityResponseException;
import coyote.commons.network.http.Status;
/**
* Web Socket Daemon
*/
/**
 * Web Socket Daemon.
 *
 * <p>Extends {@link HTTPD} with the RFC 6455 opening-handshake logic: requests
 * carrying the websocket upgrade headers are answered with a handshake
 * response obtained from the subclass-supplied {@link WebSocket}; all other
 * requests fall through to normal HTTP serving.</p>
 */
public abstract class WebSocketDaemon extends HTTPD {

  public static final String HEADER_CONNECTION = "connection";

  public static final String HEADER_CONNECTION_VALUE = "Upgrade";

  public static final String HEADER_UPGRADE = "upgrade";

  public static final String HEADER_UPGRADE_VALUE = "websocket";

  public static final String HEADER_WEBSOCKET_ACCEPT = "sec-websocket-accept";

  public static final String HEADER_WEBSOCKET_KEY = "sec-websocket-key";

  public static final String HEADER_WEBSOCKET_PROTOCOL = "sec-websocket-protocol";

  public static final String HEADER_WEBSOCKET_VERSION = "sec-websocket-version";

  public static final String HEADER_WEBSOCKET_VERSION_VALUE = "13";

  /** Base64 alphabet used by {@link #encodeBase64(byte[])}. */
  private final static char[] ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".toCharArray();

  /** Fixed GUID appended to the client key, per RFC 6455 section 4.2.2. */
  private final static String WEBSOCKET_KEY_MAGIC = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11";

  static final Logger LOG = Logger.getLogger(WebSocketDaemon.class.getName());

  /**
   * Computes the Sec-WebSocket-Accept value for a handshake: the Base64
   * encoding of SHA-1(client key + magic GUID).
   *
   * @param key the client-supplied Sec-WebSocket-Key header value
   *
   * @return the accept-key string to place in the handshake response
   *
   * @throws NoSuchAlgorithmException if the JVM has no SHA-1 implementation
   */
  public static String makeAcceptKey(final String key) throws NoSuchAlgorithmException {
    final MessageDigest md = MessageDigest.getInstance("SHA-1");
    final String text = key + WebSocketDaemon.WEBSOCKET_KEY_MAGIC;
    md.update(text.getBytes(), 0, text.length());
    final byte[] sha1hash = md.digest();
    return encodeBase64(sha1hash);
  }

  /**
   * Translates the specified byte array into a Base64 string.
   *
   * @param buf the byte array (not null)
   *
   * @return the translated Base64 string (not null)
   */
  private static String encodeBase64(final byte[] buf) {
    final int size = buf.length;
    final char[] ar = new char[((size + 2) / 3) * 4];
    int a = 0;
    int i = 0;
    while (i < size) {
      final byte b0 = buf[i++];
      final byte b1 = i < size ? buf[i++] : 0;
      final byte b2 = i < size ? buf[i++] : 0;
      final int mask = 0x3F;
      ar[a++] = WebSocketDaemon.ALPHABET[(b0 >> 2) & mask];
      ar[a++] = WebSocketDaemon.ALPHABET[((b0 << 4) | ((b1 & 0xFF) >> 4)) & mask];
      ar[a++] = WebSocketDaemon.ALPHABET[((b1 << 2) | ((b2 & 0xFF) >> 6)) & mask];
      ar[a++] = WebSocketDaemon.ALPHABET[b2 & mask];
    }
    // Replace the filler characters emitted for a partial final group with
    // '=' padding. FIX: one leftover input byte requires TWO pad characters;
    // the previous code broke out of case 1 after writing a single '=',
    // producing e.g. "QQA=" instead of "QQ==" for a 1-byte input. (SHA-1
    // digests are 20 bytes, so makeAcceptKey hit only the 2-byte case and
    // was unaffected.)
    switch (size % 3) {
      case 1:
        ar[--a] = '=';
        ar[--a] = '=';
        break;
      case 2:
        ar[--a] = '=';
        break;
      default:
        break;
    }
    return new String(ar);
  }

  public WebSocketDaemon(final int port) {
    super(port);
  }

  public WebSocketDaemon(final String hostname, final int port) {
    super(hostname, port);
  }

  /**
   * Dispatches a session: websocket upgrade requests get a handshake response
   * (after validating version and key headers); everything else is served as
   * plain HTTP via {@link #serveHttp(HTTPSession)}.
   */
  @Override
  public Response serve(final HTTPSession session) throws SecurityResponseException {
    final Map<String, String> headers = session.getRequestHeaders();
    if (isWebsocketRequested(session)) {
      // Only websocket protocol version 13 (RFC 6455) is supported.
      if (!WebSocketDaemon.HEADER_WEBSOCKET_VERSION_VALUE.equalsIgnoreCase(headers.get(WebSocketDaemon.HEADER_WEBSOCKET_VERSION))) {
        return Response.createFixedLengthResponse(Status.BAD_REQUEST, MimeType.TEXT.getType(), "Invalid Websocket-Version " + headers.get(WebSocketDaemon.HEADER_WEBSOCKET_VERSION));
      }

      if (!headers.containsKey(WebSocketDaemon.HEADER_WEBSOCKET_KEY)) {
        return Response.createFixedLengthResponse(Status.BAD_REQUEST, MimeType.TEXT.getType(), "Missing Websocket-Key");
      }

      final WebSocket webSocket = openWebSocket(session);
      final Response handshakeResponse = webSocket.getHandshakeResponse();
      try {
        handshakeResponse.addHeader(WebSocketDaemon.HEADER_WEBSOCKET_ACCEPT, makeAcceptKey(headers.get(WebSocketDaemon.HEADER_WEBSOCKET_KEY)));
      } catch (final NoSuchAlgorithmException e) {
        return Response.createFixedLengthResponse(Status.INTERNAL_ERROR, MimeType.TEXT.getType(), "The SHA-1 Algorithm required for websockets is not available on the server.");
      }

      // Echo back the first protocol the client offered, if any.
      if (headers.containsKey(WebSocketDaemon.HEADER_WEBSOCKET_PROTOCOL)) {
        handshakeResponse.addHeader(WebSocketDaemon.HEADER_WEBSOCKET_PROTOCOL, headers.get(WebSocketDaemon.HEADER_WEBSOCKET_PROTOCOL).split(",")[0]);
      }

      return handshakeResponse;
    } else {
      return serveHttp(session);
    }
  }

  /** True when the Connection header contains "Upgrade" (case-insensitive). */
  private boolean isWebSocketConnectionHeader(final Map<String, String> headers) {
    final String connection = headers.get(WebSocketDaemon.HEADER_CONNECTION);
    return (connection != null) && connection.toLowerCase().contains(WebSocketDaemon.HEADER_CONNECTION_VALUE.toLowerCase());
  }

  /**
   * True when the request carries both "Upgrade: websocket" and a Connection
   * header containing "Upgrade".
   */
  protected boolean isWebsocketRequested(final HTTPSession session) {
    final Map<String, String> headers = session.getRequestHeaders();
    final String upgrade = headers.get(WebSocketDaemon.HEADER_UPGRADE);
    final boolean isCorrectConnection = isWebSocketConnectionHeader(headers);
    final boolean isUpgrade = WebSocketDaemon.HEADER_UPGRADE_VALUE.equalsIgnoreCase(upgrade);
    return isUpgrade && isCorrectConnection;
  }

  /**
   * Subclass hook: creates the {@link WebSocket} that will own this upgraded
   * connection.
   *
   * @param handshake the session carrying the upgrade request
   */
  protected abstract WebSocket openWebSocket(HTTPSession handshake);

  /** Serves a non-websocket request through the regular HTTPD pipeline. */
  protected Response serveHttp(final HTTPSession session) throws SecurityResponseException {
    return super.serve(session);
  }

  /**
   * not all websockets implementations accept gzip compression.
   */
  @Override
  protected boolean useGzipWhenAccepted(final Response r) {
    return false;
  }

  /** Lifecycle states of a websocket connection. */
  public enum State {
    CLOSED, CLOSING, CONNECTING, OPEN, UNCONNECTED
  }
}
| {
"content_hash": "a2e3be2b7cfa5f30be7c68cd7201cb7f",
"timestamp": "",
"source": "github",
"line_count": 176,
"max_line_length": 181,
"avg_line_length": 32.80681818181818,
"alnum_prop": 0.71527537235885,
"repo_name": "sdcote/loader",
"id": "9c5c051a79381007b50e8589ce673deaead611a8",
"size": "5774",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/coyote/commons/network/http/wsd/WebSocketDaemon.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "267"
},
{
"name": "Java",
"bytes": "1889663"
}
],
"symlink_target": ""
} |
/**
* @file simulator.h
*
* This module interfaces via MAVLink to a software in the loop simulator (SITL)
* such as jMAVSim or Gazebo.
*/
#pragma once
#include <drivers/drv_hrt.h>
#include <drivers/drv_rc_input.h>
#include <drivers/drv_range_finder.h>
#include <lib/drivers/accelerometer/PX4Accelerometer.hpp>
#include <lib/drivers/barometer/PX4Barometer.hpp>
#include <lib/drivers/gyroscope/PX4Gyroscope.hpp>
#include <lib/drivers/magnetometer/PX4Magnetometer.hpp>
#include <lib/ecl/geo/geo.h>
#include <lib/perf/perf_counter.h>
#include <px4_platform_common/atomic.h>
#include <px4_platform_common/bitmask.h>
#include <px4_platform_common/module_params.h>
#include <px4_platform_common/posix.h>
#include <uORB/Publication.hpp>
#include <uORB/Subscription.hpp>
#include <uORB/topics/actuator_outputs.h>
#include <uORB/topics/differential_pressure.h>
#include <uORB/topics/distance_sensor.h>
#include <uORB/topics/irlock_report.h>
#include <uORB/topics/manual_control_setpoint.h>
#include <uORB/topics/optical_flow.h>
#include <uORB/topics/parameter_update.h>
#include <uORB/topics/sensor_gps.h>
#include <uORB/topics/vehicle_angular_velocity.h>
#include <uORB/topics/vehicle_attitude.h>
#include <uORB/topics/vehicle_global_position.h>
#include <uORB/topics/vehicle_local_position.h>
#include <uORB/topics/vehicle_odometry.h>
#include <uORB/topics/vehicle_status.h>
#include <uORB/topics/vehicle_command.h>
#include <uORB/topics/vehicle_command_ack.h>
#include <random>
#include <v2.0/common/mavlink.h>
#include <v2.0/mavlink_types.h>
using namespace time_literals;
//! Enumeration to use on the bitmask in HIL_SENSOR.
//! Each member groups the bitmask bits belonging to one sensor:
//! ACCEL covers bits 0-2, GYRO bits 3-5, MAG bits 6-8.
//! BARO covers the non-contiguous bits 9, 11 and 12 (bit 10 is skipped
//! because it belongs to DIFF_PRESS). Exact field-to-bit mapping follows the
//! MAVLink HIL_SENSOR fields_updated definition.
enum class SensorSource {
	ACCEL	= 0b111,
	GYRO	= 0b111000,
	MAG	= 0b111000000,
	BARO	= 0b1101000000000,
	DIFF_PRESS = 0b10000000000
};
ENABLE_BIT_OPERATORS(SensorSource)
//! Bitwise AND between SensorSource and unsigned operands, yielding the
//! masked SensorSource value. Only uint32_t and SensorSource operands are
//! accepted; anything else fails to compile.
template<typename A, typename B>
static inline SensorSource operator &(A lhs, B rhs)
{
	// Reject operand types this overload was not designed for.
	static_assert((std::is_same<A, uint32_t>::value || std::is_same<A, SensorSource>::value),
		      "first argument is not uint32_t or SensorSource enum type");
	static_assert((std::is_same<B, uint32_t>::value || std::is_same<B, SensorSource>::value),
		      "second argument is not uint32_t or SensorSource enum type");

	using underlying = typename std::underlying_type<SensorSource>::type;
	const underlying masked = static_cast<underlying>(lhs) & static_cast<underlying>(rhs);
	return static_cast<SensorSource>(masked);
}
/**
 * Simulator (singleton) — MAVLink-side of the SITL interface.
 * Receives simulated sensor data from jMAVSim/Gazebo, republishes it on uORB,
 * and streams actuator outputs back to the simulator.
 */
class Simulator : public ModuleParams
{
public:
	static Simulator *getInstance() { return _instance; }

	// Transport protocol used for the simulator connection.
	enum class InternetProtocol {
		TCP,
		UDP
	};

	static int start(int argc, char *argv[]);

	void set_ip(InternetProtocol ip) { _ip = ip; }
	void set_port(unsigned port) { _port = port; }

#if defined(ENABLE_LOCKSTEP_SCHEDULER)
	bool has_initialized() { return _has_initialized.load(); }
#endif

private:
	Simulator() : ModuleParams(nullptr)
	{
	}

	~Simulator()
	{
		// free perf counters
		perf_free(_perf_sim_delay);
		perf_free(_perf_sim_interval);

		for (size_t i = 0; i < sizeof(_dist_pubs) / sizeof(_dist_pubs[0]); i++) {
			delete _dist_pubs[i];
		}

		px4_lockstep_unregister_component(_lockstep_component);

		_instance = nullptr;
	}

	void check_failure_injections();

	int publish_flow_topic(const mavlink_hil_optical_flow_t *flow);
	int publish_odometry_topic(const mavlink_message_t *odom_mavlink);
	int publish_distance_topic(const mavlink_distance_sensor_t *dist);

	static Simulator *_instance;

	// simulated sensor instances
	static constexpr uint8_t ACCEL_COUNT_MAX = 3;
	PX4Accelerometer _px4_accel[ACCEL_COUNT_MAX] {
		{1310988, ROTATION_NONE}, // 1310988: DRV_IMU_DEVTYPE_SIM, BUS: 1, ADDR: 1, TYPE: SIMULATION
		{1310996, ROTATION_NONE}, // 1310996: DRV_IMU_DEVTYPE_SIM, BUS: 2, ADDR: 1, TYPE: SIMULATION
		{1311004, ROTATION_NONE}, // 1311004: DRV_IMU_DEVTYPE_SIM, BUS: 3, ADDR: 1, TYPE: SIMULATION
	};

	static constexpr uint8_t GYRO_COUNT_MAX = 3;
	PX4Gyroscope _px4_gyro[GYRO_COUNT_MAX] {
		{1310988, ROTATION_NONE}, // 1310988: DRV_IMU_DEVTYPE_SIM, BUS: 1, ADDR: 1, TYPE: SIMULATION
		{1310996, ROTATION_NONE}, // 1310996: DRV_IMU_DEVTYPE_SIM, BUS: 2, ADDR: 1, TYPE: SIMULATION
		{1311004, ROTATION_NONE}, // 1311004: DRV_IMU_DEVTYPE_SIM, BUS: 3, ADDR: 1, TYPE: SIMULATION
	};

	PX4Magnetometer _px4_mag_0{197388, ROTATION_NONE}; // 197388: DRV_MAG_DEVTYPE_MAGSIM, BUS: 1, ADDR: 1, TYPE: SIMULATION
	PX4Magnetometer _px4_mag_1{197644, ROTATION_NONE}; // 197644: DRV_MAG_DEVTYPE_MAGSIM, BUS: 2, ADDR: 1, TYPE: SIMULATION

	PX4Barometer _px4_baro_0{6620172}; // 6620172: DRV_BARO_DEVTYPE_BAROSIM, BUS: 1, ADDR: 4, TYPE: SIMULATION
	PX4Barometer _px4_baro_1{6620428}; // 6620428: DRV_BARO_DEVTYPE_BAROSIM, BUS: 2, ADDR: 4, TYPE: SIMULATION

	// Shared temperature applied to the simulated sensors.
	float _sensors_temperature{0};

	perf_counter_t _perf_sim_delay{perf_alloc(PC_ELAPSED, MODULE_NAME": network delay")};
	perf_counter_t _perf_sim_interval{perf_alloc(PC_INTERVAL, MODULE_NAME": network interval")};

	// uORB publisher handlers
	uORB::Publication<differential_pressure_s> _differential_pressure_pub{ORB_ID(differential_pressure)};
	uORB::PublicationMulti<optical_flow_s> _flow_pub{ORB_ID(optical_flow)};
	uORB::Publication<irlock_report_s> _irlock_report_pub{ORB_ID(irlock_report)};
	uORB::Publication<vehicle_odometry_s> _visual_odometry_pub{ORB_ID(vehicle_visual_odometry)};
	uORB::Publication<vehicle_odometry_s> _mocap_odometry_pub{ORB_ID(vehicle_mocap_odometry)};
	uORB::Publication<vehicle_command_ack_s> _command_ack_pub{ORB_ID(vehicle_command_ack)};

	// Distance-sensor publications, created on demand (freed in the destructor).
	uORB::PublicationMulti<distance_sensor_s> *_dist_pubs[RANGE_FINDER_MAX_SENSORS] {};
	uint8_t _dist_sensor_ids[RANGE_FINDER_MAX_SENSORS] {};

	uORB::Subscription _parameter_update_sub{ORB_ID(parameter_update)};

	// Simulator connection settings (UDP port 14560 by default).
	unsigned int _port{14560};
	InternetProtocol _ip{InternetProtocol::UDP};

	double _realtime_factor{1.0};		///< How fast the simulation runs in comparison to real system time

	hrt_abstime _last_sim_timestamp{0};
	hrt_abstime _last_sitl_timestamp{0};

	void run();
	void handle_message(const mavlink_message_t *msg);
	void handle_message_distance_sensor(const mavlink_message_t *msg);
	void handle_message_hil_gps(const mavlink_message_t *msg);
	void handle_message_hil_sensor(const mavlink_message_t *msg);
	void handle_message_hil_state_quaternion(const mavlink_message_t *msg);
	void handle_message_landing_target(const mavlink_message_t *msg);
	void handle_message_odometry(const mavlink_message_t *msg);
	void handle_message_optical_flow(const mavlink_message_t *msg);
	void handle_message_rc_channels(const mavlink_message_t *msg);
	void handle_message_vision_position_estimate(const mavlink_message_t *msg);

	void parameters_update(bool force);
	void poll_for_MAVLink_messages();
	void request_hil_state_quaternion();
	void send();
	void send_controls();
	void send_heartbeat();
	void send_mavlink_message(const mavlink_message_t &aMsg);
	void update_sensors(const hrt_abstime &time, const mavlink_hil_sensor_t &sensors);

	static void *sending_trampoline(void *);

	void actuator_controls_from_outputs(mavlink_hil_actuator_controls_t *msg);

	// uORB publisher handlers (ground-truth topics fed from the simulator state)
	uORB::Publication<vehicle_angular_velocity_s> _vehicle_angular_velocity_ground_truth_pub{ORB_ID(vehicle_angular_velocity_groundtruth)};
	uORB::Publication<vehicle_attitude_s> _attitude_ground_truth_pub{ORB_ID(vehicle_attitude_groundtruth)};
	uORB::Publication<vehicle_global_position_s> _gpos_ground_truth_pub{ORB_ID(vehicle_global_position_groundtruth)};
	uORB::Publication<vehicle_local_position_s> _lpos_ground_truth_pub{ORB_ID(vehicle_local_position_groundtruth)};
	uORB::Publication<input_rc_s> _input_rc_pub{ORB_ID(input_rc)};

	// HIL GPS
	static constexpr int MAX_GPS = 3;
	uORB::PublicationMulti<sensor_gps_s> *_sensor_gps_pubs[MAX_GPS] {};
	uint8_t _gps_ids[MAX_GPS] {};

	// Default-seeded random engine (deterministic across runs).
	std::default_random_engine _gen{};

	// uORB subscription handlers
	int _actuator_outputs_sub{-1};
	actuator_outputs_s _actuator_outputs{};

	uORB::Subscription _vehicle_status_sub{ORB_ID(vehicle_status)};
	uORB::Subscription _vehicle_command_sub{ORB_ID(vehicle_command)};

	// hil map_ref data
	struct map_projection_reference_s _hil_local_proj_ref {};
	bool _hil_local_proj_inited{false};

	double _hil_ref_lat{0};
	double _hil_ref_lon{0};
	float _hil_ref_alt{0.0f};
	uint64_t _hil_ref_timestamp{0};

	vehicle_status_s _vehicle_status{};

	// Failure-injection state (see check_failure_injections()).
	bool _accel_blocked[ACCEL_COUNT_MAX] {};
	bool _accel_stuck[ACCEL_COUNT_MAX] {};
	// NOTE(review): sized with GYRO_COUNT_MAX although it stores accel data;
	// both constants are 3 so this is benign today — confirm intent.
	matrix::Vector3f _last_accel[GYRO_COUNT_MAX] {};

	bool _gyro_blocked[GYRO_COUNT_MAX] {};
	bool _gyro_stuck[GYRO_COUNT_MAX] {};
	matrix::Vector3f _last_gyro[GYRO_COUNT_MAX] {};

	bool _baro_blocked{false};
	bool _baro_stuck{false};

	bool _mag_blocked{false};
	bool _mag_stuck{false};

	bool _gps_blocked{false};
	bool _airspeed_blocked{false};

	// Last magnetometer sample, replayed while the mag is "stuck".
	float _last_magx{0.0f};
	float _last_magy{0.0f};
	float _last_magz{0.0f};

#if defined(ENABLE_LOCKSTEP_SCHEDULER)
	px4::atomic<bool> _has_initialized {false};
#endif

	int _lockstep_component{-1};

	// MAVLink identity parameters.
	DEFINE_PARAMETERS(
		(ParamInt<px4::params::MAV_TYPE>) _param_mav_type,
		(ParamInt<px4::params::MAV_SYS_ID>) _param_mav_sys_id,
		(ParamInt<px4::params::MAV_COMP_ID>) _param_mav_comp_id
	)
};
| {
"content_hash": "1a3fd4bb54879656d97e885af6ceff97",
"timestamp": "",
"source": "github",
"line_count": 268,
"max_line_length": 136,
"avg_line_length": 34.41044776119403,
"alnum_prop": 0.7310778572977662,
"repo_name": "krbeverx/Firmware",
"id": "a76b63e34f2f0a7752bcf7db5322bf3ac405d4d4",
"size": "11020",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/modules/simulator/simulator.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "3744385"
},
{
"name": "C++",
"bytes": "9694800"
},
{
"name": "CMake",
"bytes": "1114053"
},
{
"name": "EmberScript",
"bytes": "115886"
},
{
"name": "GDB",
"bytes": "41"
},
{
"name": "Groovy",
"bytes": "66180"
},
{
"name": "HTML",
"bytes": "5343"
},
{
"name": "MATLAB",
"bytes": "9938"
},
{
"name": "Makefile",
"bytes": "20059"
},
{
"name": "Perl",
"bytes": "11401"
},
{
"name": "Python",
"bytes": "1321111"
},
{
"name": "Shell",
"bytes": "308628"
}
],
"symlink_target": ""
} |
<link rel="import"
href="bower_components/paper-button/paper-button.html">
<!-- <tag-toggle>: one toggleable tag button. Mirrors its selection into the
     shared `active` map (keyed by the owning list's title) and fires
     "<parent>-updated" on every toggle. -->
<polymer-element name="tag-toggle">
  <template>
    <style>
      #butt {
        background-color:#fff;
        color: #526E9C;
        font-size: 8pt;
      }
    </style>
    <paper-button id="butt"
        label="{{name}}"
        raisedButton="{{selected}}"
        on-tap="{{tapped}}"></paper-button>
  </template>
  <script>
    Polymer("tag-toggle", {
      publish: {
        name: "Empty",        // tag text shown on the button
        parent: "NO PARENT",  // key of the owning tag-list inside `active`
        active: {},           // shared map: list title -> array of selected tag names
        selected: {
          // NOTE(review): this initializer runs when the prototype is built,
          // so `this` here likely is not the element instance — confirm the
          // expression ever evaluates to true.
          value: (this.active != undefined &&
                  this.active[this.parent] != undefined&&
                  this.active[this.parent].indexOf(this.name) != -1),
          reflect: true
        }
      },
      // Flip selection, add/remove this tag in active[parent], then notify
      // listeners that the parent list changed.
      tapped: function(event, detail, sender) {
        this.selected = !this.selected;
        if (this.active[this.parent] == undefined) {
          this.active[this.parent] = []
        }
        var active_list = this.active[this.parent]
        var i = active_list.indexOf(this.name);
        if(i != -1) {
          active_list.splice(i, 1);
        } else {
          active_list.push(this.name);
        }
        this.fire(this.parent + '-updated')
      },
      created: function() {
      }
    });
  </script>
</polymer-element>
<!-- <tag-list>: renders a titled group of <tag-toggle> buttons, one per entry
     in `tags`, sharing the `active` selection map with its children. -->
<polymer-element name="tag-list">
  <template>
    <!-- <style>
    :host {
      display: block;
      width: 100%;
    }
    #tag-cloud {
      margin: 15px;
    }
    </style> -->
    <div id='tag-cloud'>
      <h4>{{title}}</h4>
      <template repeat="{{tag in tags}}">
        <tag-toggle id="tag" name="{{tag}}" parent="{{title}}" active="{{active}}"></tag-toggle>
      </template>
    </div>
  </template>
  <script>
    Polymer("tag-list", {
      tags: [],  // tag names rendered as toggle buttons
      publish: {
        title: 'Title',  // heading text; also the key used in `active`
        active: {
          value: {},
          reflect: true
        }
      }
    });
  </script>
</polymer-element>
| {
"content_hash": "e6bb23ceef685d0e15b7926daa213692",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 90,
"avg_line_length": 20.129411764705882,
"alnum_prop": 0.5552308591466978,
"repo_name": "davidrusu/LaurierCourseGraph",
"id": "ec82dc7ee8dfbe2b50403ceb4279a19041f90fc0",
"size": "1711",
"binary": false,
"copies": "1",
"ref": "refs/heads/gh-pages",
"path": "tag-list.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "10427468"
},
{
"name": "JavaScript",
"bytes": "16468"
},
{
"name": "Python",
"bytes": "9047"
}
],
"symlink_target": ""
} |
layout: post
title: Difference between i++ and ++i
---
They both increment the number. ++i is equivalent to i = i + 1.
i++ and ++i are very similar but not exactly the same. Both increment the number, but ++i increments the number before the current expression is evaluated, whereas i++ increments the number after the expression is evaluated.
int i = 6;
int a = i++; // a = 6, i = 7
int b = ++a; // b = 7, a = 7 | {
"content_hash": "bb990f3bb2b79d7a46b0c097ca0a2107",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 223,
"avg_line_length": 35.583333333333336,
"alnum_prop": 0.6557377049180327,
"repo_name": "Korirmitchelle/Korirmitchelle.github.io",
"id": "0d7205da6f8e64766fe43b5aa61bb92707334204",
"size": "431",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_posts/2016-7-7-difference-between-i++-and-++1.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "63525"
},
{
"name": "HTML",
"bytes": "6266"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="iso-8859-1"?>
<!DOCTYPE html
PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html>
<head>
<title>data_for_reading_only (Imlib2::Image)</title>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1" />
<link rel="stylesheet" href="../../.././rdoc-style.css" type="text/css" media="screen" />
</head>
<body class="standalone-code">
<pre>/*
* Return a read-only reference to an image's raw 32-bit data.
*
* Examples:
* RAW_DATA = image.data_for_reading_only
* RAW_DATA = image.data!
*
*/
static VALUE image_data_ro(VALUE self) {
</pre>
</body>
</html> | {
"content_hash": "101bbaaa05783819a1da73cd2b75f055",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 91,
"avg_line_length": 28.583333333333332,
"alnum_prop": 0.6472303206997084,
"repo_name": "shao1555/Imlib2-Ruby",
"id": "090af2a7f80d0ce16a9fc817ff7569e54dda4047",
"size": "686",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "doc/classes/Imlib2/Image.src/M000250.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "233990"
},
{
"name": "Ruby",
"bytes": "13516"
}
],
"symlink_target": ""
} |
"""Create tf.Example and vocab for screen2words model."""
import json
import re
from typing import Callable, List, Generator, Sequence
from absl import app
from absl import flags
import apache_beam as beam
from apache_beam import runners
import nltk
import six
import tensorflow as tf
from screen2words import create_tf_example_fn
FLAGS = flags.FLAGS
flags.DEFINE_string('task', 'CREATE_VOCAB',
'Task name, could be CREATE_VOCAB or CREATE_TF_EXAMPLE.')
flags.DEFINE_string('dataset_paths', None,
'List of dataset paths, separated by comma.')
flags.DEFINE_string('json_file_path', None, 'json label file path.')
flags.DEFINE_string('word_vocab_path', None, 'Word vocab file path.')
flags.DEFINE_integer('max_token_per_label', 10,
'Max amount of tokens each label could have.')
flags.DEFINE_integer('max_label_per_screen', 5,
'Max amount of labels each screen has.')
flags.DEFINE_string('output_vocab_path', '/tmp/word_vocab.txt',
'Output vocab file path.')
flags.DEFINE_string('output_tfexample_path', None, 'Path to output tf.Example.')
BBOX_MAX_W = 360
BBOX_MAX_H = 640
def _generate_screen_id_and_captions_pair(json_file_path):
  """Loads the label json and returns (screen id, MTurk labels) pairs.

  Args:
    json_file_path: Path to a json file mapping screen ids to MTurk labels.

  Returns:
    A list of (screen_id, labels) tuples, one per screen.
  """
  with tf.gfile.GFile(json_file_path) as json_file:
    screen_to_labels = json.load(json_file)
  return list(screen_to_labels.items())
def _get_ascii_token(token):
"""Removes non-ASCII characters in the token."""
chars = []
for char in token:
# Try to encode the character with ASCII encoding. If there is an encoding
# error, it's not an ASCII character and can be skipped.
try:
char.encode('ascii')
except UnicodeEncodeError:
continue
chars.append(char)
return ''.join(chars)
def caption_tokenizer():
  """Creates a tokenizer for screen summary with default configuration.

  Returns:
    A tokenizer with configuration for screen summary: lowercased text,
    punctuation-only tokens removed, non-ASCII characters stripped, and
    tokens longer than 30 characters dropped.
  """
  return Tokenizer(
      lowercase_text=True,
      remove_punctuation=True,
      remove_nonascii_character=True,
      max_token_length=30)
class Tokenizer(object):
  """Tokenizer using NLTK with a few additional options."""

  # Pattern for recognizing non-punctuation words (any token containing at
  # least one alphanumeric character).
  _ALPHANUMERIC_PATTERN = re.compile(r'[a-zA-Z0-9]')

  def __init__(self,
               lowercase_text = False,
               remove_punctuation = False,
               remove_nonascii_character = False,
               max_token_length = -1):
    """Constructor.

    Args:
      lowercase_text: If True, convert text to lower case before tokenization.
      remove_punctuation: If True, remove punctuation in the tokens.
      remove_nonascii_character: If True, remove non-ascii characters within a
        token.
      max_token_length: Remove tokens with length larger than this value if it's
        positive.
    """
    self._lowercase_text = lowercase_text
    self._remove_punctuation = remove_punctuation
    self._max_token_length = max_token_length
    self._remove_nonascii_character = remove_nonascii_character

  def tokenize(self, text):
    """Tokenizes text into a list of tokens.

    Args:
      text: Input text.

    Returns:
      A list of tokens.
    """
    text = text.strip()
    # Lowercase and tokenize text.
    if self._lowercase_text:
      text = text.lower()
    tokens = nltk.word_tokenize(text)
    # Remove punctuation-only tokens.
    if self._remove_punctuation:
      tokens = [t for t in tokens if self._ALPHANUMERIC_PATTERN.search(t)]
    # Remove non-ASCII characters within the tokens, then drop tokens that
    # became empty as a result.
    if self._remove_nonascii_character:
      tokens = [_get_ascii_token(t) for t in tokens]
      tokens = [t for t in tokens if t]
    # Remove long tokens.
    if self._max_token_length > 0:
      tokens = [t for t in tokens if len(t) <= self._max_token_length]
    return tokens
class CreateTokenFn(beam.DoFn):
  """Reads a view hierarchy json file and yields tokens."""

  def __init__(self, dataset_path):
    """Constructor.

    Args:
      dataset_path: Directory prefix where `<screen_id>.json` files live.
    """
    # Beam metric counting how many screens this DoFn has processed.
    self._screen_counter = beam.metrics.Metrics.counter(self.__class__,
                                                        'screen')
    self.dataset_path = dataset_path

  def start_bundle(self):
    # Creates tokenizer used by the model.
    self._tokenizer = caption_tokenizer()

  def process(self, labels):
    """Emits tokens and phrases.

    Args:
      labels: A pair of <screen id, mturk labels>. Labels including captions and
        labeller-annotated attention bbx.

    Yields:
      Tokens and phrases as UTF-8 encoded bytes.
    """
    self._screen_counter.inc(1)
    screen_id, mtruk_labels = labels
    json_path = self.dataset_path + screen_id + '.json'
    for text in create_tf_example_fn.extract_token(json_path, screen_id,
                                                   mtruk_labels,
                                                   self._tokenizer):
      yield text.encode()
class CreateTFExampleFn(beam.DoFn):
  """Reads view hierarchy json and image and yields tf.Example."""

  def __init__(self, dataset_path, word_vocab_path, max_token_per_label,
               max_label_per_screen):
    """Constructor.

    Args:
      dataset_path: Path to rico dataset.
      word_vocab_path: Path to word vocab.
      max_token_per_label: Max tokens for each caption/label.
      max_label_per_screen: Max captions/labels for each screen.
    """
    self._screen_counter = beam.metrics.Metrics.counter(self.__class__,
                                                        'screen')
    self._example_counter = beam.metrics.Metrics.counter(
        self.__class__, 'example')
    self._word_vocab_path = word_vocab_path
    self._max_token_per_label = max_token_per_label
    self._max_label_per_screen = max_label_per_screen
    self._dataset_path = dataset_path

  def start_bundle(self):
    # Maps word -> integer index; rebuilt from the vocab file once per bundle.
    self._word_vocab = {}
    # Initialize word/phrase vocab and phrase type mapping.
    with tf.gfile.GFile(self._word_vocab_path) as f:
      for index, word in enumerate(f):
        word = word.strip()
        self._word_vocab[word] = index
    self._tokenizer = caption_tokenizer()

  def process(self, labels):
    """Emits serialized tf.Example proto.

    Args:
      labels: A pair of <screen id, mturk labels>. Labels including captions and
        labeller-annotated attention bbx.

    Yields:
      A serialized tf.Example. Nothing is emitted when example creation fails.
    """
    self._screen_counter.inc(1)
    screen_id, mturk_labels = labels
    prefix = self._dataset_path + screen_id
    example = create_tf_example_fn.create_tf_example(prefix, mturk_labels,
                                                     self._tokenizer,
                                                     self._word_vocab,
                                                     self._max_token_per_label,
                                                     self._max_label_per_screen)
    if not example:
      return
    self._example_counter.inc(1)
    yield example
def create_pipeline(task, dataset_path, json_file_path,
                    word_vocab_path, max_token_per_label,
                    max_label_per_screen, output_vocab_path,
                    output_tfexample_path):
  """Runs the end-to-end beam pipeline.

  Args:
    task: Either 'CREATE_VOCAB' or 'CREATE_TF_EXAMPLE'.
    dataset_path: Directory prefix of the rico dataset.
      NOTE(review): the corresponding flag `dataset_paths` is documented as a
      comma-separated list, but the value is used here as a single path
      prefix — confirm whether comma splitting is missing.
    json_file_path: Path to the MTurk label json file.
    word_vocab_path: Path to the word vocab (used by CREATE_TF_EXAMPLE).
    max_token_per_label: Max tokens for each caption/label.
    max_label_per_screen: Max captions/labels for each screen.
    output_vocab_path: Output path for the vocab text file.
    output_tfexample_path: Output path for the tf.Example TFRecord.

  Returns:
    A function that takes a pipeline root and builds the requested pipeline.

  Raises:
    ValueError: If `task` is not one of the two supported task names.
  """
  # Get file prefix and MTurk labels for each screen.
  merged = _generate_screen_id_and_captions_pair(json_file_path)

  def vocab_pipeline(root):
    """Pipeline for vocab generation: token -> "token\\tcount" text lines."""
    _ = (
        root | 'CreateCollection' >> beam.Create(merged)
        | 'CreateToken' >> beam.ParDo(CreateTokenFn(dataset_path))
        | 'CountTokens' >> beam.combiners.Count.PerElement()
        | 'FormatCount' >>
        beam.Map(lambda kv: '{}\t{}'.format(kv[0].decode(), kv[1]))
        | 'WriteToFile' >> beam.io.WriteToText(output_vocab_path))

  def tf_example_pipeline(root):
    """Pipeline for tf.Example generation into a TFRecord file."""
    _ = (
        root | 'CreateCollection' >> beam.Create(merged)
        | 'GenerateTFExample' >> beam.ParDo(
            CreateTFExampleFn(dataset_path, word_vocab_path,
                              max_token_per_label, max_label_per_screen))
        | 'WriteToFile' >> beam.io.WriteToTFRecord(
            output_tfexample_path,
            coder=beam.coders.ProtoCoder(tf.train.Example)))

  if task == 'CREATE_VOCAB':
    return vocab_pipeline
  elif task == 'CREATE_TF_EXAMPLE':
    return tf_example_pipeline
  else:
    raise ValueError('Task must be CREATE_VOCAB or CREATE_TF_EXAMPLE.')
def main(argv):
  """Entry point: builds the pipeline from flags and runs it on Dataflow."""
  if len(argv) > 1:
    raise app.UsageError('Too many command-line arguments.')
  pipeline = create_pipeline(FLAGS.task, FLAGS.dataset_paths,
                             FLAGS.json_file_path, FLAGS.word_vocab_path,
                             FLAGS.max_token_per_label,
                             FLAGS.max_label_per_screen,
                             FLAGS.output_vocab_path,
                             FLAGS.output_tfexample_path)
  runners.DataflowRunner().run_pipeline(pipeline)


if __name__ == '__main__':
  app.run(main)
| {
"content_hash": "8922307b4875975d75acc8f1ddd226ec",
"timestamp": "",
"source": "github",
"line_count": 274,
"max_line_length": 80,
"avg_line_length": 32.802919708029194,
"alnum_prop": 0.6172674677347575,
"repo_name": "google-research/google-research",
"id": "38c609370ad2c56fbbe8295026f68add9ca17f2d",
"size": "9596",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "screen2words/create_tf_example_main.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "9817"
},
{
"name": "C++",
"bytes": "4166670"
},
{
"name": "CMake",
"bytes": "6412"
},
{
"name": "CSS",
"bytes": "27092"
},
{
"name": "Cuda",
"bytes": "1431"
},
{
"name": "Dockerfile",
"bytes": "7145"
},
{
"name": "Gnuplot",
"bytes": "11125"
},
{
"name": "HTML",
"bytes": "77599"
},
{
"name": "ImageJ Macro",
"bytes": "50488"
},
{
"name": "Java",
"bytes": "487585"
},
{
"name": "JavaScript",
"bytes": "896512"
},
{
"name": "Julia",
"bytes": "67986"
},
{
"name": "Jupyter Notebook",
"bytes": "71290299"
},
{
"name": "Lua",
"bytes": "29905"
},
{
"name": "MATLAB",
"bytes": "103813"
},
{
"name": "Makefile",
"bytes": "5636"
},
{
"name": "NASL",
"bytes": "63883"
},
{
"name": "Perl",
"bytes": "8590"
},
{
"name": "Python",
"bytes": "53790200"
},
{
"name": "R",
"bytes": "101058"
},
{
"name": "Roff",
"bytes": "1208"
},
{
"name": "Rust",
"bytes": "2389"
},
{
"name": "Shell",
"bytes": "730444"
},
{
"name": "Smarty",
"bytes": "5966"
},
{
"name": "Starlark",
"bytes": "245038"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright (C) 2016 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<style name="ExoMediaButton">
<item name="android:background">?android:attr/selectableItemBackground</item>
<item name="android:layout_width">@dimen/exo_media_button_width</item>
<item name="android:layout_height">@dimen/exo_media_button_height</item>
</style>
<style name="ExoMediaButton.Previous">
<item name="android:src">@drawable/exo_controls_previous</item>
<item name="android:contentDescription">@string/exo_controls_previous_description</item>
</style>
<style name="ExoMediaButton.Next">
<item name="android:src">@drawable/exo_controls_next</item>
<item name="android:contentDescription">@string/exo_controls_next_description</item>
</style>
<style name="ExoMediaButton.FastForward">
<item name="android:src">@drawable/exo_controls_fastforward</item>
<item name="android:contentDescription">@string/exo_controls_fastforward_description</item>
</style>
<style name="ExoMediaButton.Rewind">
<item name="android:src">@drawable/exo_controls_rewind</item>
<item name="android:contentDescription">@string/exo_controls_rewind_description</item>
</style>
<style name="ExoMediaButton.Play">
<item name="android:src">@drawable/exo_controls_play</item>
<item name="android:contentDescription">@string/exo_controls_play_description</item>
</style>
<style name="ExoMediaButton.Pause">
<item name="android:src">@drawable/exo_controls_pause</item>
<item name="android:contentDescription">@string/exo_controls_pause_description</item>
</style>
<style name="ExoMediaButton.Shuffle">
<item name="android:src">@drawable/exo_controls_shuffle</item>
<item name="android:contentDescription">@string/exo_controls_shuffle_description</item>
</style>
<style name="ExoMediaButton.VR">
<item name="android:src">@drawable/exo_icon_vr</item>
<item name="android:contentDescription">@string/exo_controls_vr_description</item>
</style>
</resources>
| {
"content_hash": "3b5d8e09bf187733a1a2073d5f83b455",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 95,
"avg_line_length": 41.109375,
"alnum_prop": 0.72595971113645,
"repo_name": "saki4510t/ExoPlayer",
"id": "89d7a2fc8e07d970502510d293afb6eaee27ed28",
"size": "2631",
"binary": false,
"copies": "1",
"ref": "refs/heads/release-v2",
"path": "library/ui/src/main/res/values/styles.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "52882"
},
{
"name": "Java",
"bytes": "3818414"
},
{
"name": "Makefile",
"bytes": "13719"
},
{
"name": "Shell",
"bytes": "5691"
}
],
"symlink_target": ""
} |
package view.experiment.signalID.dialog;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.JButton;
/**
* Кнопка для добавления нового канала в список
*
* @author Mikey
*
*/
public class SignalIDAddNewPanel extends JButton {
/**
*
*/
private static final long serialVersionUID = 6251168837166000430L;
public SignalIDAddNewPanel() {
super("Добавить канал", getIconImage());
}
public static Icon getIconImage() {
BufferedImage image = new BufferedImage(32, 32, BufferedImage.TYPE_INT_ARGB);
Graphics2D g = (Graphics2D) image.getGraphics();
g.setColor(Color.GREEN);
g.setStroke(new BasicStroke(15f));
g.drawLine(0, 16, 32, 16);
g.drawLine(16, 0, 16, 32);
return new ImageIcon(image);
}
}
| {
"content_hash": "032a8708374f0455d4792331cf18fcfa",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 79,
"avg_line_length": 23.763157894736842,
"alnum_prop": 0.7009966777408638,
"repo_name": "MikhailChe/TemperatureWaveMethod",
"id": "15b92852f2051a15897798925e5b27d622193c97",
"size": "954",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/view/experiment/signalID/dialog/SignalIDAddNewPanel.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "201196"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<doctrine-mapping xmlns="http://doctrine-project.org/schemas/orm/doctrine-mapping" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://doctrine-project.org/schemas/orm/doctrine-mapping http://doctrine-project.org/schemas/orm/doctrine-mapping.xsd">
<entity name="AppBundle\Entity\CmsStzefBannerDeta" table="cms_stzef_banner_deta">
<indexes>
<index name="fk_cms_stzef_banner_deta_cms_stzef_banners1_idx" columns="cms_stzef_banners_id"/>
</indexes>
<id name="id" type="integer" column="id">
<generator strategy="IDENTITY"/>
</id>
<field name="image" type="string" column="image" length="500" nullable="false"/>
<field name="contentHtml" type="text" column="content_html" length="65535" nullable="false"/>
<many-to-one field="cmsStzefBanners" target-entity="CmsStzefBanners">
<join-columns>
<join-column name="cms_stzef_banners_id" referenced-column-name="id"/>
</join-columns>
</many-to-one>
</entity>
</doctrine-mapping>
| {
"content_hash": "5088180ca93d079ec6016151c2f94745",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 276,
"avg_line_length": 58.611111111111114,
"alnum_prop": 0.7004739336492891,
"repo_name": "stzef/CMSstzef",
"id": "32f5cb0c3a7c6aa1b2b673145c2108262e2f9aae",
"size": "1055",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/AppBundle/Resources/config/doctrine/CmsStzefBannerDeta.orm.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "977955"
},
{
"name": "HTML",
"bytes": "329812"
},
{
"name": "JavaScript",
"bytes": "2669029"
},
{
"name": "PHP",
"bytes": "986577"
},
{
"name": "Shell",
"bytes": "3476"
}
],
"symlink_target": ""
} |
#include "StableHeaders.h"
#include "DebugOperatorNew.h"
#include "AssetTreeWidget.h"
#include "SceneTreeWidgetItems.h"
#include "AddContentWindow.h"
#include "SupportedFileTypes.h"
#include "RequestNewAssetDialog.h"
#include "CloneAssetDialog.h"
#include "FunctionDialog.h"
#include "AssetsWindow.h"
#include "SceneAPI.h"
#include "AssetAPI.h"
#include "IAsset.h"
#include "IAssetStorage.h"
#include "Scene.h"
#include "AssetCache.h"
#include "QtUtils.h"
#include "UiAPI.h"
#include "FunctionInvoker.h"
#include "ArgumentType.h"
#include "MemoryLeakCheck.h"
// Configures multi-selection per item and makes the widget a drop target for
// asset files (drag-out is intentionally disabled).
AssetTreeWidget::AssetTreeWidget(Framework *fw, QWidget *parent) :
    QTreeWidget(parent),
    framework(fw),
    contextMenu(0)
{
    setSelectionMode(QAbstractItemView::ExtendedSelection);
    setSelectionBehavior(QAbstractItemView::SelectItems);
    setDragDropMode(QAbstractItemView::DropOnly/*DragDrop*/);
    setDropIndicatorShown(true);
}
// Builds a fresh context menu for the item under the cursor and shows it.
void AssetTreeWidget::contextMenuEvent(QContextMenuEvent *e)
{
    // Do mousePressEvent so that the right item gets selected before we show the menu
    // (right-click doesn't do this automatically).
    QMouseEvent mouseEvent(QEvent::MouseButtonPress, e->pos(), e->globalPos(),
        Qt::LeftButton, Qt::LeftButton, Qt::NoModifier);
    mousePressEvent(&mouseEvent);

    // Create context menu and show it. The previous menu (if any) is deleted
    // first; the new one is parented to this widget.
    SAFE_DELETE(contextMenu);
    contextMenu = new QMenu(this);
    AddAvailableActions(contextMenu);
    contextMenu->popup(e->globalPos());
}
// Accepts an incoming drag if it carries at least one URL whose path maps to a
// known asset resource type; other drags fall through to the base class.
void AssetTreeWidget::dragEnterEvent(QDragEnterEvent *e)
{
    const QMimeData *data = e->mimeData();
    if (data->hasUrls())
    {
        foreach(QUrl url, data->urls())
            if (!AssetAPI::GetResourceTypeFromAssetRef(url.path()).isEmpty())
                e->acceptProposedAction();
    }
    else
        QTreeWidget::dragEnterEvent(e);
}
// Same acceptance rule as dragEnterEvent: keep accepting the drag while it
// hovers, as long as some URL resolves to a known asset resource type.
void AssetTreeWidget::dragMoveEvent(QDragMoveEvent *e)
{
    const QMimeData *data = e->mimeData();
    if (data->hasUrls())
    {
        foreach(QUrl url, data->urls())
            if (!AssetAPI::GetResourceTypeFromAssetRef(url.path()).isEmpty())
                e->acceptProposedAction();
    }
    else
        QTreeWidget::dragMoveEvent(e);
}
// Handles a completed drop: collects every dropped URL that maps to a known
// asset resource type and uploads those files; other drops go to the base class.
void AssetTreeWidget::dropEvent(QDropEvent *e)
{
    const QMimeData *data = e->mimeData();
    if (data->hasUrls())
    {
        QStringList filenames;
        foreach(QUrl url, data->urls())
            if (!AssetAPI::GetResourceTypeFromAssetRef(url.path()).isEmpty())
            {
                QString filename = url.path();
#ifdef _WINDOWS
                // We have '/' as the first char on windows and the filename
                // is not identified as a file properly. But on other platforms the '/' is valid/required.
                filename = filename.mid(1);
#endif
                filenames << filename;
            }

        if (!filenames.isEmpty())
        {
            e->acceptProposedAction();
            Upload(filenames);
        }
    }
    else
        QTreeWidget::dropEvent(e);
}
// Populates @c menu with all actions applicable to the current selection:
// asset delete/reload/unload/file actions, generic actions (Functions, Import,
// Request new asset) and storage actions. Finally gives other widgets a chance
// to append their own actions via EmitContextMenuAboutToOpen.
void AssetTreeWidget::AddAvailableActions(QMenu *menu)
{
    AssetTreeWidgetSelection sel = GetSelection();
    if (sel.HasAssets())
    {
        QMenu *deleteMenu = new QMenu(tr("Delete"), menu);
        QAction *deleteSourceAction = new QAction(tr("Delete from source"), deleteMenu);
        QAction *deleteCacheAction = new QAction(tr("Delete from cache"), deleteMenu);
        QAction *forgetAction = new QAction(tr("Forget asset"), deleteMenu);

        deleteMenu->addAction(deleteSourceAction);
        deleteMenu->addAction(deleteCacheAction);
        deleteMenu->addAction(forgetAction);
        menu->addMenu(deleteMenu);

        connect(deleteSourceAction, SIGNAL(triggered()), SLOT(DeleteFromSource()));
        connect(deleteCacheAction, SIGNAL(triggered()), SLOT(DeleteFromCache()));
        connect(forgetAction, SIGNAL(triggered()), SLOT(Forget()));

        QMenu *reloadMenu = new QMenu(tr("Reload"), menu);
        // Fix: the reload actions were parented to deleteMenu by copy-paste;
        // parent them to the menu that actually contains them so QObject
        // ownership matches the menu structure.
        QAction *reloadFromSourceAction = new QAction(tr("Reload from source"), reloadMenu);
        QAction *reloadFromCacheAction = new QAction(tr("Reload from cache"), reloadMenu);
        QAction *unloadAction = new QAction(tr("Unload"), reloadMenu);

        // Reload from cache & delete from cache are not possible e.g. for local assets,
        // which don't have a cached version of the asset. Even if the asset is an HTTP
        // asset, these options are disabled if no cached version of that asset exists.
        foreach(AssetItem *item, sel.assets)
            if (item->Asset() && framework->Asset()->GetAssetCache()->FindInCache(item->Asset()->Name()).isEmpty())
            {
                reloadFromCacheAction->setDisabled(true);
                deleteCacheAction->setDisabled(true);
                break;
            }

        reloadMenu->addAction(reloadFromSourceAction);
        reloadMenu->addAction(reloadFromCacheAction);
        reloadMenu->addAction(unloadAction);
        menu->addMenu(reloadMenu);

        connect(reloadFromSourceAction, SIGNAL(triggered()), SLOT(ReloadFromSource()));
        connect(reloadFromCacheAction, SIGNAL(triggered()), SLOT(ReloadFromCache()));
        connect(unloadAction, SIGNAL(triggered()), SLOT(Unload()));

        QAction *openFileLocationAction = new QAction(tr("Open file location"), menu);
        menu->addAction(openFileLocationAction);
        connect(openFileLocationAction, SIGNAL(triggered()), SLOT(OpenFileLocation()));

        // Delete from Source, Delete from Cache, Reload from Source, Unload and
        // Open File Location are not applicable for assets which have been created
        // programmatically (disk source is empty).
        ///\todo Currently disk source is empty for unloaded assets, and open file location is disabled for them. This should not happen.
        foreach(AssetItem *item, sel.assets)
            if (item->Asset() && item->Asset()->DiskSource().trimmed().isEmpty())
            {
                deleteSourceAction->setDisabled(true);
                deleteCacheAction->setDisabled(true);
                reloadFromSourceAction->setDisabled(true);
                unloadAction->setDisabled(true);
                openFileLocationAction->setDisabled(true);
                break;
            }

        menu->addSeparator();

        QAction *cloneAction = new QAction(tr("Clone..."), menu);
        menu->addAction(cloneAction);
        connect(cloneAction, SIGNAL(triggered()), SLOT(Clone()));

        QAction *exportAction = new QAction(tr("Export..."), menu);
        menu->addAction(exportAction);
        connect(exportAction, SIGNAL(triggered()), SLOT(Export()));
    }

    QAction *functionsAction = new QAction(tr("Functions..."), menu);
    connect(functionsAction, SIGNAL(triggered()), this, SLOT(OpenFunctionDialog()));
    menu->addAction(functionsAction);
    // "Functions..." is disabled if we have both assets and storages selected simultaneously.
    if (sel.HasAssets() && sel.HasStorages())
        functionsAction->setDisabled(true);

    QAction *importAction = new QAction(tr("Import..."), menu);
    connect(importAction, SIGNAL(triggered()), SLOT(Import()));
    menu->addAction(importAction);

    QAction *requestNewAssetAction = new QAction(tr("Request new asset..."), menu);
    connect(requestNewAssetAction, SIGNAL(triggered()), SLOT(RequestNewAsset()));
    menu->addAction(requestNewAssetAction);

    if (sel.storages.count() == 1)
    {
        QAction *makeDefaultStorageAction = new QAction(tr("Make default storage"), menu);
        connect(makeDefaultStorageAction, SIGNAL(triggered()), SLOT(MakeDefaultStorage()));
        menu->addAction(makeDefaultStorageAction);
    }

    if (sel.storages.count() > 0)
    {
        QAction *removeStorageAction = new QAction(tr("Remove storage"), menu);
        connect(removeStorageAction, SIGNAL(triggered()), SLOT(RemoveStorage()));
        menu->addAction(removeStorageAction);
    }

    // Let other instances add their possible functionality.
    // For now, pass only asset items.
    QList<QObject *> targets;
    foreach(AssetItem *item, sel.assets)
        targets.append(item->Asset().get());
    framework->Ui()->EmitContextMenuAboutToOpen(menu, targets);
}
// Returns the current tree selection split into asset items and storage items.
AssetTreeWidgetSelection AssetTreeWidget::GetSelection() const
{
    AssetTreeWidgetSelection sel;
    foreach(QTreeWidgetItem *item, selectedItems())
    {
        // Classify each selected item: asset items go to sel.assets, top-level
        // storage items go to sel.storages; anything else is ignored.
        AssetItem *assetItem = dynamic_cast<AssetItem *>(item);
        if (assetItem)
        {
            sel.assets << assetItem;
            continue;
        }
        AssetStorageItem *storageItem = dynamic_cast<AssetStorageItem *>(item);
        if (storageItem)
            sel.storages << storageItem;
    }
    return sel;
}
// Permanently deletes the selected assets from their storage, after asking the
// user for confirmation with the affected disk sources shown as details.
void AssetTreeWidget::DeleteFromSource()
{
    // AssetAPI::DeleteAssetFromStorage() signals will start deletion of tree widget asset items:
    // Gather the asset refs to a separate list beforehand in order to prevent crash.
    QStringList assetRefs, assetsToBeDeleted;
    foreach(AssetItem *item, GetSelection().assets)
        if (item->Asset())
        {
            assetRefs << item->Asset()->Name();
            assetsToBeDeleted << item->Asset()->DiskSource();
        }

    QMessageBox msgBox(QMessageBox::Warning, tr("Delete From Source"),
        tr("Are you sure want to delete the selected asset(s) permanently from the source?\n"),
        QMessageBox::Ok | QMessageBox::Cancel, this);
    msgBox.setDetailedText(assetsToBeDeleted.join("\n"));
    int ret = msgBox.exec();
    if (ret == QMessageBox::Ok)
        foreach(QString ref, assetRefs)
            framework->Asset()->DeleteAssetFromStorage(ref);
}
// Deletes the cached disk copies of all selected assets.
void AssetTreeWidget::DeleteFromCache()
{
    if (!framework->Asset()->GetAssetCache())
    {
        LogError("Cannot delete asset from cache: Not running Tundra with an asset cache!");
        return;
    }

    foreach(AssetItem *item, GetSelection().assets)
    {
        if (!item->Asset())
            continue;
        framework->Asset()->GetAssetCache()->DeleteAsset(item->Asset()->Name());
    }
}
// Makes the AssetAPI forget each selected asset (second argument false:
// the asset's disk source is left untouched).
void AssetTreeWidget::Forget()
{
    foreach(AssetItem *item, GetSelection().assets)
    {
        if (item->Asset())
            framework->Asset()->ForgetAsset(item->Asset(), false);
    }
}
// Unloads every selected asset. The tree items are intentionally kept alive;
// AssetsWindow marks them as unloaded instead of removing them.
void AssetTreeWidget::Unload()
{
    foreach(AssetItem *item, GetSelection().assets)
    {
        if (!item->Asset())
            continue;
        item->Asset()->Unload();
        ///\todo Preferably use the AssetDeleted() or similar signal from AssetAPI for deleting items.
    }
}
// Reloads every selected asset from its cached disk copy.
void AssetTreeWidget::ReloadFromCache()
{
    foreach(AssetItem *item, GetSelection().assets)
    {
        if (item->Asset())
            item->Asset()->LoadFromCache();
    }
}
// Forces a re-download of every selected asset from its original source.
void AssetTreeWidget::ReloadFromSource()
{
    foreach(AssetItem *item, GetSelection().assets)
    {
        if (!item->Asset())
            continue;
        // Make a 'forced request' of the existing asset. This causes a full
        // re-download; the new data is deserialized into the existing asset object.
        framework->Asset()->RequestAsset(item->Asset()->Name(), item->Asset()->Type(), true);
    }
}
// Shows a non-modal "Import" file dialog accepting all known asset types;
// OpenFileDialogClosed() handles the dialog result.
void AssetTreeWidget::Import()
{
    QtUtils::OpenFileDialogNonModal(cAllTypesFileFilter, tr("Import"), "", 0, this, SLOT(OpenFileDialogClosed(int)), true);
}
// Completes an Import() operation by uploading the chosen files.
void AssetTreeWidget::OpenFileDialogClosed(int result)
{
    QFileDialog *dialog = dynamic_cast<QFileDialog *>(sender());
    assert(dialog);
    // Upload only when the sender was a file dialog that was accepted with a
    // non-empty file selection.
    if (!dialog || result != QDialog::Accepted || dialog->selectedFiles().isEmpty())
        return;

    Upload(dialog->selectedFiles());
}
// Opens a dialog for requesting a new asset by source and type;
// RequestNewAssetDialogClosed() performs the actual request.
void AssetTreeWidget::RequestNewAsset()
{
    RequestNewAssetDialog *dialog = new RequestNewAssetDialog(framework->Asset(), this);
    connect(dialog, SIGNAL(finished(int)), SLOT(RequestNewAssetDialogClosed(int)));
    dialog->show();
}
// Sets the single selected storage as the default asset storage, then refreshes
// the parent AssetsWindow so the change is visible.
void AssetTreeWidget::MakeDefaultStorage()
{
    AssetTreeWidgetSelection sel = GetSelection();
    if (sel.storages.size() == 1)
    {
        framework->Asset()->SetDefaultAssetStorage(sel.storages.first()->Storage());
        //QString storageName = selected.first()->data(0, Qt::UserRole).toString();
        //framework->Asset()->SetDefaultAssetStorage(framework->Asset()->GetAssetStorageByName(storageName));
    }

    AssetsWindow *parent = dynamic_cast<AssetsWindow*>(parentWidget());
    if (parent)
        parent->PopulateTreeWidget();
}
void AssetTreeWidget::RemoveStorage()
{
foreach(AssetStorageItem *item, GetSelection().storages)
{
//QString storageName = item->data(0, Qt::UserRole).toString();
//framework->Asset()->RemoveAssetStorage(storageName);
if (item->Storage())
framework->Asset()->RemoveAssetStorage(item->Storage()->Name());
}
AssetsWindow *parent = dynamic_cast<AssetsWindow*>(parentWidget());
if (parent)
parent->PopulateTreeWidget();
}
// Completes a RequestNewAsset() operation by issuing the asset request.
void AssetTreeWidget::RequestNewAssetDialogClosed(int result)
{
    RequestNewAssetDialog *dialog = qobject_cast<RequestNewAssetDialog*>(sender());
    // Proceed only when the sender was the expected dialog and it was accepted.
    if (!dialog || result != QDialog::Accepted)
        return;

    framework->Asset()->RequestAsset(dialog->Source(), dialog->Type());
}
// Exports the selected asset(s): a single selection opens a "Save As" file
// dialog, a multi-selection asks for a target directory. SaveAssetDialogClosed()
// performs the actual saving.
void AssetTreeWidget::Export()
{
    QList<AssetItem *> sel = GetSelection().assets;
    if (sel.isEmpty())
        return;

    if (sel.size() == 1)
    {
        // Suggest the asset's own filename as the default save name.
        QString ref = sel.first()->Asset() ? sel.first()->Asset()->Name() : "";
        QString assetName= AssetAPI::ExtractFilenameFromAssetRef(ref);
        QtUtils::SaveFileDialogNonModal("", tr("Save Asset As"), assetName, 0, this, SLOT(SaveAssetDialogClosed(int)));
    }
    else
    {
        QtUtils::DirectoryDialogNonModal(tr("Select Directory"), "", 0, this, SLOT(SaveAssetDialogClosed(int)));
    }
}
void AssetTreeWidget::Clone()
{
QList<AssetItem *> sel = GetSelection().assets;
if (sel.isEmpty())
return;
CloneAssetDialog *dialog = new CloneAssetDialog(sel.first()->Asset(), framework->Asset(), this);
connect(dialog, SIGNAL(finished(int)), SLOT(CloneAssetDialogClosed(int)));
dialog->show();
}
// Completes a Clone() operation, cloning the asset under the name chosen in the dialog.
void AssetTreeWidget::CloneAssetDialogClosed(int result)
{
    CloneAssetDialog *dialog = qobject_cast<CloneAssetDialog *>(sender());
    if (!dialog || result != QDialog::Accepted)
        return;

    // Clone only if the source asset still exists (the weak pointer can be locked).
    if (dialog->Asset().lock())
        dialog->Asset().lock()->Clone(dialog->NewName());
}
// Completes an Export() operation: saves the selected asset(s) to the chosen
// file (single selection) or directory (multi-selection).
void AssetTreeWidget::SaveAssetDialogClosed(int result)
{
    QFileDialog *dialog = dynamic_cast<QFileDialog *>(sender());
    assert(dialog);
    if (!dialog || result != QDialog::Accepted || dialog->selectedFiles().isEmpty())
        return;

    QStringList files = dialog->selectedFiles();
    QList<AssetItem *> sel = GetSelection().assets;

    bool isDir = QDir(files[0]).exists();

    // Sanity check that the dialog result matches the selection: a single asset
    // must be saved to a file and multiple assets to an existing directory.
    // NOTE(review): this also rejects a single asset when the chosen path is an
    // existing directory — confirm this is intended for the dialogs opened in Export().
    if ((sel.size() == 1 && isDir) || (sel.size() > 1 && !isDir))
    {
        // should not happen normally, so just log error. No prompt for user.
        // LogError("Could not save asset: no such directory.");
        return;
    }

    foreach(AssetItem *item, sel)
        if (item->Asset())
        {
            // if saving multiple assets, append filename to directory
            QString filename = files[0];
            if (isDir)
            {
                QString assetName = AssetAPI::ExtractFilenameFromAssetRef(item->Asset()->Name());
                //while(QFile::exists(filename))
                //filename.append("_");
                filename += QDir::separator() + assetName;
            }

            QString param;
            // For texture assets, pass the target file extension as the save parameter.
            if (item->Asset()->Type().contains("texture", Qt::CaseInsensitive))
                param = filename.right(filename.size() - filename.lastIndexOf('.') - 1);
            item->Asset()->SaveToFile(filename, param);
        }
}
// Opens an AddContentWindow for the main camera scene, pre-populated with
// @c files, letting the user upload them to an asset storage.
void AssetTreeWidget::Upload(const QStringList &files)
{
    AddContentWindow *addContent = new AddContentWindow(framework, framework->Scene()->MainCameraScene()->shared_from_this());
    addContent->AddFiles(files);
    addContent->show();
}
// Opens the directory containing the first selected asset's disk source in the
// operating system's file manager.
void AssetTreeWidget::OpenFileLocation()
{
    QList<AssetItem *> selection = GetSelection().assets;
    // Fix: the former "|| selection.size() < 1" was redundant with isEmpty().
    if (selection.isEmpty())
        return;

    AssetItem *item = selection.first();
    if (item->Asset() && !item->Asset()->DiskSource().isEmpty())
    {
        QString path = QDir::toNativeSeparators(QFileInfo(item->Asset()->DiskSource()).dir().path());
        QDesktopServices::openUrl("file:///" + path);
    }
}
// Opens a FunctionDialog targeting either the selected assets or the selected
// storages (a mixed selection is not supported).
void AssetTreeWidget::OpenFunctionDialog()
{
    AssetTreeWidgetSelection sel = GetSelection();
    if (sel.HasAssets() && sel.HasStorages())
        return;

    // Fix: reuse the selection captured above instead of re-querying
    // GetSelection() inside each branch (it was computed three times).
    QObjectWeakPtrList objs;
    if (sel.HasAssets())
        foreach(AssetItem *item, sel.assets)
            objs << boost::dynamic_pointer_cast<QObject>(item->Asset());
    else if (sel.HasStorages())
        foreach(AssetStorageItem *item, sel.storages)
            objs << boost::dynamic_pointer_cast<QObject>(item->Storage());

    if (objs.size())
    {
        FunctionDialog *d = new FunctionDialog(objs, this);
        connect(d, SIGNAL(finished(int)), this, SLOT(FunctionDialogFinished(int)));
        d->show();
    }
}
// Invokes the function configured in the FunctionDialog on each of the dialog's
// target objects and appends every return value (or error message) to the
// dialog's return-value text.
void AssetTreeWidget::FunctionDialogFinished(int result)
{
    FunctionDialog *dialog = qobject_cast<FunctionDialog *>(sender());
    if (!dialog)
        return;

    if (result == QDialog::Rejected)
        return;

    // Get the list of parameters we will pass to the function we are invoking,
    // and update the latest values to them from the editor widgets the user inputted.
    QVariantList params;
    foreach(IArgumentType *arg, dialog->Arguments())
    {
        arg->UpdateValueFromEditor();
        params << arg->ToQVariant();
    }

    // Clear old return value from the dialog.
    dialog->SetReturnValueText("");

    foreach(const QObjectWeakPtr &o, dialog->Objects())
        if (o.lock())
        {
            QObject *obj = o.lock().get();
            QString objName = obj->metaObject()->className();
            QString objNameWithId = objName;
            // For assets, append the asset name so the output identifies the object.
            IAsset *asset = dynamic_cast<IAsset *>(obj);
            if (asset)
                objNameWithId.append('(' + asset->Name() + ')');

            QString errorMsg;
            QVariant ret;
            FunctionInvoker invoker;
            invoker.Invoke(obj, dialog->Function(), params, &ret, &errorMsg);

            if (errorMsg.isEmpty())
                dialog->AppendReturnValueText(objNameWithId + ' ' + ret.toString());
            else
                dialog->AppendReturnValueText(objNameWithId + ' ' + errorMsg);
        }
}
| {
"content_hash": "71e775c46643028f0a467f2f278a4f0b",
"timestamp": "",
"source": "github",
"line_count": 554,
"max_line_length": 141,
"avg_line_length": 33.9927797833935,
"alnum_prop": 0.6386469838572643,
"repo_name": "antont/tundra",
"id": "2472819f9261b89c2e007c49dfe824f18b4f683f",
"size": "19008",
"binary": false,
"copies": "1",
"ref": "refs/heads/tundra2",
"path": "src/Core/ECEditorModule/AssetTreeWidget.cpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "110345"
},
{
"name": "C#",
"bytes": "76173"
},
{
"name": "C++",
"bytes": "4959154"
},
{
"name": "CoffeeScript",
"bytes": "2229"
},
{
"name": "JavaScript",
"bytes": "316308"
},
{
"name": "Objective-C",
"bytes": "222359"
},
{
"name": "Python",
"bytes": "999850"
},
{
"name": "Shell",
"bytes": "8224"
},
{
"name": "TypeScript",
"bytes": "230019"
}
],
"symlink_target": ""
} |
// This source file is adapted from the WinUI project.
// (https://github.com/microsoft/microsoft-ui-xaml)
//
// Licensed to The Avalonia Project under MIT License, courtesy of The .NET Foundation.
using System;
using System.Collections;
using System.Collections.Specialized;
using Avalonia.Controls.Templates;
using Avalonia.Input;
using Avalonia.Layout;
using Avalonia.Logging;
using Avalonia.Utilities;
using Avalonia.VisualTree;
namespace Avalonia.Controls
{
/// <summary>
/// Represents a data-driven collection control that incorporates a flexible layout system,
/// custom views, and virtualization.
/// </summary>
public class ItemsRepeater : Panel
{
        /// <summary>
        /// Defines the <see cref="HorizontalCacheLength"/> property.
        /// </summary>
        public static readonly StyledProperty<double> HorizontalCacheLengthProperty =
            AvaloniaProperty.Register<ItemsRepeater, double>(nameof(HorizontalCacheLength), 2.0);

        /// <summary>
        /// Defines the <see cref="ItemTemplate"/> property.
        /// </summary>
        public static readonly StyledProperty<IDataTemplate> ItemTemplateProperty =
            ItemsControl.ItemTemplateProperty.AddOwner<ItemsRepeater>();

        /// <summary>
        /// Defines the <see cref="Items"/> property.
        /// </summary>
        public static readonly DirectProperty<ItemsRepeater, IEnumerable> ItemsProperty =
            ItemsControl.ItemsProperty.AddOwner<ItemsRepeater>(o => o.Items, (o, v) => o.Items = v);

        /// <summary>
        /// Defines the <see cref="Layout"/> property.
        /// </summary>
        public static readonly StyledProperty<AttachedLayout> LayoutProperty =
            AvaloniaProperty.Register<ItemsRepeater, AttachedLayout>(nameof(Layout), new StackLayout());

        /// <summary>
        /// Defines the <see cref="VerticalCacheLength"/> property.
        /// </summary>
        public static readonly StyledProperty<double> VerticalCacheLengthProperty =
            AvaloniaProperty.Register<ItemsRepeater, double>(nameof(VerticalCacheLength), 2.0);

        // Attached property carrying per-element virtualization bookkeeping.
        private static readonly StyledProperty<VirtualizationInfo> VirtualizationInfoProperty =
            AvaloniaProperty.RegisterAttached<ItemsRepeater, IControl, VirtualizationInfo>("VirtualizationInfo");

        // Sentinel rect meaning "no valid rect".
        internal static readonly Rect InvalidRect = new Rect(-1, -1, -1, -1);

        // Cleared elements are arranged at this far off-screen position (see ArrangeOverride).
        internal static readonly Point ClearedElementsArrangePosition = new Point(-10000.0, -10000.0);

        private readonly ViewManager _viewManager;       // element realization/recycling
        private readonly ViewportManager _viewportManager; // viewport tracking and anchoring
        private IEnumerable _items;                      // backing field for Items
        private VirtualizingLayoutContext _layoutContext;
        // Non-null while a collection-change notification is being processed;
        // backs IsProcessingCollectionChange and the clear-mode in ClearElementImpl.
        private NotifyCollectionChangedEventArgs _processingItemsSourceChange;
        private bool _isLayoutInProgress;
        // Event args are cached and re-used to avoid an allocation per raised event.
        private ItemsRepeaterElementPreparedEventArgs _elementPreparedArgs;
        private ItemsRepeaterElementClearingEventArgs _elementClearingArgs;
        private ItemsRepeaterElementIndexChangedEventArgs _elementIndexChangedArgs;
        /// <summary>
        /// Initializes a new instance of the <see cref="ItemsRepeater"/> class.
        /// </summary>
        public ItemsRepeater()
        {
            _viewManager = new ViewManager(this);
            _viewportManager = new ViewportManager(this);
            KeyboardNavigation.SetTabNavigation(this, KeyboardNavigationMode.Once);
            // Run the layout-changed path for the default Layout value so that the
            // initial layout gets initialized and subscribed exactly like a later one.
            OnLayoutChanged(null, Layout);
        }
        static ItemsRepeater()
        {
            // Clip children to the repeater's bounds by default.
            ClipToBoundsProperty.OverrideDefaultValue<ItemsRepeater>(true);
            // Handle bring-into-view requests bubbling up from realized children.
            RequestBringIntoViewEvent.AddClassHandler<ItemsRepeater>((x, e) => x.OnRequestBringIntoView(e));
        }
        /// <summary>
        /// Gets or sets the layout used to size and position elements in the ItemsRepeater.
        /// </summary>
        /// <value>
        /// The layout used to size and position elements. The default is a StackLayout with
        /// vertical orientation.
        /// </value>
        public AttachedLayout Layout
        {
            get => GetValue(LayoutProperty);
            set => SetValue(LayoutProperty, value);
        }

        /// <summary>
        /// Gets or sets an object source used to generate the content of the ItemsRepeater.
        /// </summary>
        public IEnumerable Items
        {
            get => _items;
            set => SetAndRaise(ItemsProperty, ref _items, value);
        }

        /// <summary>
        /// Gets or sets the template used to display each item.
        /// </summary>
        public IDataTemplate ItemTemplate
        {
            get => GetValue(ItemTemplateProperty);
            set => SetValue(ItemTemplateProperty, value);
        }

        /// <summary>
        /// Gets or sets a value that indicates the size of the buffer used to realize items when
        /// panning or scrolling horizontally.
        /// </summary>
        public double HorizontalCacheLength
        {
            get => GetValue(HorizontalCacheLengthProperty);
            set => SetValue(HorizontalCacheLengthProperty, value);
        }

        /// <summary>
        /// Gets or sets a value that indicates the size of the buffer used to realize items when
        /// panning or scrolling vertically.
        /// </summary>
        public double VerticalCacheLength
        {
            get => GetValue(VerticalCacheLengthProperty);
            set => SetValue(VerticalCacheLengthProperty, value);
        }

        /// <summary>
        /// Gets a standardized view of the supported interactions between a given Items object and
        /// the ItemsRepeater control and its associated components.
        /// </summary>
        public ItemsSourceView ItemsSourceView { get; private set; }

        // Element factory wrapping ItemTemplate; assigned in OnItemTemplateChanged.
        internal IElementFactory ItemTemplateShim { get; set; }

        // Layout origin; combined with the measured size to form the layout extent.
        internal Point LayoutOrigin { get; set; }

        // Opaque per-layout state; reset to null when the layout changes.
        internal object LayoutState { get; set; }

        internal IControl MadeAnchor => _viewportManager.MadeAnchor;

        internal Rect RealizationWindow => _viewportManager.GetLayoutRealizationWindow();

        internal IControl SuggestedAnchor => _viewportManager.SuggestedAnchor;

        // True while a collection-change notification is being processed.
        private bool IsProcessingCollectionChange => _processingItemsSourceChange != null;

        // Lazily-created context handed to the attached layout.
        private LayoutContext LayoutContext
        {
            get
            {
                if (_layoutContext == null)
                {
                    _layoutContext = new RepeaterLayoutContext(this);
                }

                return _layoutContext;
            }
        }

        /// <summary>
        /// Occurs each time an element is cleared and made available to be re-used.
        /// </summary>
        /// <remarks>
        /// This event is raised immediately each time an element is cleared, such as when it falls
        /// outside the range of realized items. Elements are cleared when they become available
        /// for re-use.
        /// </remarks>
        public event EventHandler<ItemsRepeaterElementClearingEventArgs> ElementClearing;

        /// <summary>
        /// Occurs for each realized <see cref="IControl"/> when the index for the item it
        /// represents has changed.
        /// </summary>
        /// <remarks>
        /// When you use ItemsRepeater to build a more complex control that supports specific
        /// interactions on the child elements (such as selection or click), it is useful to be
        /// able to keep an up-to-date identifier for the backing data item.
        ///
        /// This event is raised for each realized IControl where the index for the item it
        /// represents has changed. For example, when another item is added or removed in the data
        /// source, the index for items that come after in the ordering will be impacted.
        /// </remarks>
        public event EventHandler<ItemsRepeaterElementIndexChangedEventArgs> ElementIndexChanged;

        /// <summary>
        /// Occurs each time an element is prepared for use.
        /// </summary>
        /// <remarks>
        /// The prepared element might be newly created or an existing element that is being re-
        /// used.
        /// </remarks>
        public event EventHandler<ItemsRepeaterElementPreparedEventArgs> ElementPrepared;

        /// <summary>
        /// Retrieves the index of the item from the data source that corresponds to the specified
        /// <see cref="IControl"/>.
        /// </summary>
        /// <param name="element">
        /// The element that corresponds to the item to get the index of.
        /// </param>
        /// <returns>
        /// The index of the item from the data source that corresponds to the specified UIElement,
        /// or -1 if the element is not supported.
        /// </returns>
        public int GetElementIndex(IControl element) => GetElementIndexImpl(element);

        /// <summary>
        /// Retrieves the realized UIElement that corresponds to the item at the specified index in
        /// the data source.
        /// </summary>
        /// <param name="index">The index of the item.</param>
        /// <returns>
        /// The UIElement that corresponds to the item at the specified index if the item is
        /// realized, or null if the item is not realized.
        /// </returns>
        public IControl TryGetElement(int index) => GetElementFromIndexImpl(index);

        /// <summary>
        /// Retrieves the UIElement that corresponds to the item at the specified index in the
        /// data source.
        /// </summary>
        /// <param name="index">The index of the item.</param>
        /// <returns>
        /// An <see cref="IControl"/> that corresponds to the item at the specified index. If the
        /// item is not realized, a new UIElement is created.
        /// </returns>
        public IControl GetOrCreateElement(int index) => GetOrCreateElementImpl(index);

        // Pin/unpin bookkeeping is delegated to the view manager.
        internal void PinElement(IControl element) => _viewManager.UpdatePin(element, true);

        internal void UnpinElement(IControl element) => _viewManager.UpdatePin(element, false);
internal static VirtualizationInfo TryGetVirtualizationInfo(IControl element)
{
var value = element.GetValue(VirtualizationInfoProperty);
return value;
}
internal static VirtualizationInfo CreateAndInitializeVirtualizationInfo(IControl element)
{
if (TryGetVirtualizationInfo(element) != null)
{
throw new InvalidOperationException("VirtualizationInfo already created.");
}
var result = new VirtualizationInfo();
element.SetValue(VirtualizationInfoProperty, result);
return result;
}
internal static VirtualizationInfo GetVirtualizationInfo(IControl element)
{
var result = element.GetValue(VirtualizationInfoProperty);
if (result == null)
{
result = new VirtualizationInfo();
element.SetValue(VirtualizationInfoProperty, result);
}
return result;
}
        private protected override void InvalidateMeasureOnChildrenChanged()
        {
            // Don't invalidate measure when children change: the repeater itself adds
            // and removes children as part of virtualization, and re-measuring on every
            // such change would be redundant.
        }
        /// <summary>
        /// Measures the repeater by delegating to the attached <see cref="Layout"/>,
        /// then clears realized elements the layout no longer kept alive and publishes
        /// the resulting extent to the viewport manager.
        /// </summary>
        protected override Size MeasureOverride(Size availableSize)
        {
            if (_isLayoutInProgress)
            {
                throw new AvaloniaInternalException("Reentrancy detected during layout.");
            }

            if (IsProcessingCollectionChange)
            {
                throw new NotSupportedException("Cannot run layout in the middle of a collection change.");
            }

            _viewportManager.OnOwnerMeasuring();

            _isLayoutInProgress = true;

            try
            {
                _viewManager.PrunePinnedElements();
                var extent = new Rect();
                var desiredSize = new Size();

                var layout = Layout;

                if (layout != null)
                {
                    var layoutContext = GetLayoutContext();

                    desiredSize = layout.Measure(layoutContext, availableSize);
                    extent = new Rect(LayoutOrigin.X, LayoutOrigin.Y, desiredSize.Width, desiredSize.Height);

                    // Clear auto recycle candidate elements that have not been kept alive by layout - i.e layout did not
                    // call GetElementAt(index).
                    foreach (var element in Children)
                    {
                        var virtInfo = GetVirtualizationInfo(element);

                        if (virtInfo.Owner == ElementOwner.Layout &&
                            virtInfo.AutoRecycleCandidate &&
                            !virtInfo.KeepAlive)
                        {
                            Logger.TryGet(LogEventLevel.Verbose, "Repeater")?.Log(this, "AutoClear - {Index}", virtInfo.Index);
                            ClearElementImpl(element);
                        }
                    }
                }

                _viewportManager.SetLayoutExtent(extent);
                return desiredSize;
            }
            finally
            {
                // Always clear the re-entrancy flag, even if the layout threw.
                _isLayoutInProgress = false;
            }
        }
        /// <summary>
        /// Arranges the repeater via the attached <see cref="Layout"/>, parks cleared
        /// elements off-screen, and registers live elements as scroll-anchor candidates.
        /// </summary>
        protected override Size ArrangeOverride(Size finalSize)
        {
            if (_isLayoutInProgress)
            {
                throw new AvaloniaInternalException("Reentrancy detected during layout.");
            }

            if (IsProcessingCollectionChange)
            {
                throw new NotSupportedException("Cannot run layout in the middle of a collection change.");
            }

            _isLayoutInProgress = true;

            try
            {
                var arrangeSize = Layout?.Arrange(GetLayoutContext(), finalSize) ?? default;

                // The view manager might clear elements during this call.
                // That's why we call it before arranging cleared elements
                // off screen.
                _viewManager.OnOwnerArranged();

                foreach (var element in Children)
                {
                    var virtInfo = GetVirtualizationInfo(element);
                    // KeepAlive is a per-pass flag; reset it for the next measure pass.
                    virtInfo.KeepAlive = false;

                    if (virtInfo.Owner == ElementOwner.ElementFactory ||
                        virtInfo.Owner == ElementOwner.PinnedPool)
                    {
                        // Toss it away. And arrange it with size 0 so that XYFocus won't use it.
                        element.Arrange(new Rect(
                            ClearedElementsArrangePosition.X - element.DesiredSize.Width,
                            ClearedElementsArrangePosition.Y - element.DesiredSize.Height,
                            0,
                            0));
                    }
                    else
                    {
                        var newBounds = element.Bounds;
                        virtInfo.ArrangeBounds = newBounds;

                        if (!virtInfo.IsRegisteredAsAnchorCandidate)
                        {
                            _viewportManager.RegisterScrollAnchorCandidate(element);
                            virtInfo.IsRegisteredAsAnchorCandidate = true;
                        }
                    }
                }

                _viewportManager.OnOwnerArranged();

                return arrangeSize;
            }
            finally
            {
                // Always clear the re-entrancy flag, even if the layout threw.
                _isLayoutInProgress = false;
            }
        }
        protected override void OnAttachedToVisualTree(VisualTreeAttachmentEventArgs e)
        {
            // Re-run layout and reset the viewport manager's scroller tracking when
            // (re)entering the visual tree.
            InvalidateMeasure();
            _viewportManager.ResetScrollers();
        }

        protected override void OnDetachedFromVisualTree(VisualTreeAttachmentEventArgs e)
        {
            // Drop scroller tracking when leaving the visual tree.
            _viewportManager.ResetScrollers();
        }
        /// <summary>
        /// Routes Avalonia property changes to the corresponding internal handlers.
        /// </summary>
        protected override void OnPropertyChanged<T>(AvaloniaPropertyChangedEventArgs<T> change)
        {
            if (change.Property == ItemsProperty)
            {
                var oldEnumerable = change.OldValue.GetValueOrDefault<IEnumerable>();
                var newEnumerable = change.NewValue.GetValueOrDefault<IEnumerable>();
                if (oldEnumerable != newEnumerable)
                {
                    // Wrap a plain IEnumerable in an ItemsSourceView unless one was supplied directly.
                    var newDataSource = newEnumerable as ItemsSourceView;
                    if (newEnumerable != null && newDataSource == null)
                    {
                        newDataSource = new ItemsSourceView(newEnumerable);
                    }

                    OnDataSourcePropertyChanged(ItemsSourceView, newDataSource);
                }
            }
            else if (change.Property == ItemTemplateProperty)
            {
                OnItemTemplateChanged(
                    change.OldValue.GetValueOrDefault<IDataTemplate>(),
                    change.NewValue.GetValueOrDefault<IDataTemplate>());
            }
            else if (change.Property == LayoutProperty)
            {
                OnLayoutChanged(
                    change.OldValue.GetValueOrDefault<AttachedLayout>(),
                    change.NewValue.GetValueOrDefault<AttachedLayout>());
            }
            else if (change.Property == HorizontalCacheLengthProperty)
            {
                _viewportManager.HorizontalCacheLength = change.NewValue.GetValueOrDefault<double>();
            }
            else if (change.Property == VerticalCacheLengthProperty)
            {
                _viewportManager.VerticalCacheLength = change.NewValue.GetValueOrDefault<double>();
            }

            base.OnPropertyChanged(change);
        }
internal IControl GetElementImpl(int index, bool forceCreate, bool supressAutoRecycle)
{
var element = _viewManager.GetElement(index, forceCreate, supressAutoRecycle);
return element;
}
internal void ClearElementImpl(IControl element)
{
// Clearing an element due to a collection change
// is more strict in that pinned elements will be forcibly
// unpinned and sent back to the view generator.
var isClearedDueToCollectionChange =
_processingItemsSourceChange != null &&
(_processingItemsSourceChange.Action == NotifyCollectionChangedAction.Remove ||
_processingItemsSourceChange.Action == NotifyCollectionChangedAction.Replace ||
_processingItemsSourceChange.Action == NotifyCollectionChangedAction.Reset);
_viewManager.ClearElement(element, isClearedDueToCollectionChange);
_viewportManager.OnElementCleared(element);
}
private int GetElementIndexImpl(IControl element)
{
// Verify that element is actually a child of this ItemsRepeater
var parent = element.GetVisualParent();
if (parent == this)
{
var virtInfo = TryGetVirtualizationInfo(element);
return _viewManager.GetElementIndex(virtInfo);
}
return -1;
}
private IControl GetElementFromIndexImpl(int index)
{
IControl result = null;
var children = Children;
for (var i = 0; i < children.Count && result == null; ++i)
{
var element = children[i];
var virtInfo = TryGetVirtualizationInfo(element);
if (virtInfo?.IsRealized == true && virtInfo.Index == index)
{
result = element;
}
}
return result;
}
        // Core of GetOrCreateElement(): returns the realized element for the item at
        // the given index, creating (and measuring) one through the layout if needed,
        // and informs the viewport manager that an anchor was made.
        private IControl GetOrCreateElementImpl(int index)
        {
            // NOTE(review): negative indices are not rejected here (the guard only
            // fires for index >= Count) — confirm this matches the upstream behavior.
            if (index >= 0 && index >= ItemsSourceView.Count)
            {
                throw new ArgumentException("Argument index is invalid.", "index");
            }

            if (_isLayoutInProgress)
            {
                throw new NotSupportedException("GetOrCreateElement invocation is not allowed during layout.");
            }

            var element = GetElementFromIndexImpl(index);
            bool isAnchorOutsideRealizedRange = element == null;

            if (isAnchorOutsideRealizedRange)
            {
                if (Layout == null)
                {
                    throw new InvalidOperationException("Cannot make an Anchor when there is no attached layout.");
                }

                // Realize the element via the layout context and give it an initial measure.
                element = (IControl)GetLayoutContext().GetOrCreateElementAt(index);
                element.Measure(Size.Infinity);
            }

            _viewportManager.OnMakeAnchor(element, isAnchorOutsideRealizedRange);
            InvalidateMeasure();

            return element;
        }
internal void OnElementPrepared(IControl element, VirtualizationInfo virtInfo)
{
_viewportManager.OnElementPrepared(element, virtInfo);
if (ElementPrepared != null)
{
var index = virtInfo.Index;
if (_elementPreparedArgs == null)
{
_elementPreparedArgs = new ItemsRepeaterElementPreparedEventArgs(element, index);
}
else
{
_elementPreparedArgs.Update(element, index);
}
ElementPrepared(this, _elementPreparedArgs);
}
}
internal void OnElementClearing(IControl element)
{
if (ElementClearing != null)
{
if (_elementClearingArgs == null)
{
_elementClearingArgs = new ItemsRepeaterElementClearingEventArgs(element);
}
else
{
_elementClearingArgs.Update(element);
}
ElementClearing(this, _elementClearingArgs);
}
}
internal void OnElementIndexChanged(IControl element, int oldIndex, int newIndex)
{
if (ElementIndexChanged != null)
{
if (_elementIndexChangedArgs == null)
{
_elementIndexChangedArgs = new ItemsRepeaterElementIndexChangedEventArgs(element, oldIndex, newIndex);
}
else
{
_elementIndexChangedArgs.Update(element, oldIndex, newIndex);
}
ElementIndexChanged(this, _elementIndexChangedArgs);
}
}
        /// <summary>
        /// Swaps the <see cref="ItemsSourceView"/>, rewires collection-change handlers
        /// and resets the layout with a Reset notification.
        /// </summary>
        private void OnDataSourcePropertyChanged(ItemsSourceView oldValue, ItemsSourceView newValue)
        {
            if (_isLayoutInProgress)
            {
                throw new AvaloniaInternalException("Cannot set ItemsSourceView during layout.");
            }

            ItemsSourceView?.Dispose();
            ItemsSourceView = newValue;

            if (oldValue != null)
            {
                oldValue.CollectionChanged -= OnItemsSourceViewChanged;
            }

            if (newValue != null)
            {
                newValue.CollectionChanged += OnItemsSourceViewChanged;
            }

            if (Layout != null)
            {
                var args = new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Reset);

                try
                {
                    // Mark the reset as in progress so ClearElementImpl treats clears
                    // as collection-change clears (pinned elements are force-unpinned).
                    _processingItemsSourceChange = args;

                    if (Layout is VirtualizingLayout virtualLayout)
                    {
                        virtualLayout.OnItemsChanged(GetLayoutContext(), newValue, args);
                    }
                    else if (Layout is NonVirtualizingLayout nonVirtualLayout)
                    {
                        // NOTE(review): the pattern variable nonVirtualLayout is unused;
                        // a plain 'is NonVirtualizingLayout' check would suffice.
                        // Walk through all the elements and make sure they are cleared for
                        // non-virtualizing layouts.
                        foreach (var element in Children)
                        {
                            if (GetVirtualizationInfo(element).IsRealized)
                            {
                                ClearElementImpl(element);
                            }
                        }

                        Children.Clear();
                    }
                }
                finally
                {
                    _processingItemsSourceChange = null;
                }

                InvalidateMeasure();
            }
        }
        /// <summary>
        /// Reacts to an <see cref="ItemTemplate"/> change: resets realized elements
        /// (so they are cleared against the old template) and rebuilds the template shim.
        /// </summary>
        private void OnItemTemplateChanged(IDataTemplate oldValue, IDataTemplate newValue)
        {
            if (_isLayoutInProgress && oldValue != null)
            {
                throw new AvaloniaInternalException("ItemTemplate cannot be changed during layout.");
            }

            // Since the ItemTemplate has changed, we need to re-evaluate all the items that
            // have already been created and are now in the tree. The easiest way to do that
            // would be to do a reset. Note that this has to be done before we change the template
            // so that the cleared elements go back into the old template.
            if (Layout != null)
            {
                if (Layout is VirtualizingLayout virtualLayout)
                {
                    var args = new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Reset);
                    _processingItemsSourceChange = args;

                    try
                    {
                        virtualLayout.OnItemsChanged(GetLayoutContext(), newValue, args);
                    }
                    finally
                    {
                        _processingItemsSourceChange = null;
                    }
                }
                else if (Layout is NonVirtualizingLayout)
                {
                    // Walk through all the elements and make sure they are cleared for
                    // non-virtualizing layouts.
                    foreach (var element in Children)
                    {
                        if (GetVirtualizationInfo(element).IsRealized)
                        {
                            ClearElementImpl(element);
                        }
                    }
                }
            }

            // Wrap a plain IDataTemplate in an ItemTemplateWrapper unless it already
            // implements IElementFactory.
            ItemTemplateShim = newValue as IElementFactory ?? new ItemTemplateWrapper(newValue);

            InvalidateMeasure();
        }
/// <summary>
/// Handles a change of the Layout property: detaches the old layout
/// (uninitializing it, unhooking its invalidation events, and clearing all
/// realized elements), attaches the new one, and re-triggers measure.
/// </summary>
private void OnLayoutChanged(AttachedLayout oldValue, AttachedLayout newValue)
{
    if (_isLayoutInProgress)
    {
        throw new InvalidOperationException("Layout cannot be changed during layout.");
    }

    _viewManager.OnLayoutChanging();

    if (oldValue != null)
    {
        oldValue.UninitializeForContext(LayoutContext);

        WeakEventHandlerManager.Unsubscribe<EventArgs, ItemsRepeater>(
            oldValue,
            nameof(AttachedLayout.MeasureInvalidated),
            InvalidateMeasureForLayout);
        WeakEventHandlerManager.Unsubscribe<EventArgs, ItemsRepeater>(
            oldValue,
            nameof(AttachedLayout.ArrangeInvalidated),
            InvalidateArrangeForLayout);

        // Walk through all the elements and make sure they are cleared: the
        // old layout's realized elements are meaningless to the new layout.
        foreach (var element in Children)
        {
            if (GetVirtualizationInfo(element).IsRealized)
            {
                ClearElementImpl(element);
            }
        }

        LayoutState = null;
    }

    if (newValue != null)
    {
        newValue.InitializeForContext(LayoutContext);

        WeakEventHandlerManager.Subscribe<AttachedLayout, EventArgs, ItemsRepeater>(
            newValue,
            nameof(AttachedLayout.MeasureInvalidated),
            InvalidateMeasureForLayout);
        WeakEventHandlerManager.Subscribe<AttachedLayout, EventArgs, ItemsRepeater>(
            newValue,
            nameof(AttachedLayout.ArrangeInvalidated),
            InvalidateArrangeForLayout);
    }

    // `is` already yields false for a null operand, so the explicit
    // `newValue != null &&` check the original carried here was redundant.
    bool isVirtualizingLayout = newValue is VirtualizingLayout;
    _viewportManager.OnLayoutChanged(isVirtualizingLayout);
    InvalidateMeasure();
}
private void OnItemsSourceViewChanged(object sender, NotifyCollectionChangedEventArgs args)
{
    // Data mutations are illegal both during a layout pass and while a
    // previous mutation is still being processed.
    if (_isLayoutInProgress)
    {
        // Bad things will follow if the data changes while we are in the middle of a layout pass.
        throw new InvalidOperationException("Changes in data source are not allowed during layout.");
    }

    if (IsProcessingCollectionChange)
    {
        throw new InvalidOperationException("Changes in the data source are not allowed during another change in the data source.");
    }

    _processingItemsSourceChange = args;

    try
    {
        _viewManager.OnItemsSourceChanged(sender, args);

        var layout = Layout;

        if (layout != null)
        {
            if (layout is VirtualizingLayout virtualizing)
            {
                virtualizing.OnItemsChanged(GetLayoutContext(), sender, args);
            }
            else
            {
                // Non-virtualizing layouts simply re-measure everything.
                InvalidateMeasure();
            }
        }
    }
    finally
    {
        _processingItemsSourceChange = null;
    }
}
// Forward bring-into-view requests straight to the viewport manager.
private void OnRequestBringIntoView(RequestBringIntoViewEventArgs e) =>
    _viewportManager.OnBringIntoViewRequested(e);
// Raised (via weak event) when the attached layout invalidates measure.
private void InvalidateMeasureForLayout(object sender, EventArgs e)
{
    InvalidateMeasure();
}
// Raised (via weak event) when the attached layout invalidates arrange.
private void InvalidateArrangeForLayout(object sender, EventArgs e)
{
    InvalidateArrange();
}
// Lazily creates the layout context handed to virtualizing layouts; the same
// instance is reused for the lifetime of the repeater.
private VirtualizingLayoutContext GetLayoutContext() =>
    _layoutContext ?? (_layoutContext = new RepeaterLayoutContext(this));
}
}
| {
"content_hash": "658f48ae1242caee18a2395cbfabf2ea",
"timestamp": "",
"source": "github",
"line_count": 796,
"max_line_length": 140,
"avg_line_length": 38.17964824120603,
"alnum_prop": 0.5601000296140305,
"repo_name": "akrisiun/Perspex",
"id": "fb2da09e7360d6a0529f741fa71934361a58a9ea",
"size": "30393",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Avalonia.Controls/Repeater/ItemsRepeater.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "120"
},
{
"name": "C#",
"bytes": "2397160"
},
{
"name": "PowerShell",
"bytes": "4386"
},
{
"name": "Smalltalk",
"bytes": "58936"
}
],
"symlink_target": ""
} |
<?php
namespace Manticora\RestExtraBundle\DependencyInjection;
use Symfony\Component\DependencyInjection\ContainerBuilder;
use Symfony\Component\Config\FileLocator;
use Symfony\Component\HttpKernel\DependencyInjection\Extension;
use Symfony\Component\DependencyInjection\Loader;
/**
* This is the class that loads and manages your bundle configuration
*
* To learn more see {@link http://symfony.com/doc/current/cookbook/bundles/extension.html}
*/
class ManticoraRestExtraExtension extends Extension
{
    /**
     * {@inheritDoc}
     *
     * Validates the bundle configuration and registers the bundle's services.
     */
    public function load(array $configs, ContainerBuilder $container)
    {
        // Merge and validate the per-environment configs against the bundle's
        // Configuration tree. The merged array is intentionally unused (it was
        // previously stored in an unused local); the call is kept because it
        // throws on invalid user-provided settings.
        $this->processConfiguration(new Configuration(), $configs);

        // Register the bundle's service definitions.
        $loader = new Loader\XmlFileLoader($container, new FileLocator(__DIR__.'/../Resources/config'));
        $loader->load('services.xml');
    }
}
| {
"content_hash": "caf91377a4bf1c4aaae035611083431d",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 104,
"avg_line_length": 31.928571428571427,
"alnum_prop": 0.7315436241610739,
"repo_name": "mgiustiniani/ManticoraRestExtraBundle",
"id": "a6261db2790c738f3f3a53b0f9af4633c7528f44",
"size": "894",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "DependencyInjection/ManticoraRestExtraExtension.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "10764"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<android.support.constraint.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@drawable/background"
tools:context="jett_apps.grouvie.Views.LeaderInitialPlan"
tools:layout_editor_absoluteY="81dp"
tools:layout_editor_absoluteX="0dp">
<TextView
android:id="@+id/textView2"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginEnd="8dp"
android:layout_marginLeft="8dp"
android:layout_marginRight="8dp"
android:layout_marginStart="8dp"
android:layout_marginTop="16dp"
android:text="@string/current_plan"
android:textAlignment="center"
android:textColor="@color/grey_100"
android:textSize="40sp"
app:layout_constraintHorizontal_bias="0.503"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintTop_toTopOf="parent" />
<TextView
android:id="@+id/SelectedFilm"
android:textSize="30sp"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:textAlignment="center"
android:text="TextView"
android:textColor="@color/grey_300"
android:layout_marginRight="8dp"
app:layout_constraintRight_toRightOf="parent"
android:layout_marginStart="8dp"
android:layout_marginEnd="8dp"
android:layout_marginLeft="8dp"
app:layout_constraintHorizontal_bias="0.502"
android:layout_marginTop="69dp"
app:layout_constraintTop_toBottomOf="@+id/textView2"
app:layout_constraintLeft_toLeftOf="parent" />
<TextView
android:id="@+id/SelectedDay"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="TextView"
android:textColor="@color/grey_300"
android:textAlignment="center"
android:textSize="25sp"
android:layout_marginRight="8dp"
app:layout_constraintRight_toRightOf="parent"
android:layout_marginStart="8dp"
android:layout_marginEnd="8dp"
android:layout_marginTop="35dp"
app:layout_constraintTop_toBottomOf="@+id/SelectedCinema"
app:layout_constraintLeft_toRightOf="@+id/moviePoster"
android:layout_marginLeft="8dp"
app:layout_constraintHorizontal_bias="0.466" />
<TextView
android:id="@+id/SelectedCinema"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginEnd="8dp"
android:layout_marginLeft="8dp"
android:layout_marginRight="55dp"
android:layout_marginStart="8dp"
android:layout_marginTop="12dp"
android:text="TextView"
android:textAlignment="center"
android:textColor="@color/grey_300"
android:textSize="20sp"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintTop_toBottomOf="@+id/SelectedFilm"
app:layout_constraintVertical_chainStyle="spread_inside" />
<TextView
android:id="@+id/SelectedShowtime"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="TextView"
android:textColor="@color/grey_300"
android:textAlignment="center"
android:textSize="25sp"
app:layout_constraintRight_toRightOf="parent"
android:layout_marginTop="31dp"
app:layout_constraintTop_toBottomOf="@+id/SelectedDay"
app:layout_constraintLeft_toRightOf="@+id/moviePoster"
android:layout_marginLeft="8dp"
app:layout_constraintHorizontal_bias="0.44" />
<Button
android:id="@+id/viewGroupReplies"
style="@style/Widget.AppCompat.Button"
android:layout_width="175dp"
android:layout_height="45dp"
android:layout_marginEnd="8dp"
android:layout_marginLeft="8dp"
android:layout_marginRight="8dp"
android:layout_marginStart="8dp"
android:background="@drawable/rounded_button"
android:backgroundTint="@color/grey_100"
android:elevation="0dp"
android:onClick="viewGroupReplies"
android:text="View Group Replies"
android:textAlignment="center"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintHorizontal_bias="0.502"
android:layout_marginTop="48dp"
app:layout_constraintTop_toBottomOf="@+id/SelectedShowtime"
android:layout_marginBottom="8dp"
app:layout_constraintBottom_toTopOf="@+id/suggestChange" />
<Button
android:id="@+id/button4"
android:layout_width="100dp"
android:layout_height="50dp"
android:layout_marginBottom="85dp"
android:layout_marginLeft="16dp"
android:layout_marginTop="8dp"
android:background="@drawable/rounded_button"
android:backgroundTintMode="src_in"
android:onClick="cantGo"
android:text="Can't Go"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintTop_toBottomOf="@+id/viewGroupReplies"
app:layout_constraintVertical_bias="0.0" />
<Button
android:id="@+id/cancelPlan"
android:layout_width="100dp"
android:layout_height="50dp"
android:layout_marginBottom="85dp"
android:layout_marginRight="16dp"
android:layout_marginTop="8dp"
android:background="@drawable/rounded_button"
android:backgroundTint="@android:color/holo_red_light"
android:elevation="10dp"
android:onClick="cancelPlan"
android:text="Cancel Plan"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintTop_toBottomOf="@+id/viewGroupReplies"
app:layout_constraintVertical_bias="0.0" />
<Button
android:id="@+id/suggestChange"
android:layout_width="100dp"
android:layout_height="50dp"
android:layout_marginBottom="18dp"
android:layout_marginLeft="8dp"
android:layout_marginRight="8dp"
android:background="@drawable/rounded_button"
android:onClick="makeChange"
android:text="Change"
android:textColor="@color/grey_300"
app:layout_constraintBottom_toTopOf="@+id/backButton"
app:layout_constraintLeft_toRightOf="@+id/button4"
app:layout_constraintRight_toLeftOf="@+id/cancelPlan" />
<Button
android:id="@+id/backButton"
style="@style/Widget.AppCompat.Button.Borderless"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginBottom="1dp"
android:layout_marginLeft="8dp"
android:layout_marginRight="8dp"
android:text="back"
android:textColor="@color/grey_100"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent" />
<Button
android:id="@+id/acceptPlan"
android:layout_width="100dp"
android:layout_height="50dp"
android:layout_marginBottom="85dp"
android:layout_marginRight="16dp"
android:layout_marginTop="8dp"
android:background="@drawable/rounded_button"
android:elevation="10dp"
android:onClick="acceptPlan"
android:text="Accept Plan"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintTop_toBottomOf="@+id/viewGroupReplies"
app:layout_constraintVertical_bias="0.0" />
<ImageView
android:id="@+id/moviePoster"
android:layout_width="122dp"
android:layout_height="160dp"
app:srcCompat="@drawable/hockeyapp_btn_background"
android:layout_marginLeft="30dp"
app:layout_constraintLeft_toLeftOf="parent"
android:layout_marginBottom="8dp"
app:layout_constraintBottom_toTopOf="@+id/viewGroupReplies"
android:layout_marginTop="8dp"
app:layout_constraintTop_toBottomOf="@+id/SelectedCinema"
app:layout_constraintVertical_bias="0.285" />
<ImageButton
android:id="@+id/imageButton"
android:layout_width="42dp"
android:layout_height="39dp"
android:layout_marginLeft="8dp"
android:onClick="getDirections"
app:layout_constraintLeft_toLeftOf="parent"
app:srcCompat="@mipmap/get_directions"
app:layout_constraintRight_toLeftOf="@+id/textView2"
android:layout_marginRight="8dp"
app:layout_constraintHorizontal_bias="0.956"
android:layout_marginBottom="19dp"
app:layout_constraintBottom_toTopOf="@+id/SelectedFilm" />
<ImageButton
android:id="@+id/addToCalendar"
android:layout_width="48dp"
android:layout_height="46dp"
android:layout_marginRight="8dp"
android:onClick="addToCalendar"
app:layout_constraintRight_toRightOf="parent"
app:srcCompat="@mipmap/add_to_calendar"
app:layout_constraintLeft_toRightOf="@+id/textView2"
android:layout_marginLeft="8dp"
android:layout_marginBottom="23dp"
app:layout_constraintBottom_toTopOf="@+id/SelectedFilm" />
<ImageButton
android:id="@+id/imageButton4"
android:layout_width="48dp"
android:layout_height="39dp"
android:layout_marginLeft="8dp"
android:layout_marginRight="8dp"
android:onClick="bookTickets"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:srcCompat="@mipmap/book_tickets"
android:layout_marginTop="8dp"
app:layout_constraintTop_toBottomOf="@+id/textView2"
android:layout_marginBottom="8dp"
app:layout_constraintBottom_toTopOf="@+id/SelectedFilm" />
</android.support.constraint.ConstraintLayout>
| {
"content_hash": "8a69f389709c3527b55aacd19ee89ad1",
"timestamp": "",
"source": "github",
"line_count": 256,
"max_line_length": 103,
"avg_line_length": 40.7578125,
"alnum_prop": 0.6665708261452943,
"repo_name": "Team-JETT/Grouvie",
"id": "3a17310ea16273566c004f309e2bdc0c15bd535f",
"size": "10434",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/res/layout/activity_current_plan_view.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "152082"
},
{
"name": "Python",
"bytes": "30781"
},
{
"name": "Shell",
"bytes": "966"
}
],
"symlink_target": ""
} |
"""Entry point: run the application through Flask-Script's CLI manager."""
from flask import Flask
# flask.ext.* was a deprecated import shim removed in Flask 1.0; import the
# extension package directly instead.
from flask_script import Manager

from app import create_app

# NOTE(review): this bare Flask instance is never used by the manager below
# (which wraps the factory-built app). Kept only in case external tooling
# imports `manage.app` — confirm and remove if nothing does.
app = Flask(__name__)

# The Manager wraps the app produced by the application factory.
manage = Manager(create_app())

if __name__ == '__main__':
    manage.run()
| {
"content_hash": "d187daea52ec92e25d0a770146c61539",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 36,
"avg_line_length": 18.8,
"alnum_prop": 0.6702127659574468,
"repo_name": "hanks-zyh/fir-local",
"id": "386ee489f21742ef2b248d21c4dff85c65d8c99d",
"size": "234",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manage.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2782"
},
{
"name": "HTML",
"bytes": "4186"
},
{
"name": "JavaScript",
"bytes": "277"
},
{
"name": "Python",
"bytes": "198769"
}
],
"symlink_target": ""
} |
package org.elasticsearch.action.support.replication;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.ActionTestUtils;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.action.support.WriteRequest.RefreshPolicy;
import org.elasticsearch.action.support.WriteResponse;
import org.elasticsearch.action.support.replication.ReplicationOperation.ReplicaResponse;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.TestShardRouting;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexingPressure;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardNotFoundException;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.indices.EmptySystemIndices;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.node.NodeClosedException;
import org.elasticsearch.test.ClusterServiceUtils;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.transport.CapturingTransport;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.transport.TransportService;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.mockito.ArgumentCaptor;
import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.Locale;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import static org.elasticsearch.test.ClusterServiceUtils.createClusterService;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class TransportWriteActionTests extends ESTestCase {
// Shared by every test in the class; created in beforeClass(), terminated in afterClass().
private static ThreadPool threadPool;
// Per-test fixtures, re-created in initCommonMocks() before each test.
private ClusterService clusterService;
private IndexShard indexShard;
private Translog.Location location;
@BeforeClass
public static void beforeClass() {
    // One thread pool for the whole class; torn down in afterClass().
    threadPool = new TestThreadPool("ShardReplicationTests");
}
@Before
public void initCommonMocks() {
    // Fresh mocks and a real ClusterService for every test.
    indexShard = mock(IndexShard.class);
    location = mock(Translog.Location.class);
    clusterService = createClusterService(threadPool);
}
@Override
@After
public void tearDown() throws Exception {
    super.tearDown();
    // Release the per-test ClusterService created in initCommonMocks().
    clusterService.close();
}
@AfterClass
public static void afterClass() {
    // Give in-flight tasks up to 30s to finish before dropping the reference.
    ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS);
    threadPool = null;
}
/**
 * Asserts that {@code listener} completed exceptionally and that the cause of
 * the {@link ExecutionException} is an instance of {@code klass}; fails with
 * {@code msg} if the listener completed normally.
 */
<T> void assertListenerThrows(String msg, PlainActionFuture<T> listener, Class<?> klass) throws InterruptedException {
    try {
        listener.get();
    } catch (ExecutionException ex) {
        assertThat(ex.getCause(), instanceOf(klass));
        return;
    }
    fail(msg);
}
/**
 * RefreshPolicy.NONE on the primary: post-replication actions complete
 * successfully and the shard's refresh machinery is never touched.
 */
public void testPrimaryNoRefreshCall() throws Exception {
    TestRequest request = new TestRequest();
    request.setRefreshPolicy(RefreshPolicy.NONE); // The default, but we'll set it anyway just to be explicit
    TestAction testAction = new TestAction();
    testAction.dispatchedShardOperationOnPrimary(request, indexShard,
        ActionTestUtils.assertNoFailureListener(result -> {
            CapturingActionListener<TestResponse> listener = new CapturingActionListener<>();
            result.runPostReplicationActions(listener.map(ignore -> result.finalResponseIfSuccessful));
            // Completed synchronously and successfully...
            assertNotNull(listener.response);
            assertNull(listener.failure);
            // ...without forcing a refresh or registering a refresh listener.
            verify(indexShard, never()).refresh(any());
            verify(indexShard, never()).addRefreshListener(any(), any());
        }));
}
/**
 * RefreshPolicy.NONE on the replica: post-replica actions complete
 * successfully without touching the shard's refresh machinery.
 */
public void testReplicaNoRefreshCall() throws Exception {
    // NONE is the default policy; set it explicitly for clarity.
    final TestRequest replicaRequest = new TestRequest();
    replicaRequest.setRefreshPolicy(RefreshPolicy.NONE);
    final PlainActionFuture<TransportReplicationAction.ReplicaResult> resultFuture = PlainActionFuture.newFuture();
    new TestAction().dispatchedShardOperationOnReplica(replicaRequest, indexShard, resultFuture);
    final CapturingActionListener<TransportResponse.Empty> capture = new CapturingActionListener<>();
    resultFuture.actionGet().runPostReplicaActions(capture.map(ignore -> TransportResponse.Empty.INSTANCE));
    assertNotNull(capture.response);
    assertNull(capture.failure);
    verify(indexShard, never()).refresh(any());
    verify(indexShard, never()).addRefreshListener(any(), any());
}
/**
 * RefreshPolicy.IMMEDIATE on the primary: the shard is refreshed synchronously
 * and the response reports a forced refresh.
 */
public void testPrimaryImmediateRefresh() throws Exception {
    final TestRequest primaryRequest = new TestRequest();
    primaryRequest.setRefreshPolicy(RefreshPolicy.IMMEDIATE);
    new TestAction().dispatchedShardOperationOnPrimary(primaryRequest, indexShard,
        ActionTestUtils.assertNoFailureListener(result -> {
            final CapturingActionListener<TestResponse> capture = new CapturingActionListener<>();
            result.runPostReplicationActions(capture.map(ignore -> result.finalResponseIfSuccessful));
            assertNotNull(capture.response);
            assertNull(capture.failure);
            assertTrue(capture.response.forcedRefresh);
            // An immediate refresh goes through refresh(), never the listener path.
            verify(indexShard).refresh("refresh_flag_index");
            verify(indexShard, never()).addRefreshListener(any(), any());
        }));
}
/**
 * RefreshPolicy.IMMEDIATE on the replica: the shard is refreshed synchronously
 * rather than via a registered refresh listener.
 */
public void testReplicaImmediateRefresh() throws Exception {
    final TestRequest replicaRequest = new TestRequest();
    replicaRequest.setRefreshPolicy(RefreshPolicy.IMMEDIATE);
    final PlainActionFuture<TransportReplicationAction.ReplicaResult> resultFuture = PlainActionFuture.newFuture();
    new TestAction().dispatchedShardOperationOnReplica(replicaRequest, indexShard, resultFuture);
    final CapturingActionListener<TransportResponse.Empty> capture = new CapturingActionListener<>();
    resultFuture.actionGet().runPostReplicaActions(capture.map(ignore -> TransportResponse.Empty.INSTANCE));
    assertNotNull(capture.response);
    assertNull(capture.failure);
    verify(indexShard).refresh("refresh_flag_index");
    verify(indexShard, never()).addRefreshListener(any(), any());
}
/**
 * RefreshPolicy.WAIT_UNTIL on the primary: the response is withheld until the
 * refresh listener registered with the shard fires, and the forced-refresh
 * flag passed to that listener is echoed on the response.
 */
public void testPrimaryWaitForRefresh() throws Exception {
    TestRequest request = new TestRequest();
    request.setRefreshPolicy(RefreshPolicy.WAIT_UNTIL);
    TestAction testAction = new TestAction();
    testAction.dispatchedShardOperationOnPrimary(request, indexShard,
        ActionTestUtils.assertNoFailureListener(result -> {
            CapturingActionListener<TestResponse> listener = new CapturingActionListener<>();
            result.runPostReplicationActions(listener.map(ignore -> result.finalResponseIfSuccessful));
            assertNull(listener.response); // Haven't really responded yet
            @SuppressWarnings({"unchecked", "rawtypes"})
            ArgumentCaptor<Consumer<Boolean>> refreshListener = ArgumentCaptor.forClass((Class) Consumer.class);
            // WAIT_UNTIL registers a listener instead of refreshing directly.
            verify(indexShard, never()).refresh(any());
            verify(indexShard).addRefreshListener(any(), refreshListener.capture());
            // Now we can fire the listener manually and we'll get a response
            boolean forcedRefresh = randomBoolean();
            refreshListener.getValue().accept(forcedRefresh);
            assertNotNull(listener.response);
            assertNull(listener.failure);
            assertEquals(forcedRefresh, listener.response.forcedRefresh);
        }));
}
/**
 * RefreshPolicy.WAIT_UNTIL on the replica: the empty replica response is
 * withheld until the shard's refresh listener fires.
 */
public void testReplicaWaitForRefresh() throws Exception {
    TestRequest request = new TestRequest();
    request.setRefreshPolicy(RefreshPolicy.WAIT_UNTIL);
    TestAction testAction = new TestAction();
    final PlainActionFuture<TransportReplicationAction.ReplicaResult> future = PlainActionFuture.newFuture();
    testAction.dispatchedShardOperationOnReplica(request, indexShard, future);
    final TransportReplicationAction.ReplicaResult result = future.actionGet();
    CapturingActionListener<TransportResponse.Empty> listener = new CapturingActionListener<>();
    result.runPostReplicaActions(listener.map(ignore -> TransportResponse.Empty.INSTANCE));
    assertNull(listener.response); // Haven't responded yet
    @SuppressWarnings({ "unchecked", "rawtypes" })
    ArgumentCaptor<Consumer<Boolean>> refreshListener = ArgumentCaptor.forClass((Class) Consumer.class);
    // WAIT_UNTIL registers a listener instead of refreshing directly.
    verify(indexShard, never()).refresh(any());
    verify(indexShard).addRefreshListener(any(), refreshListener.capture());
    // Now we can fire the listener manually and we'll get a response
    boolean forcedRefresh = randomBoolean();
    refreshListener.getValue().accept(forcedRefresh);
    // Unlike the primary path, the empty response carries no forcedRefresh flag.
    assertNotNull(listener.response);
    assertNull(listener.failure);
}
/**
 * A document-level failure on the primary surfaces through the listener as a
 * failure rather than a response.
 */
public void testDocumentFailureInShardOperationOnPrimary() throws Exception {
    final TestRequest failingRequest = new TestRequest();
    final TestAction failingAction = new TestAction(true, true);
    failingAction.dispatchedShardOperationOnPrimary(failingRequest, indexShard,
        ActionTestUtils.assertNoFailureListener(result -> {
            final CapturingActionListener<TestResponse> capture = new CapturingActionListener<>();
            result.runPostReplicationActions(capture.map(ignore -> result.finalResponseIfSuccessful));
            assertNull(capture.response);
            assertNotNull(capture.failure);
        }));
}
/**
 * A document-level failure on the replica surfaces through the listener as a
 * failure rather than a response.
 */
public void testDocumentFailureInShardOperationOnReplica() throws Exception {
    final TestRequest failingRequest = new TestRequest();
    // Whether the primary would also fail is irrelevant on the replica path.
    final TestAction failingAction = new TestAction(randomBoolean(), true);
    final PlainActionFuture<TransportReplicationAction.ReplicaResult> resultFuture = PlainActionFuture.newFuture();
    failingAction.dispatchedShardOperationOnReplica(failingRequest, indexShard, resultFuture);
    final CapturingActionListener<TransportResponse.Empty> capture = new CapturingActionListener<>();
    resultFuture.actionGet().runPostReplicaActions(capture.map(ignore -> TransportResponse.Empty.INSTANCE));
    assertNull(capture.response);
    assertNotNull(capture.failure);
}
/**
 * Exercises the write-action replicas proxy end-to-end over a capturing
 * transport: requests to unknown nodes fail fast, replica responses and
 * errors propagate verbatim to the listener, and failShardIfNeeded issues a
 * shard-failed request whose outcome is reflected on the callback.
 */
public void testReplicaProxy() throws InterruptedException, ExecutionException {
    CapturingTransport transport = new CapturingTransport();
    TransportService transportService = transport.createTransportService(clusterService.getSettings(), threadPool,
        TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> clusterService.localNode(), null, Collections.emptySet());
    transportService.start();
    transportService.acceptIncomingRequests();
    ShardStateAction shardStateAction = new ShardStateAction(clusterService, transportService, null, null, threadPool);
    TestAction action = new TestAction(Settings.EMPTY, "internal:testAction", transportService,
        clusterService, shardStateAction, threadPool);
    final String index = "test";
    final ShardId shardId = new ShardId(index, "_na_", 0);
    ClusterState state = ClusterStateCreationUtils.stateWithActivePrimary(index, true, 1 + randomInt(3), randomInt(2));
    logger.info("using state: {}", state);
    ClusterServiceUtils.setState(clusterService, state);
    final long primaryTerm = state.metadata().index(index).primaryTerm(0);
    ReplicationOperation.Replicas<TestRequest> proxy = action.newReplicasProxy();
    // check that an unknown node fails
    PlainActionFuture<ReplicaResponse> listener = new PlainActionFuture<>();
    ShardRoutingState routingState = randomFrom(ShardRoutingState.INITIALIZING, ShardRoutingState.STARTED,
        ShardRoutingState.RELOCATING);
    proxy.performOn(
        TestShardRouting.newShardRouting(shardId, "NOT THERE",
            routingState == ShardRoutingState.RELOCATING ? state.nodes().iterator().next().getId() : null, false, routingState),
        new TestRequest(),
        primaryTerm, randomNonNegativeLong(), randomNonNegativeLong(), listener);
    assertTrue(listener.isDone());
    assertListenerThrows("non existent node should throw a NoNodeAvailableException", listener, NoNodeAvailableException.class);
    // sending to an assigned replica captures exactly one transport request...
    final IndexShardRoutingTable shardRoutings = state.routingTable().shardRoutingTable(shardId);
    final ShardRouting replica = randomFrom(shardRoutings.replicaShards().stream()
        .filter(ShardRouting::assignedToNode).collect(Collectors.toList()));
    listener = new PlainActionFuture<>();
    proxy.performOn(replica, new TestRequest(), primaryTerm, randomNonNegativeLong(), randomNonNegativeLong(), listener);
    assertFalse(listener.isDone());
    CapturingTransport.CapturedRequest[] captures = transport.getCapturedRequestsAndClear();
    assertThat(captures, arrayWithSize(1));
    // ...whose outcome (success, remote error, or transport error) is
    // reflected verbatim on the listener.
    if (randomBoolean()) {
        final TransportReplicationAction.ReplicaResponse response =
            new TransportReplicationAction.ReplicaResponse(randomLong(), randomLong());
        transport.handleResponse(captures[0].requestId, response);
        assertTrue(listener.isDone());
        assertThat(listener.get(), equalTo(response));
    } else if (randomBoolean()) {
        transport.handleRemoteError(captures[0].requestId, new ElasticsearchException("simulated"));
        assertTrue(listener.isDone());
        assertListenerThrows("listener should reflect remote error", listener, ElasticsearchException.class);
    } else {
        transport.handleError(captures[0].requestId, new TransportException("simulated"));
        assertTrue(listener.isDone());
        assertListenerThrows("listener should reflect remote error", listener, TransportException.class);
    }
    AtomicReference<Object> failure = new AtomicReference<>();
    AtomicBoolean success = new AtomicBoolean();
    proxy.failShardIfNeeded(replica, primaryTerm, "test", new ElasticsearchException("simulated"),
        ActionListener.wrap(r -> success.set(true), failure::set));
    CapturingTransport.CapturedRequest[] shardFailedRequests = transport.getCapturedRequestsAndClear();
    // A write replication action proxy should fail the shard
    assertEquals(1, shardFailedRequests.length);
    CapturingTransport.CapturedRequest shardFailedRequest = shardFailedRequests[0];
    ShardStateAction.FailedShardEntry shardEntry = (ShardStateAction.FailedShardEntry) shardFailedRequest.request;
    // the shard the request was sent to and the shard to be failed should be the same
    assertEquals(shardEntry.getShardId(), replica.shardId());
    assertEquals(shardEntry.getAllocationId(), replica.allocationId().getId());
    if (randomBoolean()) {
        // simulate success
        transport.handleResponse(shardFailedRequest.requestId, TransportResponse.Empty.INSTANCE);
        assertTrue(success.get());
        assertNull(failure.get());
    } else if (randomBoolean()) {
        // simulate the primary has been demoted
        transport.handleRemoteError(shardFailedRequest.requestId,
            new ShardStateAction.NoLongerPrimaryShardException(replica.shardId(),
                "shard-failed-test"));
        assertFalse(success.get());
        assertNotNull(failure.get());
    } else {
        // simulated a node closing exception
        transport.handleRemoteError(shardFailedRequest.requestId,
            new NodeClosedException(state.nodes().getLocalNode()));
        assertFalse(success.get());
        assertNotNull(failure.get());
    }
}
/**
 * Concrete {@link TransportWriteAction} used by the tests above; document
 * failures on the primary/replica paths can be simulated via constructor flags.
 */
private class TestAction extends TransportWriteAction<TestRequest, TestRequest, TestResponse> {
    // When set, the corresponding shard operation produces a simulated
    // RuntimeException instead of a successful write result.
    private final boolean withDocumentFailureOnPrimary;
    private final boolean withDocumentFailureOnReplica;
    protected TestAction() {
        this(false, false);
    }
    protected TestAction(boolean withDocumentFailureOnPrimary, boolean withDocumentFailureOnReplica) {
        // Collaborators the refresh/failure tests never touch are passed as
        // null or throwaway mocks; only the outer test's ClusterService is real.
        super(Settings.EMPTY, "internal:test",
            new TransportService(Settings.EMPTY, mock(Transport.class), null, TransportService.NOOP_TRANSPORT_INTERCEPTOR,
                x -> null, null, Collections.emptySet()), TransportWriteActionTests.this.clusterService, null, null, null,
            new ActionFilters(new HashSet<>()), TestRequest::new, TestRequest::new, ignore -> ThreadPool.Names.SAME, false,
            new IndexingPressure(Settings.EMPTY), EmptySystemIndices.INSTANCE);
        this.withDocumentFailureOnPrimary = withDocumentFailureOnPrimary;
        this.withDocumentFailureOnReplica = withDocumentFailureOnReplica;
    }
    protected TestAction(Settings settings, String actionName, TransportService transportService,
                         ClusterService clusterService, ShardStateAction shardStateAction, ThreadPool threadPool) {
        // Variant used by testReplicaProxy(): wires a real transport service
        // and shard-state action; document failures are never simulated here.
        super(settings, actionName, transportService, clusterService,
            mockIndicesService(clusterService), threadPool, shardStateAction,
            new ActionFilters(new HashSet<>()), TestRequest::new, TestRequest::new, ignore -> ThreadPool.Names.SAME, false,
            new IndexingPressure(settings), EmptySystemIndices.INSTANCE);
        this.withDocumentFailureOnPrimary = false;
        this.withDocumentFailureOnReplica = false;
    }
    @Override
    protected TestResponse newResponseInstance(StreamInput in) throws IOException {
        return new TestResponse();
    }
    @Override
    protected void dispatchedShardOperationOnPrimary(
        TestRequest request, IndexShard primary, ActionListener<PrimaryResult<TestRequest, TestResponse>> listener) {
        ActionListener.completeWith(listener, () -> {
            if (withDocumentFailureOnPrimary) {
                // Simulated document-level failure: no response, no translog location.
                return new WritePrimaryResult<>(request, null, null, new RuntimeException("simulated"), primary, logger);
            } else {
                return new WritePrimaryResult<>(request, new TestResponse(), location, null, primary, logger);
            }
        });
    }
    @Override
    protected void dispatchedShardOperationOnReplica(TestRequest request, IndexShard replica, ActionListener<ReplicaResult> listener) {
        ActionListener.completeWith(listener, () -> {
            final WriteReplicaResult<TestRequest> replicaResult;
            if (withDocumentFailureOnReplica) {
                // Simulated document-level failure on the replica path.
                replicaResult = new WriteReplicaResult<>(request, null, new RuntimeException("simulated"), replica, logger);
            } else {
                replicaResult = new WriteReplicaResult<>(request, location, null, replica, logger);
            }
            return replicaResult;
        });
    }
}
/**
 * Builds a mocked {@link IndexService} whose {@code getShard(int)} returns a
 * mocked shard for valid shard ids and throws {@link ShardNotFoundException}
 * for out-of-range ids.
 *
 * @param indexMetadata metadata of the mocked index; supplies the index name and
 *        the shard count used for the bounds check
 * @param clusterService forwarded to {@code mockIndexShard} so shard answers
 *        track the live cluster state
 * @return the mocked index service
 */
final IndexService mockIndexService(final IndexMetadata indexMetadata, ClusterService clusterService) {
    final IndexService indexService = mock(IndexService.class);
    when(indexService.getShard(anyInt())).then(invocation -> {
        int shard = (Integer) invocation.getArguments()[0];
        final ShardId shardId = new ShardId(indexMetadata.getIndex(), shard);
        // Valid shard ids are 0..numberOfShards-1. The previous check used '>',
        // which wrongly accepted shard == numberOfShards.
        if (shard >= indexMetadata.getNumberOfShards()) {
            throw new ShardNotFoundException(shardId);
        }
        return mockIndexShard(shardId, clusterService);
    });
    return indexService;
}
/**
 * Builds a mocked {@link IndicesService} that resolves indices against the
 * cluster state at call time:
 * <ul>
 *   <li>{@code indexServiceSafe} always delegates to {@code mockIndexService};</li>
 *   <li>{@code indexService} returns {@code null} for unknown indices.</li>
 * </ul>
 */
final IndicesService mockIndicesService(ClusterService clusterService) {
    final IndicesService indicesService = mock(IndicesService.class);
    when(indicesService.indexServiceSafe(any(Index.class))).then(invocation -> {
        Index index = (Index) invocation.getArguments()[0];
        final ClusterState state = clusterService.state();
        final IndexMetadata indexSafe = state.metadata().getIndexSafe(index);
        return mockIndexService(indexSafe, clusterService);
    });
    when(indicesService.indexService(any(Index.class))).then(invocation -> {
        Index index = (Index) invocation.getArguments()[0];
        final ClusterState state = clusterService.state();
        if (state.metadata().hasIndex(index.getName())) {
            // Reuse the state captured above: re-reading clusterService.state()
            // here could observe a different state than the one hasIndex() was
            // checked against, and getIndexSafe would then throw.
            return mockIndexService(state.metadata().getIndexSafe(index), clusterService);
        } else {
            return null;
        }
    });
    return indicesService;
}
// Outstanding operation permits across all mocked shards; incremented on acquire
// and decremented when the Releasable handed to the callback is closed.
private final AtomicInteger count = new AtomicInteger(0);
// Read by the mocked isRelocatedPrimary(); never written in this chunk —
// presumably flipped by tests that simulate a primary hand-off. TODO confirm.
private final AtomicBoolean isRelocated = new AtomicBoolean(false);

/**
 * Builds a mocked {@link IndexShard} whose operation-permit acquisition succeeds
 * synchronously and whose routing entry and primary term are derived live from
 * the given {@link ClusterService}'s current state on every call.
 */
private IndexShard mockIndexShard(ShardId shardId, ClusterService clusterService) {
    final IndexShard indexShard = mock(IndexShard.class);
    // Primary permit: granted immediately; the Releasable decrements the counter.
    doAnswer(invocation -> {
        ActionListener<Releasable> callback = (ActionListener<Releasable>) invocation.getArguments()[0];
        count.incrementAndGet();
        callback.onResponse(count::decrementAndGet);
        return null;
    }).when(indexShard).acquirePrimaryOperationPermit(any(ActionListener.class), anyString(), anyObject());
    // Replica permit: rejects requests whose primary term is older than the
    // shard's pending primary term, otherwise grants like the primary path.
    doAnswer(invocation -> {
        long term = (Long)invocation.getArguments()[0];
        ActionListener<Releasable> callback = (ActionListener<Releasable>) invocation.getArguments()[1];
        final long primaryTerm = indexShard.getPendingPrimaryTerm();
        if (term < primaryTerm) {
            throw new IllegalArgumentException(String.format(Locale.ROOT, "%s operation term [%d] is too old (current [%d])",
                shardId, term, primaryTerm));
        }
        count.incrementAndGet();
        callback.onResponse(count::decrementAndGet);
        return null;
    }).when(indexShard)
        .acquireReplicaOperationPermit(anyLong(), anyLong(), anyLong(), any(ActionListener.class), anyString(), anyObject());
    // Routing is re-resolved from the local node's routing table on every call,
    // so cluster-state mutations made by a test are observed immediately.
    when(indexShard.routingEntry()).thenAnswer(invocationOnMock -> {
        final ClusterState state = clusterService.state();
        final RoutingNode node = state.getRoutingNodes().node(state.nodes().getLocalNodeId());
        final ShardRouting routing = node.getByShardId(shardId);
        if (routing == null) {
            throw new ShardNotFoundException(shardId, "shard is no longer assigned to current node");
        }
        return routing;
    });
    when(indexShard.isRelocatedPrimary()).thenAnswer(invocationOnMock -> isRelocated.get());
    // These tests never expect a shard failure; fail loudly if one is attempted.
    doThrow(new AssertionError("failed shard is not supported")).when(indexShard).failShard(anyString(), any(Exception.class));
    when(indexShard.getPendingPrimaryTerm()).thenAnswer(i ->
        clusterService.state().metadata().getIndexSafe(shardId.getIndex()).primaryTerm(shardId.id()));
    return indexShard;
}
/** Minimal replicated write request; carries no payload beyond its shard id. */
private static class TestRequest extends ReplicatedWriteRequest<TestRequest> {
    TestRequest(StreamInput in) throws IOException {
        super(in);
    }

    TestRequest() {
        // Fixed, arbitrary shard id; tests resolve routing themselves.
        super(new ShardId("test", "test", 1));
    }

    @Override
    public String toString() {
        return "TestRequest{}";
    }
}
/** Replication response that records the last value passed to setForcedRefresh. */
private static class TestResponse extends ReplicationResponse implements WriteResponse {
    // Latest forced-refresh flag; package-visible so tests can inspect it.
    boolean forcedRefresh;

    @Override
    public void setForcedRefresh(boolean forcedRefresh) {
        this.forcedRefresh = forcedRefresh;
    }
}
/** ActionListener that captures the last response or failure for later inspection. */
private static class CapturingActionListener<R> implements ActionListener<R> {
    // Last successful response, or null if none has arrived.
    private R response;
    // Last failure, or null if none has arrived.
    private Exception failure;

    @Override
    public void onResponse(R response) {
        this.response = response;
    }

    @Override
    public void onFailure(Exception failure) {
        this.failure = failure;
    }
}
}
| {
"content_hash": "c98ba9bcec5b270d457ed767bec29c8a",
"timestamp": "",
"source": "github",
"line_count": 513,
"max_line_length": 139,
"avg_line_length": 51.21052631578947,
"alnum_prop": 0.7008107799474705,
"repo_name": "robin13/elasticsearch",
"id": "56cfdac9197c23f341741d19347304d061df7ad8",
"size": "26624",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "11082"
},
{
"name": "Batchfile",
"bytes": "14049"
},
{
"name": "Emacs Lisp",
"bytes": "3341"
},
{
"name": "FreeMarker",
"bytes": "45"
},
{
"name": "Groovy",
"bytes": "315863"
},
{
"name": "HTML",
"bytes": "3399"
},
{
"name": "Java",
"bytes": "40107206"
},
{
"name": "Perl",
"bytes": "7271"
},
{
"name": "Python",
"bytes": "54437"
},
{
"name": "Shell",
"bytes": "108937"
}
],
"symlink_target": ""
} |
<?php
namespace TreeBuilder;
interface TreeBuilderInterface
{
    /**
     * Builds the root node of the tree.
     *
     * @param mixed $nodes      top-level nodes to build from — assumed; confirm against implementations
     * @param mixed $firstStart presumably a first-pass flag or starting offset — TODO confirm
     * @param mixed $userParams caller-supplied options passed through unchanged
     */
    public function rootNode($nodes, $firstStart, $userParams);

    /**
     * Builds one child node at the given nesting depth.
     *
     * @param mixed $item         the node being built
     * @param mixed $childNodes   children of $item — assumed; confirm against implementations
     * @param mixed $aliases      presumably name aliases applied during building — TODO confirm
     * @param mixed $nestingLevel depth of $item within the tree
     * @param mixed $userParams   caller-supplied options passed through unchanged
     */
    public function childNode($item, $childNodes, $aliases, $nestingLevel, $userParams);
} | {
"content_hash": "3ffc09da7b47a107e7b9e4c9689b25a5",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 88,
"avg_line_length": 21.9,
"alnum_prop": 0.7534246575342466,
"repo_name": "artnv/TreeBuilder",
"id": "a22950b7a8c0ce4fff4fa24a459fd6486eb23d6c",
"size": "219",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "php/TreeBuilder/TreeBuilderInterface.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "9243"
},
{
"name": "JavaScript",
"bytes": "10652"
},
{
"name": "PHP",
"bytes": "13933"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>canon-bdds: Not compatible 👼</title>
<link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" />
<link href="../../../../../bootstrap.min.css" rel="stylesheet">
<link href="../../../../../bootstrap-custom.css" rel="stylesheet">
<link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet">
<script src="../../../../../moment.min.js"></script>
<!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body>
<div class="container">
<div class="navbar navbar-default" role="navigation">
<div class="container-fluid">
<div class="navbar-header">
<a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a>
</div>
<div id="navbar" class="collapse navbar-collapse">
<ul class="nav navbar-nav">
<li><a href="../..">clean / released</a></li>
<li class="active"><a href="">8.5.1 / canon-bdds - 8.10.0</a></li>
</ul>
</div>
</div>
</div>
<div class="article">
<div class="row">
<div class="col-md-12">
<a href="../..">« Up</a>
<h1>
canon-bdds
<small>
8.10.0
<span class="label label-info">Not compatible 👼</span>
</small>
</h1>
<p>📅 <em><script>document.write(moment("2022-10-22 13:17:09 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-10-22 13:17:09 UTC)</em></p>
<h2>Context</h2>
<pre># Packages matching: installed
# Name # Installed # Synopsis
base-bigarray base
base-num base Num library distributed with the OCaml compiler
base-ocamlbuild base OCamlbuild binary and libraries distributed with the OCaml compiler
base-threads base
base-unix base
camlp5 7.14 Preprocessor-pretty-printer of OCaml
conf-findutils 1 Virtual package relying on findutils
conf-perl 2 Virtual package relying on perl
coq 8.5.1 Formal proof management system
num 0 The Num library for arbitrary-precision integer and rational arithmetic
ocaml 4.02.3 The OCaml compiler (virtual package)
ocaml-base-compiler 4.02.3 Official 4.02.3 release
ocaml-config 1 OCaml Switch Configuration
# opam file:
opam-version: "2.0"
maintainer: "Hugo.Herbelin@inria.fr"
homepage: "https://github.com/coq-contribs/canon-bdds"
license: "Unknown"
build: [make "-j%{jobs}%"]
install: [make "install"]
remove: ["rm" "-R" "%{lib}%/coq/user-contrib/CanonBDDs"]
depends: [
"ocaml"
"coq" {>= "8.10" & < "8.11~"}
]
tags: [
"keyword: BDD"
"keyword: BDT"
"keyword: finite sets"
"keyword: model checking"
"keyword: binary decision diagrams"
"category: Computer Science/Decision Procedures and Certified Algorithms/Decision procedures"
"category: Miscellaneous/Extracted Programs/Decision procedures"
]
authors: [
"Emmanuel Ledinot"
]
bug-reports: "https://github.com/coq-contribs/canon-bdds/issues"
dev-repo: "git+https://github.com/coq-contribs/canon-bdds.git"
synopsis: "Canonicity of Binary Decision Dags"
description: """
A proof of unicity and canonicity of Binary Decision Trees and
Binary Decision Dags. This contrib contains also a development on finite sets."""
flags: light-uninstall
url {
src: "https://github.com/coq-contribs/canon-bdds/archive/v8.10.0.tar.gz"
checksum: "md5=d23b9b74b3a8af434c1e1907d24c4a9d"
}
</pre>
<h2>Lint</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Dry install 🏜️</h2>
<p>Dry install with the current Coq version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam install -y --show-action coq-canon-bdds.8.10.0 coq.8.5.1</code></dd>
<dt>Return code</dt>
<dd>5120</dd>
<dt>Output</dt>
<dd><pre>[NOTE] Package coq is already installed (current version is 8.5.1).
The following dependencies couldn't be met:
- coq-canon-bdds -> coq >= 8.10 -> ocaml >= 4.05.0
base of this switch (use `--unlock-base' to force)
No solution found, exiting
</pre></dd>
</dl>
<p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-canon-bdds.8.10.0</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Install dependencies</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Install 🚀</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Installation size</h2>
<p>No files were installed.</p>
<h2>Uninstall 🧹</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Missing removes</dt>
<dd>
none
</dd>
<dt>Wrong removes</dt>
<dd>
none
</dd>
</dl>
</div>
</div>
</div>
<hr/>
<div class="footer">
<p class="text-center">
Sources are on <a href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣
</p>
</div>
</div>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
<script src="../../../../../bootstrap.min.js"></script>
</body>
</html>
| {
"content_hash": "462a59c73685a45122e4cb481d093b9a",
"timestamp": "",
"source": "github",
"line_count": 175,
"max_line_length": 159,
"avg_line_length": 41.21714285714286,
"alnum_prop": 0.5506723970608624,
"repo_name": "coq-bench/coq-bench.github.io",
"id": "7b6959f3a1f8493e9397dad8203a651bb395b342",
"size": "7238",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "clean/Linux-x86_64-4.02.3-2.0.6/released/8.5.1/canon-bdds/8.10.0.html",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
#ifndef _IMUSENSOR_HH_
#define _IMUSENSOR_HH_
#include <vector>
#include <string>
#include "gazebo/physics/PhysicsTypes.hh"
#include "gazebo/sensors/Sensor.hh"
#include "gazebo/util/system.hh"
namespace gazebo
{
namespace sensors
{
/// \addtogroup gazebo_sensors
/// \{
/// \class ImuSensor ImuSensor.hh sensors/sensors.hh
/// \brief An IMU sensor.
class GAZEBO_VISIBLE ImuSensor: public Sensor
{
  /// \brief Constructor.
  public: ImuSensor();

  /// \brief Destructor.
  public: virtual ~ImuSensor();

  // Documentation inherited.
  protected: void Load(const std::string &_worldName, sdf::ElementPtr _sdf);

  // Documentation inherited.
  protected: virtual void Load(const std::string &_worldName);

  /// \brief Initialize the IMU.
  public: virtual void Init();

  // Documentation inherited
  protected: virtual bool UpdateImpl(bool _force);

  // Documentation inherited
  protected: virtual void Fini();

  /// \brief Returns the imu message.
  /// \return Imu message.
  public: msgs::IMU GetImuMessage() const;

  /// \brief Returns the angular velocity.
  /// \return Angular velocity.
  public: math::Vector3 GetAngularVelocity() const;

  /// \brief Returns the imu linear acceleration.
  /// \return Linear acceleration.
  public: math::Vector3 GetLinearAcceleration() const;

  /// \brief Get orientation of the IMU relative to the reference pose.
  /// \return Returns the orientation quaternion of the IMU relative to
  /// the imu reference pose.
  public: math::Quaternion GetOrientation() const;

  /// \brief Sets the current pose as the IMU reference pose.
  public: void SetReferencePose();

  // Documentation inherited.
  public: virtual bool IsActive();

  /// \brief Callback when link data is received.
  /// \param[in] _msg Message containing link data.
  private: void OnLinkData(ConstLinkDataPtr &_msg);

  /// \brief Imu reference pose.
  private: math::Pose referencePose;

  /// \brief Save previous imu linear velocity for computing acceleration.
  private: math::Vector3 lastLinearVel;

  /// \brief Imu linear acceleration.
  private: math::Vector3 linearAcc;

  /// \brief Store gravity vector to be added to the imu output.
  private: math::Vector3 gravity;

  /// \brief Imu data publisher.
  private: transport::PublisherPtr pub;

  /// \brief Subscriber to link data published by parent entity.
  private: transport::SubscriberPtr linkDataSub;

  /// \brief Parent entity which the IMU is attached to.
  private: physics::LinkPtr parentEntity;

  /// \brief Imu message.
  private: msgs::IMU imuMsg;

  /// \brief Mutex to protect reads and writes.
  private: mutable boost::mutex mutex;

  /// \brief Double buffer for storing incoming link data.
  private: boost::shared_ptr<msgs::LinkData const> incomingLinkData[2];

  /// \brief Index of the currently-read element in the link data array.
  private: unsigned int dataIndex;

  /// \brief True if new link data has been received.
  private: bool dataDirty;

  /// \brief Which noise types we support.
  private: enum NoiseModelType
  {
    NONE,
    GAUSSIAN
  };

  /// \brief If true, apply the noise model specified by the other noise
  /// parameters.
  private: bool noiseActive;

  /// \brief Which type of noise we're applying.
  private: enum NoiseModelType noiseType;

  /// \brief If noiseType==GAUSSIAN, the mean of the distribution
  /// from which we sample when adding noise to accelerations.
  private: double accelNoiseMean;

  /// \brief If noiseType==GAUSSIAN, the standard deviation of the
  /// distribution from which we sample when adding noise to accelerations.
  private: double accelNoiseStdDev;

  /// \brief If noiseType==GAUSSIAN, the bias we'll add to accelerations.
  private: double accelBias;

  /// \brief If noiseType==GAUSSIAN, the mean of the distribution
  /// from which we sample when adding noise to rates.
  private: double rateNoiseMean;

  /// \brief If noiseType==GAUSSIAN, the standard deviation of the
  /// distribution from which we sample when adding noise to rates.
  private: double rateNoiseStdDev;

  /// \brief If noiseType==GAUSSIAN, the bias we'll add to rates.
  private: double rateBias;
};
/// \}
}
}
#endif
| {
"content_hash": "89c7d9d194bcc0354c806ae85f564c90",
"timestamp": "",
"source": "github",
"line_count": 147,
"max_line_length": 80,
"avg_line_length": 30.564625850340136,
"alnum_prop": 0.6594702871132874,
"repo_name": "arpg/Gazebo",
"id": "22bd33971de6dbe89c566519565d95d74a6a0042",
"size": "5118",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gazebo/sensors/ImuSensor.hh",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "3423114"
},
{
"name": "C++",
"bytes": "8826323"
},
{
"name": "CSS",
"bytes": "12592"
},
{
"name": "JavaScript",
"bytes": "25255"
}
],
"symlink_target": ""
} |
package com.coolweather.app.db;
import android.content.Context;
import android.database.DatabaseErrorHandler;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteDatabase.CursorFactory;
import android.database.sqlite.SQLiteOpenHelper;
public class CoolWeatherOpenHelper extends SQLiteOpenHelper {
/**
* Province±í½¨±íÓï¾ä
*/
public static final String CREATE_PROVINCE = "create table Province ("
+ "id integer primary key autoincrement, "
+ "province_name text, "
+ "province_code text)";
/**
* City±í½¨±íÓï¾ä
*/
public static final String CREATE_CITY = "create table City ("
+ "id integer primary key autoincrement, "
+ "city_name text, "
+ "city_code text, "
+ "province_id integer)";
/**
* County±í½¨±íÓï¾ä
*/
public static final String CREATE_COUNTY = "create table County ("
+ "id integer primary key autoincrement, "
+ "county_name text, "
+ "county_code text, "
+ "city_id integer)";
public CoolWeatherOpenHelper(Context context, String name,
CursorFactory factory, int version) {
super(context, name, factory, version);
}
public void onCreate(SQLiteDatabase db) {
db.execSQL(CREATE_PROVINCE);
db.execSQL(CREATE_CITY);
db.execSQL(CREATE_COUNTY);
}
public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
}
}
| {
"content_hash": "425eddc1bddaf79d5496f14fbc613aa5",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 75,
"avg_line_length": 24.10909090909091,
"alnum_prop": 0.72473604826546,
"repo_name": "dongjiajun/coolweather",
"id": "0885fcf80eeaafc4ef92c8ddcafc6ec3b15843aa",
"size": "1326",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/com/coolweather/app/db/CoolWeatherOpenHelper.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "28433"
}
],
"symlink_target": ""
} |
package vpp1908
| {
"content_hash": "7751f3de505e556090a9af56ab6bc1cf",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 15,
"avg_line_length": 16,
"alnum_prop": 0.875,
"repo_name": "rastislavszabo/vpp",
"id": "2c2d56e63c1effd39aede6ab29436a7ef6c60678",
"size": "780",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "vendor/github.com/ligato/vpp-agent/plugins/vpp/l2plugin/vppcalls/vpp1908/doc.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "7753"
},
{
"name": "Dockerfile",
"bytes": "8549"
},
{
"name": "Go",
"bytes": "2219611"
},
{
"name": "HTML",
"bytes": "33270"
},
{
"name": "JavaScript",
"bytes": "1716"
},
{
"name": "Makefile",
"bytes": "12395"
},
{
"name": "Python",
"bytes": "4797"
},
{
"name": "RobotFramework",
"bytes": "141402"
},
{
"name": "Ruby",
"bytes": "13223"
},
{
"name": "Shell",
"bytes": "172620"
},
{
"name": "TypeScript",
"bytes": "221104"
}
],
"symlink_target": ""
} |
/// Adopted by collection-like types that can apply a transform to each element.
@protocol Mappable
/// Returns the result of mapping funcBlock over the receiver — presumably a new
/// collection of the block's return values; confirm against implementations.
- (id)map:(id (^)(id))funcBlock;
@end | {
"content_hash": "0eaa9b246eb47bd1a5e62e30fc1c7666",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 32,
"avg_line_length": 18.666666666666668,
"alnum_prop": 0.6607142857142857,
"repo_name": "stuartervine/OCTotallyLazy",
"id": "22f4d553ad3fe0bec7c2ca370bb0fbf8c4e021d8",
"size": "56",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "OCTotallyLazy/Mappable.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Objective-C",
"bytes": "93973"
},
{
"name": "Ruby",
"bytes": "987"
},
{
"name": "Shell",
"bytes": "1167"
}
],
"symlink_target": ""
} |
package com.beyondj.gateway.handlers.detecting.protocol.openwire.codec;
import com.beyondj.gateway.handlers.detecting.protocol.openwire.command.DataStructure;
import org.fusesource.hawtbuf.DataByteArrayInputStream;
import org.fusesource.hawtbuf.DataByteArrayOutputStream;
import java.io.IOException;
/**
 * Marshals one OpenWire {@link DataStructure} type to and from the wire, in
 * either the tight (bit-packed, two-pass) or the loose encoding.
 */
public interface DataStreamMarshaller {

    /** Returns the OpenWire data-structure type code this marshaller handles. */
    byte getDataStructureType();

    /** Creates a new, empty instance of the data structure this marshaller handles. */
    DataStructure createObject();

    /**
     * First pass of the tight encoding: records flags in {@code bs} and returns a
     * size — presumably the byte count the second pass will write; confirm against
     * OpenWireFormat.
     */
    int tightMarshal1(OpenWireFormat format, Object c, BooleanStream bs) throws IOException;

    /** Second pass of the tight encoding: writes the payload bytes to {@code ds}. */
    void tightMarshal2(OpenWireFormat format, Object c, DataByteArrayOutputStream ds, BooleanStream bs) throws IOException;

    /** Populates {@code data}'s fields from {@code dis} using the tight encoding. */
    void tightUnmarshal(OpenWireFormat format, Object data, DataByteArrayInputStream dis, BooleanStream bs) throws IOException;

    /** Writes {@code c} to {@code ds} using the loose encoding. */
    void looseMarshal(OpenWireFormat format, Object c, DataByteArrayOutputStream ds) throws IOException;

    /** Populates {@code data}'s fields from {@code dis} using the loose encoding. */
    void looseUnmarshal(OpenWireFormat format, Object data, DataByteArrayInputStream dis) throws IOException;
}
| {
"content_hash": "ecf357911d42846e45da4743ad4f3921",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 127,
"avg_line_length": 44.59090909090909,
"alnum_prop": 0.8256880733944955,
"repo_name": "nkasvosve/beyondj",
"id": "9842efa4d7aa3a11ac49850947ee555adc023616",
"size": "1619",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "beyondj-gateway/beyondj-gateway-core/src/main/java/com/beyondj/gateway/handlers/detecting/protocol/openwire/codec/DataStreamMarshaller.java",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3265"
},
{
"name": "CSS",
"bytes": "231601"
},
{
"name": "HTML",
"bytes": "269326"
},
{
"name": "Java",
"bytes": "7169407"
},
{
"name": "JavaScript",
"bytes": "2065748"
},
{
"name": "Makefile",
"bytes": "5769"
},
{
"name": "Python",
"bytes": "19932"
},
{
"name": "Ruby",
"bytes": "186"
},
{
"name": "Shell",
"bytes": "4666"
},
{
"name": "Smarty",
"bytes": "902"
}
],
"symlink_target": ""
} |
#ifndef WEBRTC_CONDUCTOR_H_
#define WEBRTC_CONDUCTOR_H_
#include <deque>
#include <map>
#include <memory>
#include <set>
#include <string>
#include "buffer_renderer.h"
#include "peer_connection_client.h"
#include "input_data_channel_observer.h"
#include "main_window.h"
#include "config_parser.h"
#include "webrtc/api/mediastreaminterface.h"
#include "webrtc/api/peerconnectioninterface.h"
// Orchestrates a single WebRTC session: owns the peer connection, relays
// signalling through PeerConnectionClient, and reports events to the UI via
// MainWindowCallback.
class Conductor : public webrtc::PeerConnectionObserver,
    public webrtc::CreateSessionDescriptionObserver,
    public PeerConnectionClientObserver,
    public MainWindowCallback
{
public:
    // Ids for messages marshalled back onto the UI thread (see UIThreadCallback).
    enum CallbackID
    {
        MEDIA_CHANNELS_INITIALIZED = 1,
        PEER_CONNECTION_CLOSED,
        SEND_MESSAGE_TO_PEER,
        NEW_STREAM_ADDED,
        STREAM_REMOVED,
    };

    // None of the pointer parameters are owned by Conductor — presumably they
    // must outlive it; confirm against call sites.
    Conductor(
        PeerConnectionClient* client,
        MainWindow* main_window,
        StreamingToolkit::WebRTCConfig* webrtc_config,
        StreamingToolkit::BufferRenderer* buffer_renderer);

    // True while a peer connection is active.
    bool connection_active() const;

    // Credentials used when authenticating against the TURN server.
    void SetTurnCredentials(const std::string& username, const std::string& password);

    // Handler invoked for input received over the data channel.
    void SetInputDataHandler(StreamingToolkit::InputDataHandler* handler);

    //-------------------------------------------------------------------------
    // MainWindowCallback implementation.
    //-------------------------------------------------------------------------
    void StartLogin(const std::string& server, int port) override;

    virtual void Close();

protected:
    ~Conductor();

    bool InitializePeerConnection();

    bool ReinitializePeerConnectionForLoopback();

    bool CreatePeerConnection(bool dtls);

    void DeletePeerConnection();

    void EnsureStreamingUI();

    void AddStreams();

    std::unique_ptr<cricket::VideoCapturer> OpenVideoCaptureDevice();

    //-------------------------------------------------------------------------
    // PeerConnectionObserver implementation.
    //-------------------------------------------------------------------------
    void OnSignalingChange(
        webrtc::PeerConnectionInterface::SignalingState new_state) override {};

    void OnAddStream(
        rtc::scoped_refptr<webrtc::MediaStreamInterface> stream) override;

    void OnRemoveStream(
        rtc::scoped_refptr<webrtc::MediaStreamInterface> stream) override;

    void OnDataChannel(
        rtc::scoped_refptr<webrtc::DataChannelInterface> channel) override;

    void OnRenegotiationNeeded() override {}

    void OnIceConnectionChange(
        webrtc::PeerConnectionInterface::IceConnectionState new_state) override {};

    void OnIceGatheringChange(
        webrtc::PeerConnectionInterface::IceGatheringState new_state) override {};

    void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override;

    void OnIceConnectionReceivingChange(bool receiving) override {}

    //-------------------------------------------------------------------------
    // PeerConnectionClientObserver implementation.
    //-------------------------------------------------------------------------
    void OnSignedIn() override;

    void OnDisconnected() override;

    void OnPeerConnected(int id, const std::string& name) override;

    void OnPeerDisconnected(int id) override;

    void OnMessageFromPeer(int peer_id, const std::string& message) override;

    void OnMessageSent(int err) override;

    void OnServerConnectionFailure() override;

    //-------------------------------------------------------------------------
    // MainWindowCallback implementation.
    //-------------------------------------------------------------------------
    void DisconnectFromServer() override;

    void ConnectToPeer(int peer_id) override;

    void DisconnectFromCurrentPeer() override;

    void UIThreadCallback(int msg_id, void* data) override;

    // CreateSessionDescriptionObserver implementation.
    void OnSuccess(webrtc::SessionDescriptionInterface* desc) override;

    void OnFailure(const std::string& error) override;

protected:
    // Send a message to the remote peer.
    void SendMessage(const std::string& json_object);

private:
    void SendMessageToPeer(std::string* msg);

    void NewStreamAdded(webrtc::MediaStreamInterface* stream);

    void StreamRemoved(webrtc::MediaStreamInterface* stream);

    // Id of the peer we are currently connected to.
    int peer_id_;

    // True when running in local loopback mode.
    bool loopback_;

    rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection_;

    rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> peer_connection_factory_;

    // Signalling client; not owned.
    PeerConnectionClient* client_;

    rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel_;

    std::unique_ptr<StreamingToolkit::InputDataChannelObserver> data_channel_observer_;

    MainWindow* main_window_;

    StreamingToolkit::WebRTCConfig* webrtc_config_;

    StreamingToolkit::InputDataHandler* input_data_handler_;

    StreamingToolkit::BufferRenderer* buffer_renderer_;

    // Messages queued while the connection is not yet ready to send.
    std::deque<std::string*> pending_messages_;

    // Active remote streams, keyed by stream label.
    std::map<std::string, rtc::scoped_refptr<webrtc::MediaStreamInterface>> active_streams_;

    std::string server_;

    std::string turn_username_;

    std::string turn_password_;
};
#endif // WEBRTC_CONDUCTOR_H_
| {
"content_hash": "d09b909afab9c36654802a59d41b8f30",
"timestamp": "",
"source": "github",
"line_count": 165,
"max_line_length": 89,
"avg_line_length": 29.272727272727273,
"alnum_prop": 0.6853002070393375,
"repo_name": "michaelperel/3dtoolkit",
"id": "794266640278892b3b46c47c2ce2ababac94a9cd",
"size": "5238",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Plugins/NativeServerPlugin/inc/conductor.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "48"
},
{
"name": "C",
"bytes": "397583"
},
{
"name": "C#",
"bytes": "187111"
},
{
"name": "C++",
"bytes": "3523090"
},
{
"name": "Cuda",
"bytes": "7864"
},
{
"name": "Objective-C",
"bytes": "8451"
},
{
"name": "PowerShell",
"bytes": "43172"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.