text stringlengths 2 1.04M | meta dict |
|---|---|
module TranslationCenter
  # A single translation of a TranslationKey into one language, contributed
  # by a polymorphic translator. A key has at most one accepted translation
  # per language; all others stay pending.
  class Translation < ActiveRecord::Base
    # attr_accessible :value, :lang, :translation_key_id, :user_id, :status

    # Pagination sizes used by the admin/review UI
    CHANGES_PER_PAGE = 5
    NUMBER_PER_PAGE = 15

    # Statuses
    ACCEPTED = "accepted"
    PENDING = "pending"

    # Relations
    belongs_to :translation_key
    belongs_to :translator, polymorphic: true

    # Validations
    validates :translation_key_id, :lang, :status, :value, presence: true
    validate :one_translation_per_lang_per_key, on: :create

    # Scopes
    # Returns accepted translations
    scope :accepted, -> { where(status: ACCEPTED) }
    # Returns translations in a certain language
    scope :in, ->(lang) { where(lang: lang.to_s.strip) }
    # Sorts translations by number of votes (descending). The votable_type
    # filter keeps votes belonging to other votable models out of the count.
    scope :sorted_by_votes, -> do
      where('votable_type IS NULL OR votable_type = ?', 'TranslationCenter::Translation')
        .select('translation_center_translations.*, count(votes.id) as votes_count')
        .joins('LEFT OUTER JOIN votes on votes.votable_id = translation_center_translations.id')
        .group('translation_center_translations.id')
        .order('votes_count desc')
    end

    # Callbacks
    after_save :update_key_status
    after_destroy :notify_key

    alias_method :key, :translation_key

    acts_as_votable
    audited

    # Serialize as we could store arrays
    serialize :value

    # Called after save to recompute the owning key's status for this language.
    def update_key_status
      key.update_status(lang)
    end

    # Called after destroy: recompute the key's status and remove this
    # translation's audit trail.
    def notify_key
      key.update_status(lang)
      # BUGFIX: `audits.destroy` with no arguments removed nothing; the clear
      # intent is to delete every audit record belonging to this translation.
      audits.destroy_all
    end

    # True when the status of the translation is accepted.
    def accepted?
      status == ACCEPTED
    end

    # True when the status of the translation is pending.
    def pending?
      status == PENDING
    end

    # Accept this translation: demote the currently accepted translation for
    # the same key/lang (if any) to pending, then mark this one accepted.
    def accept
      # If translation is already accepted do nothing
      return if accepted?
      translation_key.accepted_translation_in(lang)
        .try(:update_attribute, :status, TranslationKey::PENDING)
      # reload the translation key as it has changed
      translation_key.reload
      update_attribute(:status, ACCEPTED)
    end

    # Revert this translation to pending.
    def unaccept
      update_attribute(:status, PENDING)
    end

    # Validation: a translator may submit only one translation per key per
    # language.
    def one_translation_per_lang_per_key
      # Guard: translator presence is not otherwise validated; without this
      # a missing association raised NoMethodError instead of failing cleanly.
      return if translator.blank?
      duplicate = Translation.exists?(
        lang: lang,
        translator_id: translator.id,
        translator_type: translator.class.name,
        translation_key_id: key.id
      )
      # The old implementation returned meaningless true/false literals; a
      # validation only needs to register the error.
      errors.add(:lang, I18n.t('.one_translation_per_lang_per_key')) if duplicate
    end

    private

    # NOTE(review): strong parameters belong in a controller — `params` is not
    # defined on an ActiveRecord model, so this method would raise NameError
    # if ever called. Appears to be dead code pasted from a controller;
    # kept unchanged for backward compatibility, but consider removing.
    def translation_params
      params.require(:translation).permit(:value, :lang, :translation_key_id, :user_id, :status)
    end
  end
end
| {
"content_hash": "f6a3a4715836cbe047e8ab48abf09b18",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 96,
"avg_line_length": 28.160714285714285,
"alnum_prop": 0.6642358909321496,
"repo_name": "BadrIT/translation_center",
"id": "47ed881480f95a48709883cfc76da5264ca98650",
"size": "3154",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/models/translation_center/translation.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5548"
},
{
"name": "HTML",
"bytes": "27986"
},
{
"name": "JavaScript",
"bytes": "14604"
},
{
"name": "Ruby",
"bytes": "131387"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
Proc. Roy. Philos. Soc. Glasgow 10: 292 (1877)
#### Original name
Calycidium cuneatum Stirt.
### Remarks
null | {
"content_hash": "702ed3c13835ff49156b07491e88c0cd",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 46,
"avg_line_length": 15.23076923076923,
"alnum_prop": 0.7070707070707071,
"repo_name": "mdoering/backbone",
"id": "6628949c55bf11832fd0ff01a684a31ec5a17d51",
"size": "248",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Fungi/Ascomycota/Lecanoromycetes/Lecanorales/Calycidiaceae/Calycidium/Calycidium cuneatum/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
Cloud computing
===============
Studio can be configured to submit jobs to the cloud. Right
now, only Google Cloud is supported (CPU only), as well as Amazon EC2
(CPU and GPU).
Once configured (see configuration instructions for `Google
Cloud <http://docs.studio.ml/en/latest/gcloud_setup.html>`__, and
`Amazon AWS <http://docs.studio.ml/en/latest/ec2_setup.html>`__) the command
::
studio run --cloud={gcloud|ec2|gcspot|ec2spot} my_script.py
will create an instance, set up the python environment, run
``my_script.py``, and shutdown the instance. You'll be able to see the
progress of the job in ``studio ui``. Different experiments might require
different hardware. Fortunately, Google Cloud offers flexibility of
instance configuration, and Amazon EC2 offers a variety of instances to
select from; Studio can leverage either. To specify the number of
cpus or gpus needed, use flags ``--cpus`` and ``--gpus`` respectively. That is,
the command:
::
studio run --cloud={gcloud|ec2|gcspot|ec2spot} --cpus=8 --gpus=1 my_script.py
will create an instance with 8 cpus and 1 gpu. The top of the line gpu
in Amazon EC2 is Tesla K80 at the moment, and that's the only one
available through Studio; we might provide some gpu selection flags
in the future as well.
The amount of ram and hard drive space can be configured via the
``--ram`` / ``--hdd`` flags (using standard suffixes like g(G,Gb,GiB), m(M,MiB)).
Note that the amount of RAM will be rounded up to the next factor of 256 Mb.
Also note that for now extended RAM for Google Cloud is not supported,
which means the amount of RAM per CPU should be between 1 and 6 Gb.
For Amazon EC2, Studio will find the cheapest instances with higher specs than required,
or throw an exception for too extravagant of a request.
Running on EC2 spot instances
-----------------------------
Basics
~~~~~~
Amazon EC2 offers so-called spot instances that are provided with a
substantial discount with the assumption that they can be taken from
the user at any moment. Google Compute Engine has a similar product called
preemptible instances, but Studio does not support it just yet. In
short, for spot instances the user specifies the max price to pay per
instance-hour. As long as the instance-hour price is below the specified
limit (bid), the user pays the current price and uses the instance.
Otherwise, the instance shuts down and is given to the higher
bidder. For a more detailed explanation, refer to the spot instances user guide
https://aws.amazon.com/ec2/spot/.
As you might have guessed,
when running with the ``--cloud=ec2spot`` option the job is submitted to
spot instances. You can additionally specify how much you are
willing to pay for these instances via ``--bid=<bid_in_usd>`` or
``--bid=<percent_of_ondemand_price>%``. The latter format specifies bid
in percent of on-demand price. Unless you feel very generous towards
Amazon there is no reason to specify a price above 100% the on-demand
price (in fact, the spot instance user guide discourages users from doing
so).
Note that bid is the max price for *one* instance; number of instances will
vary (see below).
Autoscaling and number of instances
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Given the ephemeral nature of spot workers, we need an additional mechanism
controlling / balancing number of such instances. This mechanism is
called auto-scaling, and in the simplest setting it tries to keep number
of running instances constant. Studio handles downsizing of the
auto-scaling groups when some workers are done and there is no work left
in the queue. You can specify this behaviour by setting the
``--num-workers`` flag.
Autoscaling allows more complex behaviour, such
as spinning up extra machines if there are too many messages in the queue.
The default behaviour of Studio is as follows - start with one spot
worker, and scale up when the number of outstanding work messages in the
queue is above 0.
Running on Google Cloud spot (preemptible) instances
----------------------------------------------------
Google Cloud's analog of EC2 spot instances are called `preemptible
instances <https://cloud.google.com/preemptible-vms/>`__.
Preemptible instances are similar to EC2 spot instances in that
they are much cheaper than regular (on-demand) instances and that
they can be taken away at any moment with very little or no notice. They
are different from EC2 spot instances in the bidding / market system -
the prices on preemptible instances are fixed and depend only on
hardware configuration. Thus, ``--bid`` has no effect when running with
``--cloud=gcspot``.
Also, autoscaling on a queue for Google Cloud is in
an alpha state and has some serious limitations; as such, we do not
support it just yet. The required number of workers has to be
specified via ``--num-workers`` (the default is 1), and Google group will
try to keep it constant (that is, if the instances are taken away, it
will try to spin up their replacements). When instances run out
of work, they automatically spin down and eventually the instance group is deleted.
| {
"content_hash": "937690bb45ea6286a6d1091a6fcc7a92",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 89,
"avg_line_length": 48.056603773584904,
"alnum_prop": 0.7530427954456222,
"repo_name": "studioml/studio",
"id": "654652e59e0d7855c05d42de7f24b8d7ddd5ea3a",
"size": "5094",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/cloud.rst",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "484"
},
{
"name": "HTML",
"bytes": "27833"
},
{
"name": "Python",
"bytes": "435537"
},
{
"name": "Shell",
"bytes": "19536"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<resources>
<!-- From: file:/C:/Users/Jacques/AndroidStudioProjects/TP%20Formation%20Android/4%20-%20Reseau%20et%20stockage%20donnees/TP%20ProductsReader/app/build/intermediates/exploded-aar/com.android.support/appcompat-v7/21.0.2/res/values-v17/values.xml -->
<eat-comment/>
<style name="RtlOverlay.Widget.AppCompat.ActionBar.TitleItem" parent="android:Widget">
<item name="android:layout_gravity">center_vertical|start</item>
<item name="android:paddingEnd">8dp</item>
</style>
<style name="RtlOverlay.Widget.AppCompat.ActionButton.CloseMode" parent="Base.Widget.AppCompat.ActionButton.CloseMode">
<item name="android:paddingStart">8dp</item>
<item name="android:layout_marginEnd">16dp</item>
</style>
<style name="RtlOverlay.Widget.AppCompat.ActionButton.Overflow" parent="Base.Widget.AppCompat.ActionButton.Overflow">
<item name="android:paddingStart">0dp</item>
<item name="android:paddingEnd">12dp</item>
</style>
<style name="RtlOverlay.Widget.AppCompat.PopupMenuItem" parent="android:Widget">
<item name="android:paddingEnd">16dp</item>
</style>
<style name="RtlOverlay.Widget.AppCompat.PopupMenuItem.InternalGroup" parent="android:Widget">
<item name="android:layout_marginStart">16dp</item>
</style>
<style name="RtlOverlay.Widget.AppCompat.PopupMenuItem.Text" parent="android:Widget">
<item name="android:layout_alignParentStart">true</item>
<item name="android:textAlignment">viewStart</item>
</style>
<style name="RtlOverlay.Widget.AppCompat.Search.DropDown" parent="android:Widget">
<item name="android:paddingStart">@dimen/abc_dropdownitem_text_padding_left</item>
<item name="android:paddingEnd">4dp</item>
</style>
<style name="RtlOverlay.Widget.AppCompat.Search.DropDown.Icon1" parent="android:Widget">
<item name="android:layout_alignParentStart">true</item>
</style>
<style name="RtlOverlay.Widget.AppCompat.Search.DropDown.Icon2" parent="android:Widget">
<item name="android:layout_toStartOf">@id/edit_query</item>
</style>
<style name="RtlOverlay.Widget.AppCompat.Search.DropDown.Query" parent="android:Widget">
<item name="android:layout_alignParentEnd">true</item>
</style>
<style name="RtlOverlay.Widget.AppCompat.Search.DropDown.Text" parent="Base.Widget.AppCompat.DropDownItem.Spinner">
<item name="android:layout_toStartOf">@android:id/icon2</item>
<item name="android:layout_toEndOf">@android:id/icon1</item>
</style>
<style name="RtlOverlay.Widget.AppCompat.SearchView.MagIcon" parent="android:Widget">
<item name="android:layout_marginStart">@dimen/abc_dropdownitem_text_padding_left</item>
</style>
</resources> | {
"content_hash": "732b9d9eb8bbd8b91a21a34e48f9841f",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 252,
"avg_line_length": 60.212765957446805,
"alnum_prop": 0.715547703180212,
"repo_name": "jacquesgiraudel/TP-Formation-Android",
"id": "9d66ea4ad789b136c1e01a4a535caa2b469260a1",
"size": "2830",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "4 - Reseau et stockage donnees/TP ProductsReader/app/build/intermediates/res/debug/values-v17/values.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1142"
},
{
"name": "HTML",
"bytes": "6856"
},
{
"name": "Java",
"bytes": "6493986"
},
{
"name": "Makefile",
"bytes": "1349"
}
],
"symlink_target": ""
} |
% Eigengrasp reduction example: normalize hand-pose samples, project them
% onto the top principal components, reconstruct the poses, and compute the
% reconstruction error.
close all; clear; clc;  % `clear` (not `clear all`) — avoids flushing compiled functions

data_dir = '~/Dropbox/Documents/SIMPLEX/DataCollection/11_29_data_local/proc/3_bottle69_open_bottle_palm_2_tf_convert_merged_successes_proc/';
data_file = 'hand_only_with_tf_labels_data';

data = load([data_dir data_file '.mat']);
data = data.data;
num_samples = size(data, 1);

% normalize data. need column_min and column_max to unnormalize
% NOTE(review): relies on a custom normalize() on the path (three outputs);
% MATLAB's built-in normalize() has a different signature — confirm.
[norm_data, column_min, column_max] = normalize(data);

% shift data to mean using mean pose
norm_mean_pose = mean(norm_data, 1);
shifted_data = bsxfun(@minus, norm_data, norm_mean_pose);

% run pca
[evectors, scores, evalues] = pca(shifted_data);

% display the eigenvalues (fraction of variance per component, cumulative)
normalized_evalues = evalues / sum(evalues);
figure, plot(cumsum(normalized_evalues));
xlabel('No. of eigenvectors'), ylabel('Variance accounted for');
ylim([0 1]), grid on;

% plot the top two eigenvectors
% proj = norm_data * evectors(:, 1:2);
% plot(proj(1,:), proj(2,:), 'r.');

% reconstruct
num_eigenvectors = 25;

% project into eigen subspace
projection = shifted_data * evectors(:, 1:num_eigenvectors); % same as top num_eigenvectors from scores from pca()

% reconstruction sample using the projection; adding mean unshifts data
reconstruction = bsxfun(@plus, projection * evectors(:, 1:num_eigenvectors)', norm_mean_pose);

% unnormalize data (vectorized; replaces the former per-row loop)
reconstructed_data = bsxfun(@plus, ...
    bsxfun(@times, reconstruction, column_max - column_min), column_min);

% compute reconstruction error
reconstructed_err = data - reconstructed_data;

% save reconstructed data to mat file
reconstruction_file = [data_dir data_file '_reconstructed.mat'];
save(reconstruction_file, 'reconstructed_data');
"content_hash": "0a89aba43c6b32dc1176671e3439e740",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 142,
"avg_line_length": 34.8,
"alnum_prop": 0.725287356321839,
"repo_name": "xiaozhuchacha/OpenBottle",
"id": "a3f0532d0cecb6941ba5eba1b301ddafe55038b9",
"size": "1740",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "old/pca/eigengrasp_reduction_example.m",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "17514"
},
{
"name": "C++",
"bytes": "153149"
},
{
"name": "CMake",
"bytes": "175576"
},
{
"name": "Component Pascal",
"bytes": "66739"
},
{
"name": "Java",
"bytes": "728775"
},
{
"name": "MATLAB",
"bytes": "15776"
},
{
"name": "Makefile",
"bytes": "160500"
},
{
"name": "Python",
"bytes": "8885703"
},
{
"name": "Shell",
"bytes": "10157"
}
],
"symlink_target": ""
} |
#include "config.h"
#include "WebPopupMenuImpl.h"
#include "Cursor.h"
#include "FramelessScrollView.h"
#include "FrameView.h"
#include "IntRect.h"
#include "painting/GraphicsContextBuilder.h"
#include "PlatformKeyboardEvent.h"
#include "PlatformMouseEvent.h"
#include "PlatformWheelEvent.h"
#include "SkiaUtils.h"
#include "WebInputEvent.h"
#include "WebInputEventConversion.h"
#include "WebRect.h"
#include "WebWidgetClient.h"
#include <skia/ext/platform_canvas.h>
using namespace WebCore;
namespace WebKit {
// WebPopupMenu ---------------------------------------------------------------
WebPopupMenu* WebPopupMenu::create(WebWidgetClient* client)
{
// Pass the WebPopupMenuImpl's self-reference to the caller.
return adoptRef(new WebPopupMenuImpl(client)).leakRef();
}
// WebWidget ------------------------------------------------------------------
WebPopupMenuImpl::WebPopupMenuImpl(WebWidgetClient* client)
: m_client(client)
, m_widget(0)
{
// set to impossible point so we always get the first mouse pos
m_lastMousePosition = WebPoint(-1, -1);
}
WebPopupMenuImpl::~WebPopupMenuImpl()
{
if (m_widget)
m_widget->setClient(0);
}
void WebPopupMenuImpl::Init(FramelessScrollView* widget, const WebRect& bounds)
{
m_widget = widget;
m_widget->setClient(this);
if (m_client) {
m_client->setWindowRect(bounds);
m_client->show(WebNavigationPolicy()); // Policy is ignored
}
}
// Forward a mouse-move to the popup widget, dropping redundant events that
// report an unchanged cursor position.
void WebPopupMenuImpl::MouseMove(const WebMouseEvent& event)
{
    if (event.x == m_lastMousePosition.x && event.y == m_lastMousePosition.y)
        return;

    m_lastMousePosition = WebPoint(event.x, event.y);
    m_widget->handleMouseMoveEvent(PlatformMouseEventBuilder(m_widget, event));
}
void WebPopupMenuImpl::MouseLeave(const WebMouseEvent& event)
{
m_widget->handleMouseMoveEvent(PlatformMouseEventBuilder(m_widget, event));
}
void WebPopupMenuImpl::MouseDown(const WebMouseEvent& event)
{
m_widget->handleMouseDownEvent(PlatformMouseEventBuilder(m_widget, event));
}
void WebPopupMenuImpl::MouseUp(const WebMouseEvent& event)
{
mouseCaptureLost();
m_widget->handleMouseReleaseEvent(PlatformMouseEventBuilder(m_widget, event));
}
void WebPopupMenuImpl::MouseWheel(const WebMouseWheelEvent& event)
{
m_widget->handleWheelEvent(PlatformWheelEventBuilder(m_widget, event));
}
bool WebPopupMenuImpl::KeyEvent(const WebKeyboardEvent& event)
{
return m_widget->handleKeyEvent(PlatformKeyboardEventBuilder(event));
}
// WebWidget -------------------------------------------------------------------
void WebPopupMenuImpl::close()
{
if (m_widget)
m_widget->hide();
m_client = 0;
deref(); // Balances ref() from WebWidget::Create
}
void WebPopupMenuImpl::resize(const WebSize& newSize)
{
if (m_size == newSize)
return;
m_size = newSize;
if (m_widget) {
IntRect newGeometry(0, 0, m_size.width, m_size.height);
m_widget->setFrameRect(newGeometry);
}
if (m_client) {
WebRect damagedRect(0, 0, m_size.width, m_size.height);
m_client->didInvalidateRect(damagedRect);
}
}
void WebPopupMenuImpl::animate()
{
}
void WebPopupMenuImpl::layout()
{
}
void WebPopupMenuImpl::paint(WebCanvas* canvas, const WebRect& rect)
{
if (!m_widget)
return;
if (!rect.isEmpty())
m_widget->paint(&GraphicsContextBuilder(canvas).context(), rect);
}
void WebPopupMenuImpl::themeChanged()
{
notImplemented();
}
void WebPopupMenuImpl::composite(bool finish)
{
notImplemented();
}
// Route a platform-independent input event to the matching Mouse*/KeyEvent
// handler. Returns true when the event is considered handled; mouse events
// are always reported handled (see TODO below), keyboard results are
// propagated from KeyEvent().
bool WebPopupMenuImpl::handleInputEvent(const WebInputEvent& inputEvent)
{
    if (!m_widget)
        return false;

    // TODO (jcampan): WebKit seems to always return false on mouse events
    // methods. For now we'll assume it has processed them (as we are only
    // interested in whether keyboard events are processed).
    switch (inputEvent.type) {
    case WebInputEvent::MouseMove:
        MouseMove(*static_cast<const WebMouseEvent*>(&inputEvent));
        return true;

    case WebInputEvent::MouseLeave:
        MouseLeave(*static_cast<const WebMouseEvent*>(&inputEvent));
        return true;

    case WebInputEvent::MouseWheel:
        MouseWheel(*static_cast<const WebMouseWheelEvent*>(&inputEvent));
        return true;

    case WebInputEvent::MouseDown:
        MouseDown(*static_cast<const WebMouseEvent*>(&inputEvent));
        return true;

    case WebInputEvent::MouseUp:
        MouseUp(*static_cast<const WebMouseEvent*>(&inputEvent));
        return true;

    // In Windows, RawKeyDown only has information about the physical key, but
    // for "selection", we need the information about the character the key
    // translated into. For English, the physical key value and the character
    // value are the same, hence, "selection" works for English. But for other
    // languages, such as Hebrew, the character value is different from the
    // physical key value. Thus, without accepting Char event type which
    // contains the key's character value, the "selection" won't work for
    // non-English languages, such as Hebrew.
    case WebInputEvent::RawKeyDown:
    case WebInputEvent::KeyDown:
    case WebInputEvent::KeyUp:
    case WebInputEvent::Char:
        return KeyEvent(*static_cast<const WebKeyboardEvent*>(&inputEvent));

    default:
        break;
    }
    return false;
}
void WebPopupMenuImpl::mouseCaptureLost()
{
}
void WebPopupMenuImpl::setFocus(bool enable)
{
}
bool WebPopupMenuImpl::setComposition(
const WebString& text, const WebVector<WebCompositionUnderline>& underlines,
int selectionStart, int selectionEnd)
{
return false;
}
bool WebPopupMenuImpl::confirmComposition()
{
return false;
}
bool WebPopupMenuImpl::confirmComposition(const WebString& text)
{
return false;
}
WebTextInputType WebPopupMenuImpl::textInputType()
{
return WebTextInputTypeNone;
}
WebRect WebPopupMenuImpl::caretOrSelectionBounds()
{
return WebRect();
}
void WebPopupMenuImpl::setTextDirection(WebTextDirection direction)
{
}
//-----------------------------------------------------------------------------
// WebCore::HostWindow
void WebPopupMenuImpl::invalidateContents(const IntRect&, bool)
{
notImplemented();
}
void WebPopupMenuImpl::invalidateWindow(const IntRect&, bool)
{
notImplemented();
}
void WebPopupMenuImpl::invalidateContentsAndWindow(const IntRect& paintRect, bool /*immediate*/)
{
if (paintRect.isEmpty())
return;
if (m_client)
m_client->didInvalidateRect(paintRect);
}
void WebPopupMenuImpl::invalidateContentsForSlowScroll(const IntRect& updateRect, bool immediate)
{
invalidateContentsAndWindow(updateRect, immediate);
}
void WebPopupMenuImpl::scheduleAnimation()
{
}
// HostWindow scroll notification: report the scrolled delta to the client.
// Note: scrollRect is accepted for interface compatibility but unused here;
// only the delta and the clip rect are forwarded.
void WebPopupMenuImpl::scroll(const IntSize& scrollDelta,
                              const IntRect& scrollRect,
                              const IntRect& clipRect)
{
    if (m_client) {
        int dx = scrollDelta.width();
        int dy = scrollDelta.height();
        m_client->didScrollRect(dx, dy, clipRect);
    }
}
IntPoint WebPopupMenuImpl::screenToWindow(const IntPoint& point) const
{
notImplemented();
return IntPoint();
}
IntRect WebPopupMenuImpl::windowToScreen(const IntRect& rect) const
{
notImplemented();
return IntRect();
}
void WebPopupMenuImpl::scrollRectIntoView(const IntRect&, const ScrollView*) const
{
// Nothing to be done here since we do not have the concept of a container
// that implements its own scrolling.
}
void WebPopupMenuImpl::scrollbarsModeDidChange() const
{
// Nothing to be done since we have no concept of different scrollbar modes.
}
void WebPopupMenuImpl::setCursor(const WebCore::Cursor&)
{
}
//-----------------------------------------------------------------------------
// WebCore::FramelessScrollViewClient
// FramelessScrollViewClient: the popup widget is going away. Detach from it
// and ask the client to tear down the containing WebWidget.
void WebPopupMenuImpl::popupClosed(FramelessScrollView* widget)
{
    ASSERT(widget == m_widget);
    if (m_widget) {
        m_widget->setClient(0);
        m_widget = 0;
    }
    // BUGFIX: close() nulls m_client, and every other callback in this file
    // guards before dereferencing it — this one did not, risking a null deref
    // if the widget closes after the client is gone.
    if (m_client)
        m_client->closeWidgetSoon();
}
} // namespace WebKit
| {
"content_hash": "8e29a00734381752864195c71ebb09d9",
"timestamp": "",
"source": "github",
"line_count": 323,
"max_line_length": 97,
"avg_line_length": 25.325077399380806,
"alnum_prop": 0.6755501222493887,
"repo_name": "Xperia-Nicki/android_platform_sony_nicki",
"id": "b4d4246c3f6010bcaf5021d4cd9681abfc393db5",
"size": "9742",
"binary": false,
"copies": "15",
"ref": "refs/heads/master",
"path": "external/webkit/Source/WebKit/chromium/src/WebPopupMenuImpl.cpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Ada",
"bytes": "89080"
},
{
"name": "Assembly",
"bytes": "212775"
},
{
"name": "Awk",
"bytes": "19252"
},
{
"name": "C",
"bytes": "68667466"
},
{
"name": "C#",
"bytes": "55625"
},
{
"name": "C++",
"bytes": "54670920"
},
{
"name": "CLIPS",
"bytes": "12224"
},
{
"name": "CSS",
"bytes": "283405"
},
{
"name": "D",
"bytes": "1931"
},
{
"name": "Java",
"bytes": "4882"
},
{
"name": "JavaScript",
"bytes": "19597804"
},
{
"name": "Objective-C",
"bytes": "5849156"
},
{
"name": "PHP",
"bytes": "17224"
},
{
"name": "Pascal",
"bytes": "42411"
},
{
"name": "Perl",
"bytes": "1632149"
},
{
"name": "Prolog",
"bytes": "214621"
},
{
"name": "Python",
"bytes": "3493321"
},
{
"name": "R",
"bytes": "290"
},
{
"name": "Ruby",
"bytes": "78743"
},
{
"name": "Scilab",
"bytes": "554"
},
{
"name": "Shell",
"bytes": "265637"
},
{
"name": "TypeScript",
"bytes": "45459"
},
{
"name": "XSLT",
"bytes": "11219"
}
],
"symlink_target": ""
} |
<?php
namespace Test\IC\SlimAuthHmac\Auth;
use IC\SlimAuthHmac\Auth\HmacManager;
class HmacManagerTest extends \PHPUnit_Framework_TestCase
{
    public function setUp()
    {
    }

    // Constructing the manager yields an instance of the expected class.
    public function testConstruct()
    {
        $manager = new HmacManager();
        $this->assertInstanceOf('IC\\SlimAuthHmac\\Auth\\HmacManager', $manager);
    }

    // A fresh manager starts with the default hashing algorithm.
    public function testAlgorithm()
    {
        $manager = new HmacManager();
        $this->assertEquals($manager::DEFAULT_ALGORITHM, $manager->getAlgorithm());
    }

    // Public and private keys round-trip through their accessors.
    public function testKeys()
    {
        $expectedPublic = sha1(uniqid());
        $expectedPrivate = sha1(uniqid());

        $manager = new HmacManager();
        $manager->setPublicKey($expectedPublic);
        $manager->setPrivateKey($expectedPrivate);

        $this->assertEquals($expectedPublic, $manager->getPublicKey());
        $this->assertEquals($expectedPrivate, $manager->getPrivateKey());
    }

    // generateHmac() must agree with PHP's native hash_hmac().
    public function testGenerateHmac()
    {
        $algorithm = 'sha256';
        $publicKey = sha1(uniqid());
        $privateKey = sha1(uniqid());
        $payload = sha1(uniqid());
        $expectedHash = hash_hmac($algorithm, $payload, $privateKey, false);

        $manager = new HmacManager();
        $manager->setAlgorithm($algorithm);
        $manager->setPublicKey($publicKey);
        $manager->setPrivateKey($privateKey);
        $manager->setHmacSignature($expectedHash);
        $manager->setPayload($payload);

        $this->assertEquals($manager->generateHmac(), $expectedHash);
    }

    // isValid() accepts a signature matching the freshly generated HMAC.
    public function testIsValid()
    {
        $algorithm = 'sha256';
        $publicKey = sha1(uniqid());
        $privateKey = sha1(uniqid());
        $payload = sha1(uniqid());
        $expectedHash = hash_hmac($algorithm, $payload, $privateKey, false);

        $manager = new HmacManager();
        $manager->setAlgorithm($algorithm);
        $manager->setPublicKey($publicKey);
        $manager->setPrivateKey($privateKey);
        $manager->setHmacSignature($expectedHash);
        $manager->setPayload($payload);

        $this->assertTrue($manager->isValid($expectedHash, $manager->generateHmac()));
    }
}
| {
"content_hash": "4f52cd9fb407ffa0b34522919db6ae99",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 91,
"avg_line_length": 30.116883116883116,
"alnum_prop": 0.6326002587322122,
"repo_name": "ilanco/slim-auth-hmac",
"id": "10aae23a1340b9a8dd83e7e6dbd7922f26cc470c",
"size": "2319",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/IC/SlimAuthHmac/Auth/HmacManagerTest.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "22570"
}
],
"symlink_target": ""
} |
import json
import re
from django.conf import settings
from django.contrib import admin
from django import forms
from django.template.loader import render_to_string
from django.urls import reverse, NoReverseMatch
from django.utils.html import format_html, mark_safe
from event_store.models import Event
from reviewer.filters import ReviewerOrganizationFilter, review_widget
from reviewer.message_sending import MessageSendingAdminMixin
from event_review.filters import (filter_with_emptyvalue,
CampaignFilter,
CollapsedListFilter,
EventAttendeeMaxFilter,
EventAttendeeCountFilter,
EventFullness,
EventMaxDateFilter,
EventMinDateFilter,
IsPrivateFilter,
HostStatusFilter,
PoliticalScopeFilter,
SortingFilter)
from huerta.filters import CollapsedListFilter, textinputfilter_factory
def host_format(modeladmin, event):
    """Build the HTML snippet describing an event's host for the admin list.

    Combines: host name with confirmation badge, a mailto link, an
    "Act as host" link, an optional send-message widget, extra items from the
    EVENT_REVIEW_CUSTOM_HOST_DISPLAY settings hook, and any extra HTML
    supplied by the event connector. Items are joined with a star glyph.
    """
    host = event.organization_host
    host_line = [format_html('{}', host)]
    host_items = []
    if event.host_is_confirmed:
        host_line.append(mark_safe(' (<span style="color:green">confirmed</span>) '))
    else:
        host_line.append(mark_safe(' (<span style="color:red">unconfirmed</span>) '))
    host_line.append(mark_safe('<br />'))
    if getattr(host, 'email', None):
        # NOTE(review): the trailing `host` argument is unused — the template
        # has only two placeholders. Harmless, but likely a leftover.
        host_items.append(format_html('<a data-system-pk="{}" href="mailto:{}">email</a>',
                                      host.member_system_pk or '',
                                      host.email,
                                      host))
    host_link = event.host_edit_url(edit_access=True)
    if host_link:
        host_items.append(format_html('<a href="{}">Act as host</a>', host_link))
    # Widget only shown when the host is emailable, a FROM address is
    # configured, and the admin class provides the widget.
    if (getattr(host, 'email', None)
        and getattr(settings, 'FROM_EMAIL', None)
        and getattr(modeladmin, 'send_message_widget', None)):
        host_items.append(modeladmin.send_message_widget(event))
    # give settings a chance to tweak/alter/add items
    customize_host_link = getattr(settings, 'EVENT_REVIEW_CUSTOM_HOST_DISPLAY', None)
    if callable(customize_host_link):
        host_items = customize_host_link(event, host_items)
    # from the connector
    extra_html=event.extra_management_html()
    if extra_html:
        host_items.append(extra_html)
    # Host name/status line leads the list of items.
    host_items.insert(0, ' '.join(host_line))
    return mark_safe(' <span class="glyphicon glyphicon-star-empty"></span>'.join(host_items))
def long_field(longtext, heading=''):
    """Wrap ``longtext`` in a small scrollable well, optionally preceded by
    ``heading``.

    Returns '' when there is nothing to show. ``heading`` is caller-supplied
    trusted HTML: it becomes part of the format template and is NOT escaped,
    while ``longtext`` is escaped by format_html.
    """
    if not longtext:
        return ''
    template = (heading
                + '<div style="max-height: 7.9em; max-width: 600px; overflow-y: auto" class="well well-sm">{}</div>')
    return format_html(template, longtext)
class EventDisplayAdminMixin:
    """Mixin providing ``event_list_display``, the single-cell HTML rendering
    of an Event row used by the admin changelist."""

    ## BEGIN display
    def event_list_display(self, obj, onecol=False):
        """Render ``obj`` (an Event) as a one- or two-column bootstrap row.

        When ``onecol`` is False the second column carries the private phone,
        event status, review widget and past internal notes; when True it is
        omitted entirely.
        """
        scope = obj.get_political_scope_display()
        if scope:
            scope = ' ({})'.format(scope)
        second_col = ''
        if not onecol:
            second_col = format_html(
                """
                <div class="col-md-6">
                <div><b>Private Phone:</b> {private_phone}</div>
                <div><b>Event Status:</b> {active_status}</div>
                {review_widget}
                {internal_notes}
                </div>
                """,
                private_phone=Event.phone_format(obj.private_phone),
                active_status=obj.status,
                review_widget=review_widget(obj, obj.organization_host_id),
                internal_notes=(long_field(obj.internal_notes,'<b>Past Notes</b>') if obj.internal_notes else '')
            )
        return format_html(
            """
            <div class="row">
            <div class="col-md-6">
            <h5>{title} ({pk}) {private}</h5>
            <div><b>Host:</b> {host}</div>
            <div><b>Where:</b>{political_scope}
            <div>{venue}</div>
            <div>{address}</div>
            <div>{city}, {state}</div>
            </div>
            <div><b>When:</b> {when}</div>
            <div><b>Attendees:</b> {attendee_count}{max_attendees}</div>
            <div><b>Description</b> {description}</div>
            <div><b>Directions</b> {directions}</div>
            <div><b>Note to Attendees</b> {note_to_attendees}</div>
            </div>
            {second_col}
            </div>
            """,
            title=obj.title,
            pk=obj.organization_source_pk,
            venue=obj.venue,
            address='%s %s' % (obj.address1, obj.address2),
            city=obj.city,
            state=obj.state,
            political_scope=scope,
            when=obj.starts_at.strftime('%c'),
            attendee_count=obj.attendee_count,
            # Show "count/max" only when a maximum is set.
            max_attendees=('/%s' % obj.max_attendees
                           if obj.max_attendees else ''),
            private=(mark_safe('<div class="label label-danger">Private</div>')
                     if obj.is_private else ''),
            host=host_format(self, obj),
            second_col=second_col,
            description=long_field(obj.public_description),
            directions=long_field(obj.directions),
            note_to_attendees=long_field(obj.note_to_attendees))
    ## END display
@admin.register(Event)
class EventAdmin(MessageSendingAdminMixin, admin.ModelAdmin, EventDisplayAdminMixin):
    """Read-only Event changelist with extensive filtering and host messaging.

    NOTE: MessageSendingAdminMixin must appear before admin.ModelAdmin in order
    for its get_urls() method to be used.
    """

    def changelist_view(self, request, extra_context=None):
        """Inject the page title while preserving any caller-supplied context.

        FIX: the previous implementation replaced ``extra_context`` outright,
        silently discarding anything a caller had passed in.
        """
        extra_context = dict(extra_context or {}, title='Event admin tool')
        return super(EventAdmin, self).changelist_view(request, extra_context=extra_context)

    change_list_template = "admin/change_list_filters_top.html"  # part of huerta
    filters_collapsable = True
    filters_require_submit = True
    disable_list_headers = True
    list_striped = True

    # The whole row is rendered by EventDisplayAdminMixin.event_list_display.
    list_display = ('event_list_display',)
    list_filter = (ReviewerOrganizationFilter,
                   ('organization_campaign', CampaignFilter),
                   ('organization_status_review', filter_with_emptyvalue('new')),
                   ('organization_status_prep', filter_with_emptyvalue('unclaimed')),
                   ('state', CollapsedListFilter),
                   ('political_scope', PoliticalScopeFilter),
                   IsPrivateFilter,
                   EventMinDateFilter, EventMaxDateFilter,
                   ('status', CollapsedListFilter),
                   HostStatusFilter,
                   EventAttendeeMaxFilter,
                   EventAttendeeCountFilter,
                   EventFullness,
                   SortingFilter,
                   textinputfilter_factory('title',
                                           'title'),
                   textinputfilter_factory('host email',
                                           'organization_host__email'),
                   textinputfilter_factory('host name',
                                           'organization_host__name'),
                   textinputfilter_factory('city',
                                           'city'),
                   textinputfilter_factory('zip',
                                           'zip'),
                   textinputfilter_factory('event ID number (comma-separated)',
                                           'organization_source_pk',
                                           accept_multiple=True),
                   )
    list_display_links = None

    def get_actions(self, request):
        """Remove the bulk-delete action; events are never deleted here."""
        actions = super().get_actions(request) or {}
        if 'delete_selected' in actions:
            del actions['delete_selected']
        return actions

    def has_delete_permission(self, request, obj=None):
        """Events are read-only in this admin; deletion is always refused."""
        return False

    def has_add_permission(self, request, obj=None):
        """Events come from external sources and are never created here."""
        return False

    def get_queryset(self, *args, **kw):
        """Prefetch the relations the row renderer touches (avoids N+1)."""
        qs = super(EventAdmin, self).get_queryset(*args, **kw)
        qs = qs.select_related('organization_host', 'organization_source', 'organization')
        return qs

    ## BEGIN EventAdmin Message Sending
    send_a_message_placeholder = 'Optional message to host. Email will include a link to manage the event.'

    def obj_person_noun(self):
        """Label used by the message-sending UI for recipients."""
        return 'host(s)'

    def message_template(self, message, event, user=None):
        """Build the to/subject/body dict for a message to an event's host(s).

        NOTE: This takes a while to render, almost entirely because
        get_host_event_link needs to get a login token by AK API, which takes
        some time.  This will SLOW mass message delivery
        """
        src = event.organization_source
        host_link = src.api.get_host_event_link(event, edit_access=True,
                                                host_id=event.organization_host.member_system_pk,
                                                confirm=True)
        email_subject = 'Regarding your event'  # 'Regarding your event with %s' % event.organization.title
        message = render_to_string(
            'event_review/message_to_host_email.html',
            {'host_name': event.organization_host.name,
             'event': event,
             'source': src.name,
             'link': host_link,
             'message': message,
             'footer': getattr(settings, 'EVENT_REVIEW_MESSAGE_FOOTER',
                               "\nThanks for all you do.")})
        # Primary host first; some source APIs can report co-hosts as well.
        to_list = [event.organization_host.email]
        if hasattr(src.api, 'get_additional_hosts'):
            additional_hosts = src.api.get_additional_hosts(event)
            for ahost in additional_hosts:
                if ahost.get('email'):
                    to_list.append(ahost['email'])
        return {
            'to': to_list,
            'subject': email_subject,
            'message_text': message,
            'from_line': settings.FROM_EMAIL,
        }

    def message_obj_lookup(self, event_id, organization, request):
        """Return the Event for ``event_id`` only if it can be messaged:
        it must have a host with an email and a source API able to mint
        host event links.  Returns None otherwise.
        """
        event = Event.objects.filter(id=event_id).select_related('organization_host', 'organization').first()
        if event and event.organization_host_id and event.organization_host.email:
            src = event.organization_source
            if src and hasattr(src.api, 'get_host_event_link'):
                return event

    def obj2subjectid(self, event):
        """Key message threads by the host's id in the source membership system."""
        return event.organization_host.member_system_pk
    ## END EventAdmin Message Sending
| {
"content_hash": "2fcc1637b03a3090df274bc769162505",
"timestamp": "",
"source": "github",
"line_count": 252,
"max_line_length": 124,
"avg_line_length": 42.32142857142857,
"alnum_prop": 0.5483356774496015,
"repo_name": "MoveOnOrg/eventroller",
"id": "fdc5ecf0ce0df278b42007ec6d83f55b3ad63284",
"size": "10665",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "event_review/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "609"
},
{
"name": "HTML",
"bytes": "7391"
},
{
"name": "JavaScript",
"bytes": "24435"
},
{
"name": "Python",
"bytes": "182608"
}
],
"symlink_target": ""
} |
namespace touchstone {
// Clock: prefer high_resolution_clock, falling back to steady_clock whenever
// high_resolution_clock is not monotonic on this platform.
using Clock = std::conditional<std::chrono::high_resolution_clock::is_steady,
                               std::chrono::high_resolution_clock,
                               std::chrono::steady_clock>::type;

// Callback signature converting two time points into an elapsed value.
using ElapseRecorder = double (*)(Clock::time_point, Clock::time_point);

// Builds a recorder reporting the elapsed time between two time points,
// truncated to whole units of Duration (e.g. nanoseconds).
template <class Duration>
ElapseRecorder make_elapse_recorder() {
  return [](Clock::time_point begin, Clock::time_point end) -> double {
    return static_cast<double>(
        std::chrono::duration_cast<Duration>(end - begin).count());
  };
}
// A named set of benchmarks that are all driven by the same runner callback
// and swept over the same enumeration range of the size parameter n.
class BenchmarkSet {
  // Sentinel id reported when no benchmark is currently executing.
  static const int kNullBenchmarkId = -1;

 public:
  using Runner = std::function<void(BenchmarkSet&)>;

  BenchmarkSet(std::string&& name, Runner&& runner)
      : _name(std::move(name)),
        _runner(std::move(runner)),
        _elapse_recorder(make_elapse_recorder<std::chrono::nanoseconds>()) {}

  // Registers a benchmark, assigns it a sequential id, and returns that id.
  // A duplicate benchmark name aborts the program.
  int add(const Benchmark& benchmark) {
    int id = _benchmarks.size();
    auto was_inserted = _benchmarks.insert(benchmark);
    if (!was_inserted.second) {
      // FIX: corrected "bechmark" typo in the diagnostic.
      std::cerr << "benchmark " << benchmark.name()
                << " is not unique for benchmark " << _name << std::endl;
      std::abort();
    }
    was_inserted.first->set_id(id);
    return id;
  }

  // Executes every registered benchmark num_epochs x num_trials times for
  // each value of n in the enumeration range.
  void run() {
    assert(_runner && "runner is not set");
    assert(_num_trials > 0 && "num trials is not set");
    assert(_num_epochs > 0 && "num epochs is not set");
    // run through the benchmark set a single time
    // to invoke callbacks that add each individual
    // benchmark
    State::get_n() =
        _enumeration_range->begin();  // make sure we use a valid value of n
    _runner(*this);
    // repeatedly invoke the runner function until each benchmark
    // has been run range_size x num_epochs x num_trials times.
    // The global random number generator should be identically
    // seeded for every (range_value, trial_id) pair.
    std::random_device random_device;
    for (_epoch_id = 0; _epoch_id < _num_epochs; ++_epoch_id) {
      _n = _enumeration_range->begin();
      int last = _enumeration_range->end();
      for (; _n < last; _n = _enumeration_range->advance(_n)) {
        State::get_n() = _n;
        auto random_seed = random_device();
        for (auto& benchmark : _benchmarks) {
          _current_benchmark = &benchmark;
          for (int trial_id = 0; trial_id < _num_trials; ++trial_id) {
            State::get_random_number_generator().seed(random_seed);
            _runner(*this);
          }
        }
      }
    }
    _current_benchmark = nullptr;
  }

  // Records a pre-computed elapsed value for the active benchmark.
  void record_elapse(double elapse) {
    _current_benchmark->add_result(_n, _epoch_id, elapse);
  }

  // Records the elapsed time between two time points using the configured
  // elapse recorder (nanoseconds by default).
  void record_elapse(Clock::time_point a, Clock::time_point b) {
    _current_benchmark->add_result(_n, _epoch_id, _elapse_recorder(a, b));
  }

  void set_enumeration_range(
      std::shared_ptr<const EnumerationRange>&& enumeration_range) {
    _enumeration_range = std::move(enumeration_range);
  }

  void set_elapse_recorder(ElapseRecorder elapse_recorder) {
    _elapse_recorder = elapse_recorder;
  }

  // Writes "<name>.tsv": one row per value of n, and one avg/worst column
  // pair per benchmark.
  void print_results() const {
    std::ofstream out(_name + ".tsv");
    print_header(out);
    int last = _enumeration_range->end();
    for (int n = _enumeration_range->begin(); n < last;
         n = _enumeration_range->advance(n)) {
      print_row(n, out);
    }
  }

  const std::string& name() const { return _name; }
  void set_num_epochs(int num_epochs) { _num_epochs = num_epochs; }
  void set_num_trials(int num_trials) { _num_trials = num_trials; }
  int num_epochs() const { return _num_epochs; }
  int num_trials() const { return _num_trials; }

  // Id of the benchmark currently being run, or kNullBenchmarkId when no
  // run is in progress.
  int current_benchmark_id() const {
    if (_current_benchmark)
      return _current_benchmark->id();
    else
      return kNullBenchmarkId;
  }

 private:
  void print_header(std::ostream& out) const {
    out << "n";
    for (const auto& benchmark : _benchmarks) {
      const auto& name = benchmark.name();
      out << "\t" << name << "_avg"
          << "\t" << name << "_worst";
    }
    out << "\n";
  }

  void print_row(int n, std::ostream& out) const {
    out << n;
    for (const auto& benchmark : _benchmarks) {
      print_benchmark_results(n, benchmark, out);
    }
    out << "\n";
  }

  void print_benchmark_results(int n, const Benchmark& benchmark,
                               std::ostream& out) const {
    const auto& epoch_trials = benchmark.get_epochs(n);
    std::vector<double> epochs;
    epochs.reserve(_num_epochs);
    // estimate the elapse time for each (range_value, epoch_id) pair. This
    // follows the practice of the facebooks folly library and uses the
    // minimum of the collection as the estimator.
    for (const auto& trials : epoch_trials) {
      assert(!trials.empty() && "the trial cannot be empty");
      double min = *std::min_element(trials.begin(), trials.end());
      epochs.push_back(min);
    }
    // BUG FIX: seed the accumulator with 0.0, not 0.  With an int seed,
    // std::accumulate performs the accumulation in int and truncates every
    // double it adds, silently corrupting the average.
    double avg =
        std::accumulate(epochs.begin(), epochs.end(), 0.0) / epochs.size();
    double worst;
    if (epochs.size() > 1)
      worst = *std::max_element(epochs.begin(), epochs.end());
    else
      worst = avg;
    out << "\t" << avg << "\t" << worst;
  }

  std::string _name;
  Runner _runner;
  int _num_epochs = 1;
  int _num_trials = 1;
  std::shared_ptr<const EnumerationRange> _enumeration_range =
      std::make_shared<EnumerationRange>();
  boost::container::flat_set<Benchmark> _benchmarks;
  int _n;         // current value of the size parameter during run()
  int _epoch_id;  // current epoch index during run()
  Benchmark* _current_benchmark = nullptr;
  ElapseRecorder _elapse_recorder;
};
// Two benchmark sets are considered identical when they share a name.
inline bool operator==(const BenchmarkSet& a, const BenchmarkSet& b) {
  return a.name().compare(b.name()) == 0;
}

// Benchmark sets order lexicographically by name.
inline bool operator<(const BenchmarkSet& a, const BenchmarkSet& b) {
  return a.name().compare(b.name()) < 0;
}
} // end namespace
| {
"content_hash": "b3bbef0d8cf6eed26f2a85e9f434c43d",
"timestamp": "",
"source": "github",
"line_count": 182,
"max_line_length": 77,
"avg_line_length": 31.642857142857142,
"alnum_prop": 0.6119117902413613,
"repo_name": "rnburn/touchstone",
"id": "08ed09a73b11a94542619675efa15b2dc0c1ccad",
"size": "6095",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "benchmark_set.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "18192"
},
{
"name": "CMake",
"bytes": "344"
}
],
"symlink_target": ""
} |
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>W29551_text</title>
<link rel="stylesheet" type="text/css" href="style.css" />
</head>
<body>
<div style="margin-left: auto; margin-right: auto; width: 800px; overflow: hidden;">
<div style="float: left;">
<a href="page24.html">«</a>
</div>
<div style="float: right;">
</div>
</div>
<hr/>
<div style="position: absolute; margin-left: 495px; margin-top: 302px;">
<p class="styleSans24.0000<enum PANGO_WEIGHT_NORMAL of type PangoWeight><enum PANGO_STYLE_NORMAL of type PangoStyle>"></p>
</div>
<div style="position: absolute; margin-left: 275px; margin-top: 440px;">
<p class="styleSans22.0000<enum PANGO_WEIGHT_NORMAL of type PangoWeight><enum PANGO_STYLE_NORMAL of type PangoStyle>">L, CQnfilfifintal </p>
</div>
<div style="position: absolute; margin-left: 522px; margin-top: 715px;">
<p class="styleSans17.0000<enum PANGO_WEIGHT_NORMAL of type PangoWeight><enum PANGO_STYLE_NORMAL of type PangoStyle>">Continental Resources <br/>Dunn County, North Dakota (NAD 83) Bonneville 4-23H1 <br/>Wellbore #1 </p>
</div>
<div style="position: absolute; margin-left: 522px; margin-top: 1319px;">
<p class="styleSans12.0000<enum PANGO_WEIGHT_NORMAL of type PangoWeight><enum PANGO_STYLE_NORMAL of type PangoStyle>">Plan: Design #2 </p>
</div>
<div style="position: absolute; margin-left: 522px; margin-top: 1567px;">
<p class="styleSans16.0000<enum PANGO_WEIGHT_NORMAL of type PangoWeight><enum PANGO_STYLE_NORMAL of type PangoStyle>">Standard <br/>26 August, 2014 </p>
</div>
<div style="position: absolute; margin-left: 981px; margin-top: 1567px;">
<p class="styleSans17.0000<enum PANGO_WEIGHT_NORMAL of type PangoWeight><enum PANGO_STYLE_NORMAL of type PangoStyle>">Planning </p>
</div>
<div style="position: absolute; margin-left: 1413px; margin-top: 1567px;">
<p class="styleSans16.0000<enum PANGO_WEIGHT_NORMAL of type PangoWeight><enum PANGO_STYLE_NORMAL of type PangoStyle>">Report </p>
</div>
<div style="position: absolute; margin-left: 1045px; margin-top: 2722px;">
<p class="styleSans24.0000<enum PANGO_WEIGHT_NORMAL of type PangoWeight><enum PANGO_STYLE_NORMAL of type PangoStyle>">#5 Energy Service-$9 <br/>WWW.MSENERGYSERVICES.COM </p>
</div>
</body>
</html>
| {
"content_hash": "fb5f49eb7da8b6f9f1eb84270652cc27",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 223,
"avg_line_length": 48.07843137254902,
"alnum_prop": 0.6757748776508973,
"repo_name": "datamade/elpc_bakken",
"id": "2c6dc574b337f0690ab641421e1712d6966df3f6",
"size": "2459",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ocr_extracted/W29551_text/page25.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "17512999"
},
{
"name": "HTML",
"bytes": "421900941"
},
{
"name": "Makefile",
"bytes": "991"
},
{
"name": "Python",
"bytes": "7186"
}
],
"symlink_target": ""
} |
# This module deliberately fails at import time (presumably a fixture for
# exercising failed-bundle handling -- confirm against the test suite).
raise ImportError("Invalid bundle")
| {
"content_hash": "90b81787e90a8ad013a195fd12579bf7",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 35,
"avg_line_length": 36,
"alnum_prop": 0.8055555555555556,
"repo_name": "tcalmant/ipopo",
"id": "cbc1e1cbf2ad06ff06fd4a2d6079237557e89e17",
"size": "87",
"binary": false,
"copies": "3",
"ref": "refs/heads/v1",
"path": "tests/framework/vault/pkg_fail/pkg2/invalid.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2183067"
}
],
"symlink_target": ""
} |
package com.one.action_dept;
import java.io.IOException;
import java.util.ArrayList;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.one.dao.DeptDao;
import com.one.vo.Dept;
/**
 * Servlet that loads every department and forwards the list to
 * departments.jsp under the request attribute "deptList".
 */
@WebServlet("/GetAllDeptAction")
public class GetAllDeptAction extends HttpServlet {
	private static final long serialVersionUID = 1L;

	/** DAO used to load departments. */
	private DeptDao deptDao = new DeptDao();

	protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
		// FIX: assign the DAO result directly; the previous code allocated an
		// empty ArrayList that was immediately discarded.
		ArrayList<Dept> list = deptDao.getAllDept();
		request.setAttribute("deptList", list);
		request.getRequestDispatcher("departments.jsp").forward(request, response);
	}

	/** POST is handled identically to GET. */
	protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
		doGet(request, response);
	}
}
| {
"content_hash": "fcd8d351e3f687e1e72db96958c52536",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 119,
"avg_line_length": 32.45161290322581,
"alnum_prop": 0.8031809145129225,
"repo_name": "qql7267/ICSSshixi",
"id": "87b1e0798a1d991d7178f691c87d95cbcc72bcce",
"size": "1006",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CoolMeeting/src/com/one/action_dept/GetAllDeptAction.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "6728"
},
{
"name": "HTML",
"bytes": "306"
},
{
"name": "Java",
"bytes": "304763"
}
],
"symlink_target": ""
} |
from pylib.base import base_test_result
class InstrumentationTestResult(base_test_result.BaseTestResult):
  """Result information for a single instrumentation test."""

  def __init__(self, full_name, test_type, dur, log=''):
    """Construct an InstrumentationTestResult object.

    Args:
      full_name: Full name of the test, typically 'ClassName#methodName'.
      test_type: Type of the test result as defined in ResultType.
      dur: Duration of the test run in milliseconds.
      log: A string listing any errors.
    """
    super().__init__(full_name, test_type, dur, log)
    # FIX: split on the *last* '#' only.  Without maxsplit, rsplit splits on
    # every '#', so a name containing more than one '#' would yield a
    # truncated class name and the wrong (middle) piece as the test name.
    name_pieces = full_name.rsplit('#', 1)
    if len(name_pieces) > 1:
      self._test_name = name_pieces[1]
      self._class_name = name_pieces[0]
    else:
      self._class_name = full_name
      self._test_name = full_name

  def SetDuration(self, duration):
    """Set the test duration.

    Args:
      duration: Duration in milliseconds.
    """
    self._duration = duration
| {
"content_hash": "4c92c51f80f0b2b10501e996d1b6e912",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 66,
"avg_line_length": 32.592592592592595,
"alnum_prop": 0.6579545454545455,
"repo_name": "chromium/chromium",
"id": "e7893bfdf7649ba1e68dedcb3f56c37651c1dad7",
"size": "1022",
"binary": false,
"copies": "11",
"ref": "refs/heads/main",
"path": "build/android/pylib/instrumentation/test_result.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
# Build stage: compile the agent as a static binary (CGO disabled) so it can
# run on a scratch base image.
FROM eu.gcr.io/kyma-project/external/golang:1.19.3-alpine3.16 as builder

ARG DOCK_PKG_DIR=/compass-runtime-agent
WORKDIR $DOCK_PKG_DIR
COPY . $DOCK_PKG_DIR

# Minimal passwd entry so the scratch image can run as a non-root user.
# NOTE(review): gid "5534" looks like a typo for 65534 (nogroup) -- confirm.
RUN echo "nobody:x:65534:5534:nobody:/:" > /etc_passwd

RUN CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o compass-runtime-agent ./cmd

# Certificate stage: scratch has no CA roots, so copy them from alpine.
FROM eu.gcr.io/kyma-project/external/alpine:3.16.2 as certs
RUN apk add -U --no-cache ca-certificates

# Final stage: only the binary, CA certificates and the passwd entry.
FROM scratch
LABEL source=git@github.com:kyma-project/kyma.git

WORKDIR /app

COPY --from=certs /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/
COPY --from=builder $DOCK_PKG_DIR/compass-runtime-agent .
COPY --from=builder /etc_passwd /etc/passwd

# Drop privileges for the runtime container.
USER nobody

CMD ["/app/compass-runtime-agent"]
| {
"content_hash": "ebc6454dcc4d0526056669564968ff87",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 90,
"avg_line_length": 26.25925925925926,
"alnum_prop": 0.7517630465444288,
"repo_name": "kyma-project/kyma",
"id": "a8298c8d07088da37098750d23074a13f10aaff7",
"size": "709",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "components/compass-runtime-agent/Dockerfile",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "26889"
},
{
"name": "Gherkin",
"bytes": "20860"
},
{
"name": "Go",
"bytes": "3700670"
},
{
"name": "JavaScript",
"bytes": "277880"
},
{
"name": "Makefile",
"bytes": "51042"
},
{
"name": "Mustache",
"bytes": "84812"
},
{
"name": "Python",
"bytes": "12593"
},
{
"name": "Shell",
"bytes": "46102"
},
{
"name": "Smarty",
"bytes": "48181"
}
],
"symlink_target": ""
} |
using System.Collections.Generic;
using System.Linq;
using JetBrains.TestFramework.Utils;
using JetBrains.Util;
using NUnit.Framework;
namespace XunitContrib.Runner.ReSharper.Tests.AcceptanceTests.Source
{
    /// <summary>
    /// Base class for data-driven source-discovery acceptance tests: every
    /// .cs file found in the test-data directory becomes one TestFile case.
    /// </summary>
    [Category("Source discovery")]
    public abstract class XunitSourceTest : XunitSourceTestBase
    {
        /// <summary>Test data lives under Exploration\{suffix}.</summary>
        protected override string RelativeTestDataPath
        {
            get { return @"Exploration\" + RelativeTestDataPathSuffix; }
        }

        /// <summary>Per-derived-class subdirectory of Exploration.</summary>
        protected abstract string RelativeTestDataPathSuffix { get; }

        // NUnit resolves the case source by name, so the string below must
        // match the method name defined further down.
        [TestCaseSource("GetAllCSharpFilesInDirectory")]
        public void TestFile(string filename)
        {
            DoTestSolution(GetTestDataFilePath(filename));
        }

        // ReSharper disable once MemberCanBePrivate.Global
        public IEnumerable<string> GetAllCSharpFilesInDirectory()
        {
            InferProductHomeDir();
            return TestDataPath2.GetChildFiles("*.cs").Select(path => path.Name);
        }

        private void InferProductHomeDir()
        {
            // TestCases is called before the environment fixture is run, which would either
            // ensure Product.Root exists, or infer %JetProductHomeDir%. By using SoutionItemsBasePath
            // in TestCases, we fallback to the non-extension friendly implementation that expects
            // the source to be laid out as if we're building the product, not an extension, and it
            // requires Product.Root. We'll infer it instead.
            TestUtil.SetHomeDir(GetType().Assembly);
        }
    }
}
"content_hash": "26c2e2cd14fb7b75cbcc6ec5e7b5d2be",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 103,
"avg_line_length": 37.23809523809524,
"alnum_prop": 0.670076726342711,
"repo_name": "xunit/resharper-xunit",
"id": "c0fe34f005655e04514243129214db63ee995076",
"size": "1564",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "resharper/test/src/tests/AcceptanceTests/Source/XunitSourceTest.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "6639"
},
{
"name": "C#",
"bytes": "588173"
},
{
"name": "HTML",
"bytes": "74054"
},
{
"name": "Visual Basic",
"bytes": "28289"
},
{
"name": "XSLT",
"bytes": "81460"
}
],
"symlink_target": ""
} |
# Auto-generated balena base image: Go 1.17.7 on Debian Bullseye (armv7hf),
# "run" variant for the var-som-mx6 device.
FROM balenalib/var-som-mx6-debian:bullseye-run

ENV GO_VERSION 1.17.7

# gcc for cgo
RUN apt-get update && apt-get install -y --no-install-recommends \
		g++ \
		gcc \
		libc6-dev \
		make \
		pkg-config \
		git \
	&& rm -rf /var/lib/apt/lists/*

# Fetch a pinned Go toolchain from balena's package mirror, verify its
# sha256, and unpack it into /usr/local/go.
RUN set -x \
	&& fetchDeps=' \
		curl \
	' \
	&& apt-get update && apt-get install -y $fetchDeps --no-install-recommends && rm -rf /var/lib/apt/lists/* \
	&& mkdir -p /usr/local/go \
	&& curl -SLO "http://resin-packages.s3.amazonaws.com/golang/v$GO_VERSION/go$GO_VERSION.linux-armv7hf.tar.gz" \
	&& echo "e4f33e7e78f96024d30ff6bf8d2b86329fc04df1b411a8bd30a82dbe60f408ba go$GO_VERSION.linux-armv7hf.tar.gz" | sha256sum -c - \
	&& tar -xzf "go$GO_VERSION.linux-armv7hf.tar.gz" -C /usr/local/go --strip-components=1 \
	&& rm -f go$GO_VERSION.linux-armv7hf.tar.gz

# Standard GOPATH layout; world-writable so any container user can build.
ENV GOROOT /usr/local/go
ENV GOPATH /go
ENV PATH $GOPATH/bin:/usr/local/go/bin:$PATH
RUN mkdir -p "$GOPATH/src" "$GOPATH/bin" && chmod -R 777 "$GOPATH"
WORKDIR $GOPATH

CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]

# Smoke-test the installed Go stack using balena's shared test script.
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/613d8e9ca8540f29a43fddf658db56a8d826fffe/scripts/assets/tests/test-stack@golang.sh" \
	&& echo "Running test-stack@golang" \
	&& chmod +x test-stack@golang.sh \
	&& bash test-stack@golang.sh \
	&& rm -rf test-stack@golang.sh

# Record image details for the balena-info command.
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Debian Bullseye \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nGo v1.17.7 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info

# Wrap /bin/sh so the first shell invocation prints the image info banner,
# then restores the real shell.
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
	&& chmod +x /bin/sh-shim \
	&& cp /bin/sh /bin/sh.real \
	&& mv /bin/sh-shim /bin/sh
"content_hash": "7f5f8501e1b52f16298d4615521c270e",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 674,
"avg_line_length": 50.82608695652174,
"alnum_prop": 0.7065868263473054,
"repo_name": "resin-io-library/base-images",
"id": "c733d01bd4962614cee4ba82c4155a034a62337b",
"size": "2359",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "balena-base-images/golang/var-som-mx6/debian/bullseye/1.17.7/run/Dockerfile",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "71234697"
},
{
"name": "JavaScript",
"bytes": "13096"
},
{
"name": "Shell",
"bytes": "12051936"
},
{
"name": "Smarty",
"bytes": "59789"
}
],
"symlink_target": ""
} |
package com.github.cqljmeter.sampler;
/**
 * BeanInfo for {@link CqlSampler}: wires the sampler class into the shared
 * property wiring defined by {@link AbstractCqlSamplerBeanInfo}.
 */
public class CqlSamplerBeanInfo extends AbstractCqlSamplerBeanInfo {

	public CqlSamplerBeanInfo() {
		super(CqlSampler.class);
	}
}
| {
"content_hash": "dea18aa94f92dbc63abd0c1eb045c1c1",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 68,
"avg_line_length": 13.615384615384615,
"alnum_prop": 0.7796610169491526,
"repo_name": "Mishail/CqlJmeter",
"id": "fcf5d1534c4fab4c8cc56255219869e612b91ada",
"size": "1334",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/com/github/cqljmeter/sampler/CqlSamplerBeanInfo.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "18283"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?><!--
Copyright (c) 2014 Michal Dabski
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-->
<resources>
<string name="app_name">File Manager Pro</string>
<string name="title_activity_about">Informazioni</string>
<string name="cannot_read_directory">Impossibile leggere la directory %s</string>
<string name="open_file_with_">Apri %s con:</string>
<string name="size_s">Dimensione: %s</string>
<string name="folder">%d file</string>
<string name="open_drawer">Apri drawer</string>
<string name="close_drawer">Chiudi drawer</string>
<string name="remove_favourite">Rimuovi preferito</string>
<string name="folder_empty">Cartella vuota</string>
<string name="delete">Elimina</string>
<string name="multi_objects">%d file</string>
<string name="delete_d_items_">Eliminare %d file?</string>
<string name="select_all">Seleziona tutti</string>
<string name="rename">Rinomina</string>
<string name="share">Condividi</string>
<string name="multi_files_deleted">%d file eliminati</string>
<string name="add_to_homescreen">Crea collegamento</string>
<string name="add_to_homescreen_multiple">Crea collegamenti</string>
<string name="shortcut_created">Collegamento aggiunto alla homescreen</string>
<string name="root">Root</string>
<string name="cut">Taglia</string>
<string name="paste_here">Incolla qui</string>
<string name="objects_cut_to_clipboard">File copiato negli appunti</string>
<string name="files_pasted">File incollato</string>
<string name="clipboard_is_empty">Gli appunti sono vuoti</string>
<string name="refresh">Ricarica</string>
<string name="copy">Copia</string>
<string name="objects_copied_to_clipboard">File copiati negli appunti</string>
<string name="create_folder">Crea cartella</string>
<string name="create">Crea</string>
<string name="folder_created_successfully">Cartella creata con successo</string>
<string name="loading_">Caricamento…</string>
<string name="folder_created_failed">La cartella non è stata creata</string>
<string name="pasting_files_">Incollando i file…</string>
<string name="downloads">Download</string>
<string name="music">Musica</string>
<string name="pictures">Immagini</string>
<string name="movies">Video</string>
<string name="dcim">DCIM</string>
<string name="credits">App sviluppata da Michal Dabski www.michaldabski.com
\n
\n
Icone in-app da www.iconsdb.com</string>
<string name="app_icon">App icon</string>
<string name="up">Su</string>
<string name="error">Errore</string>
<string name="rename_file">Rinomina File</string>
<string name="rename_folder">Rinomina Cartella</string>
<string name="file_renamed">File rinominato</string>
<string name="file_could_not_be_renamed_to_s">Il File non può essere rinominato in %s</string>
<string name="photos">Foto</string>
<string name="add_favourite">Aggiungi ai preferiti</string>
<string name="other_apps">Altre app</string>
<string name="feedback">Feedback</string>
<string name="file_info">Informazioni sul File</string>
<string name="mime_type_s">MIME Type: %s</string>
</resources>
| {
"content_hash": "51277512a6a17a3c35e1c206492425ee",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 98,
"avg_line_length": 52.851851851851855,
"alnum_prop": 0.7154870357393133,
"repo_name": "AiJiaZone/filemanager",
"id": "cd46bf70d9bbcb5029bc745440f1df9ea171753f",
"size": "4287",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "res/values-it/strings.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "197409"
}
],
"symlink_target": ""
} |
# Migration creating the listings table: one row per property listing, with
# pricing, layout, location and display flags.
class CreateListings < ActiveRecord::Migration
  def change
    create_table :listings do |t|
      t.string :type_of            # listing category -- presumably rental vs. sale; confirm
      t.integer :price
      t.integer :bed_count
      t.integer :bath_count
      t.string :neighborhood
      t.boolean :featured          # flag used to surface selected listings -- confirm usage
      t.string :address
      t.text :description
      t.boolean :exclusive
      t.boolean :no_fee
      t.string :main_photo_url
      t.timestamps null: false     # created_at / updated_at, NOT NULL
    end
  end
end
| {
"content_hash": "0fe75194cf838b4d36ce0153722e5175",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 46,
"avg_line_length": 22.789473684210527,
"alnum_prop": 0.6304849884526559,
"repo_name": "jennyheath/lions-share",
"id": "55e500c23e4ead353a6f0bf06e6dc1f41be7d35b",
"size": "433",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "db/migrate/20150717021408_create_listings.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "28340"
},
{
"name": "HTML",
"bytes": "115005"
},
{
"name": "JavaScript",
"bytes": "2983377"
},
{
"name": "Ruby",
"bytes": "60195"
}
],
"symlink_target": ""
} |
<?php
namespace NS\SentinelBundle\Entity;
use DateTime;
use Doctrine\ORM\Mapping as ORM;
use InvalidArgumentException;
use JMS\Serializer\Annotation as Serializer;
use NS\SentinelBundle\Entity\ValueObjects\YearMonth;
use NS\SentinelBundle\Form\Types\CaseStatus;
use NS\SentinelBundle\Form\Types\Gender;
use NS\SentinelBundle\Form\Types\TripleChoice;
use NS\SentinelBundle\Validators as LocalAssert;
use NS\UtilBundle\Validator\Constraints\ArrayChoiceConstraint;
use Symfony\Bridge\Doctrine\Validator\Constraints\UniqueEntity;
use Symfony\Component\Validator\Constraints as Assert;
/**
* @ORM\MappedSuperclass
* @SuppressWarnings(PHPMD.ShortVariable)
* @UniqueEntity(fields={"site","case_id"}, message="The case id already exists for this site!")
*
* @LocalAssert\GreaterThanDate(atPath="adm_date",lessThanField="birthdate",greaterThanField="admDate",message="The date of birth is past the date of admission")
* @LocalAssert\BirthdayOrAge()
*/
abstract class BaseCase
{
public const
AGE_DISTRIBUTION_UNKNOWN = -1,
AGE_DISTRIBUTION_00_TO_05 = 1,
AGE_DISTRIBUTION_06_TO_11 = 2,
AGE_DISTRIBUTION_12_TO_23 = 3,
AGE_DISTRIBUTION_24_TO_59 = 4;
/**
* @ORM\Id
* @ORM\GeneratedValue(strategy="CUSTOM")
* @ORM\CustomIdGenerator(class="\NS\SentinelBundle\Entity\Generator\BaseCaseGenerator")
* @var string $id
* @ORM\Column(name="id",type="string")
* @Serializer\Groups({"api","export"})
*/
protected $id;
/**
* @var string|null
* @ORM\Column(name="lastName",type="string",nullable=true)
* @Serializer\Groups({"api","export"})
* @Assert\NotBlank(groups={"AMR"})
*/
protected $lastName;
/**
* @var string|null
* @ORM\Column(name="parentalName",type="string",nullable=true)
* @Serializer\Groups({"api","export"})
*/
protected $parentalName;
/**
* @var string|null
* @ORM\Column(name="firstName",type="string",nullable=true)
* @Serializer\Groups({"api","export"})
* @Assert\NotBlank(groups={"AMR"})
*/
protected $firstName;
/**
* case_ID
* @var string
* @ORM\Column(name="case_id",type="string",nullable=false)
* @Assert\NotBlank(groups={"Default","AMR","Completeness"})
* @Serializer\Groups({"api","export"})
*/
protected $case_id;
/**
* @var string|null
* @ORM\Column(name="district",type="string",nullable=true)
* @Serializer\Groups({"api","export"})
* @Assert\NotBlank(groups={"Completeness"})
*/
protected $district;
/**
* @var string|null
* @ORM\Column(name="state",type="string",nullable=true)
*/
protected $state;
/**
* @var DateTime|null
* @ORM\Column(name="birthdate",type="date",nullable=true)
* @Assert\Date
* @Assert\NotBlank(groups={"Completeness"})
* @LocalAssert\NoFutureDate()
* @Serializer\Groups({"api","export"})
* @Serializer\Type(name="DateTime<'Y-m-d'>")
*/
protected $birthdate;
/**
* @var TripleChoice|null
* @ORM\Column(name="dobKnown",type="TripleChoice",nullable=true)
* @Serializer\Groups("export")
* @Serializer\SerializedName("dobKnown")
* @ArrayChoiceConstraint(groups={"Completeness"})
*/
protected $dobKnown;
/** @var YearMonth */
protected $dobYearMonths;
/**
* @var int|null
* @ORM\Column(name="age_months",type="integer",nullable=true)
* @Serializer\Groups({"api","export"})
* @Assert\NotBlank(groups={"Completeness"})
*/
protected $age_months;
/**
* @var int|null
* @ORM\Column(name="ageDistribution",type="integer",nullable=true)
*/
protected $ageDistribution;
/**
* @var Gender|null
* @ORM\Column(name="gender",type="Gender",nullable=true)
* @Serializer\Groups({"api","export"})
* @ArrayChoiceConstraint(groups={"AMR","Completeness"})
*/
protected $gender;
/**
* @var DateTime|null
* @ORM\Column(name="adm_date",type="date",nullable=true)
* @Serializer\Groups({"api","export"})
* @Serializer\Type(name="DateTime<'Y-m-d'>")
* @Assert\NotBlank(groups={"AMR","Completeness"})
* @Assert\Date()
* @LocalAssert\NoFutureDate()
*/
protected $adm_date;
/**
* @var CaseStatus
* @ORM\Column(name="status",type="CaseStatus")
* @Serializer\Groups({"api","export"})
*/
protected $status;
/**
* @var DateTime $updatedAt
* @ORM\Column(name="updatedAt",type="datetime")
* @Serializer\Groups({"api"})
* @Serializer\Type(name="DateTime<'Y-m-d H:i:s'>")
*/
protected $updatedAt;
/**
* @var DateTime
* @ORM\Column(name="createdAt",type="datetime")
* @Serializer\Groups({"api"})
* @Serializer\Type(name="DateTime<'Y-m-d H:i:s'>")
*/
protected $createdAt;
/**
* @var Region
* @ORM\ManyToOne(targetEntity="\NS\SentinelBundle\Entity\Region")
* @ORM\JoinColumn(nullable=false,referencedColumnName="code")
* @Serializer\Groups({"api","export"})
*/
protected $region;
/**
* @var Country
* @ORM\ManyToOne(targetEntity="\NS\SentinelBundle\Entity\Country")
* @ORM\JoinColumn(nullable=false,referencedColumnName="code")
* @Serializer\Groups({"api","export"})
*/
protected $country;
/**
* @var Site
* @ORM\ManyToOne(targetEntity="\NS\SentinelBundle\Entity\Site")
* @ORM\JoinColumn(nullable=true,referencedColumnName="code")
* @Serializer\Groups({"api","export"})
*/
protected $site;
protected $siteLab;
/**
* @var boolean
* @ORM\Column(name="hasWarning",type="boolean")
*/
protected $warning = false;
// TODO evaluate if these could just be nullable??
// The two external-lab references use the int sentinel -1 (not null) to mean
// "not loaded"; hasReferenceLab() checks this via instanceof rather than null.
/** @var int|BaseExternalLab $referenceLab */
protected $referenceLab = -1;
/** @var int|BaseExternalLab $nationalLab */
protected $nationalLab = -1;
// The three *Class names below must be assigned by concrete subclasses before
// construction; __construct() throws InvalidArgumentException when any is unset.
/**
* @Serializer\Exclude()
*/
protected $siteLabClass = null;
/**
* @Serializer\Exclude()
*/
protected $referenceClass = null;
/**
* @Serializer\Exclude()
*/
protected $nationalClass = null;
public function __construct()
{
    // Concrete subclasses must configure the lab class names before calling
    // this constructor; fail fast (in the same order as before) when one is missing.
    $requiredLabClasses = [
        'nationalClass' => 'NationalLab',
        'referenceClass' => 'ReferenceLab',
        'siteLabClass' => 'SiteLab',
    ];
    foreach ($requiredLabClasses as $property => $label) {
        if (!is_string($this->$property) || empty($this->$property)) {
            throw new InvalidArgumentException(sprintf('The %s class is not set', $label));
        }
    }
    // New cases start open with creation/update stamps set to "now".
    $this->status = new CaseStatus(CaseStatus::OPEN);
    $this->createdAt = new DateTime();
    $this->updatedAt = new DateTime();
}
public function __clone()
{
    // A clone gets a fresh (unassigned) id and deep copies of any attached
    // lab records so it never shares mutable lab state with the original.
    $this->setId(null);
    foreach (['siteLab', 'referenceLab', 'nationalLab'] as $labProperty) {
        if (is_object($this->$labProperty)) {
            $setter = 'set' . ucfirst($labProperty);
            $this->$setter(clone $this->$labProperty);
        }
    }
}
/**
 * Renders the case as its id, or an empty string before an id is assigned.
 */
public function __toString()
{
    return $this->id === null ? '' : $this->id;
}
/**
 * @return string|null the generated case id, or null before first persist
 */
public function getId(): ?string
{
return $this->id;
}
/**
 * @param string|null $id null is used by __clone() to reset the identifier
 */
public function setId($id): void
{
$this->id = $id;
}
/**
 * True when a non-empty id has been assigned.
 * Note: uses empty(), so the string "0" would also count as "no id".
 */
public function hasId(): bool
{
return !empty($this->id);
}
/**
 * @param Region|null $region
 */
public function setRegion(Region $region = null): void
{
$this->region = $region;
}
/**
 * @return Region declared non-nullable, but may actually hold null after
 *                setRegion(null) — TODO confirm the intended contract
 */
public function getRegion(): Region
{
return $this->region;
}
/**
 * Attaches the case to a country and keeps the derived region in sync.
 *
 * @param Country|null $country the owning country, or null to detach
 */
public function setCountry(Country $country = null): void
{
    $this->country = $country;
    // Bug fix: the old code called $country->getRegion() unconditionally and
    // fataled whenever the declared null default was actually passed.
    $this->setRegion($country !== null ? $country->getRegion() : null);
}
/**
 * @return Country|null
 */
public function getCountry(): ?Country
{
    return $this->country;
}
/**
 * Attaches the case to a site and keeps the derived country (and, through
 * setCountry(), the region) in sync.
 *
 * @param Site|null $site the owning site, or null to detach
 */
public function setSite(Site $site = null): void
{
    $this->site = $site;
    // Bug fix: the old code called $site->getCountry() unconditionally and
    // fataled whenever the declared null default was actually passed.
    $this->setCountry($site !== null ? $site->getCountry() : null);
}
/**
 * @return Site|null
 */
public function getSite(): ?Site
{
    return $this->site;
}
public function getStatus(): CaseStatus
{
return $this->status;
}
public function setStatus(CaseStatus $status): void
{
$this->status = $status;
}
/**
 * Attaches a reference-lab record and wires its back-reference to this case.
 */
public function setReferenceLab(BaseExternalLab $lab): void
{
$lab->setCaseFile($this);
$this->referenceLab = $lab;
}
/**
 * Attaches a national-lab record and wires its back-reference to this case.
 */
public function setNationalLab(BaseExternalLab $lab): void
{
$lab->setCaseFile($this);
$this->nationalLab = $lab;
}
/**
 * Returns the attached lab, or the int sentinel -1 when none was loaded.
 *
 * @return int|BaseExternalLab
 */
public function getReferenceLab()
{
return $this->referenceLab;
}
/**
 * True only when an actual reference-lab object (of the subclass-configured
 * class) is attached; the -1 sentinel does not match instanceof.
 */
public function hasReferenceLab(): bool
{
return $this->referenceLab instanceof $this->referenceClass;
}
/**
 * Returns the attached lab, or the int sentinel -1 when none was loaded.
 *
 * @return int|BaseExternalLab
 */
public function getNationalLab()
{
return $this->nationalLab;
}
/**
 * True only when an actual national-lab record is attached.
 *
 * Bug fix: $nationalLab defaults to the int sentinel -1, so the previous
 * "!== null" check reported a lab as present even when none was loaded.
 * Mirrors hasReferenceLab() by testing against the configured lab class.
 */
public function hasNationalLab(): bool
{
    return $this->nationalLab instanceof $this->nationalClass;
}
/**
 * Whether the specimen was forwarded to the reference lab, according to
 * either the site lab or the national lab record.
 */
public function getSentToReferenceLab(): bool
{
    if ($this->siteLab && method_exists($this->siteLab, 'getSentToReferenceLab') && $this->siteLab->getSentToReferenceLab()) {
        return true;
    }
    // Bug fix: $nationalLab defaults to the truthy int sentinel -1, so the old
    // bare truthiness check called a method on an integer and fataled.
    return $this->nationalLab instanceof BaseExternalLab && $this->nationalLab->getSentToReferenceLab();
}
/**
 * Whether the site lab reports the specimen as sent to the national lab.
 * NOTE(review): assumes any attached siteLab implements getSentToNationalLab()
 * — confirm against BaseSiteLabInterface.
 */
public function getSentToNationalLab(): bool
{
return $this->siteLab ? $this->siteLab->getSentToNationalLab() : false;
}
/**
 * True only when a site-lab object of the subclass-configured class is attached.
 */
public function hasSiteLab(): bool
{
return $this->siteLab instanceof $this->siteLabClass;
}
public function getSiteLab(): ?BaseSiteLabInterface
{
return $this->siteLab;
}
/**
 * Attaches a site-lab record and wires its back-reference to this case.
 */
public function setSiteLab(BaseSiteLabInterface $siteLab): void
{
$siteLab->setCaseFile($this);
$this->siteLab = $siteLab;
}
/**
 * Whether the case has reached the COMPLETE status.
 */
public function isComplete(): bool
{
    return CaseStatus::COMPLETE === (int) $this->status->getValue();
}
public function getUpdatedAt(): ?DateTime
{
return $this->updatedAt;
}
public function setUpdatedAt(DateTime $updatedAt): void
{
$this->updatedAt = $updatedAt;
}
public function getCreatedAt(): DateTime
{
return $this->createdAt;
}
/**
 * Year portion of the creation timestamp, e.g. "2016".
 */
public function getYear(): string
{
return $this->createdAt->format('Y');
}
/**
 * Alias of getBirthdate().
 *
 * @return DateTime|null
 */
public function getDob()
{
return $this->birthdate;
}
/**
 * @return DateTime|null
 */
public function getBirthdate()
{
return $this->birthdate;
}
/**
 * Admission date.
 *
 * @return DateTime|null
 */
public function getAdmDate()
{
return $this->adm_date;
}
/**
 * Site-assigned case identifier (distinct from the generated $id).
 *
 * @return string|null
 */
public function getCaseId()
{
return $this->case_id;
}
/**
 * Alias of getAgeMonths().
 *
 * @return int|null
 */
public function getAge()
{
return $this->age_months;
}
/**
 * @return int|null age in months
 */
public function getAgeMonths()
{
return $this->age_months;
}
/**
 * @return Gender|null
 */
public function getGender()
{
return $this->gender;
}
/**
 * @return TripleChoice|null
 */
public function getDobKnown()
{
return $this->dobKnown;
}
/**
 * @param TripleChoice $dobKnown
 */
public function setDobKnown(TripleChoice $dobKnown)
{
$this->dobKnown = $dobKnown;
}
/**
 * Derives the year/month breakdown from $age_months on demand.
 *
 * NOTE(review): $this->age_months / 12 is float division in PHP; this assumes
 * YearMonth truncates to whole years — confirm, otherwise intdiv() is needed.
 *
 * @return YearMonth|null null when the age is unknown or not positive
 */
public function getDobYearMonths()
{
if ($this->age_months > 0) {
$this->dobYearMonths = new YearMonth($this->age_months / 12, $this->age_months % 12);
}
return $this->dobYearMonths;
}
/**
 * @param YearMonth|null $dobMonthYears
 */
public function setDobYearMonths(YearMonth $dobMonthYears=null): void
{
$this->dobYearMonths = $dobMonthYears;
// Presumably getMonths() returns the total age in months — verify against YearMonth.
if ($dobMonthYears) {
$this->age_months = $dobMonthYears->getMonths();
}
}
public function setBirthdate(?DateTime $birthdate): void
{
$this->birthdate = $birthdate;
}
/**
 * Alias of setBirthdate().
 */
public function setDob(DateTime $dob = null): void
{
$this->birthdate = $dob;
}
public function setAdmDate(?DateTime $admDate = null): void
{
$this->adm_date = $admDate;
}
public function setCaseId(?string $caseId): void
{
$this->case_id = $caseId;
}
/**
 * Alias of setAgeMonths().
 */
public function setAge(?int $age): void
{
$this->age_months = $age;
}
public function setAgeMonths(?int $age_months): void
{
$this->age_months = $age_months;
}
public function setGender(?Gender $gender): void
{
$this->gender = $gender;
}
public function getLastName(): ?string
{
return $this->lastName;
}
public function getFirstName(): ?string
{
return $this->firstName;
}
public function getParentalName(): ?string
{
return $this->parentalName;
}
public function setParentalName(?string $parentalName): void
{
$this->parentalName = $parentalName;
}
public function setLastName(?string $lastName): void
{
$this->lastName = $lastName;
}
public function setFirstName(?string $firstName): void
{
$this->firstName = $firstName;
}
public function getAgeDistribution(): ?int
{
return $this->ageDistribution;
}
public function setAgeDistribution(?int $ageDistribution): void
{
$this->ageDistribution = $ageDistribution;
}
public function getDistrict(): ?string
{
return $this->district;
}
public function getState(): ?string
{
return $this->state;
}
public function setDistrict(?string $district): void
{
$this->district = $district;
}
public function setState(?string $state): void
{
$this->state = $state;
}
/**
 * Whether the case is flagged with a data warning.
 * The "?? false" guard is redundant given the initialized bool, but harmless.
 */
public function hasWarning(): bool
{
return $this->warning ?? false;
}
public function setWarning(bool $warning): void
{
$this->warning = $warning;
}
/**
 * A case id containing the "-XXX-" marker has not been linked to a site.
 */
public function isUnlinked(): bool
{
    return false !== strpos($this->id, '-XXX-');
}
}
| {
"content_hash": "4b5de697de4ad5f4599efde93b9eef14",
"timestamp": "",
"source": "github",
"line_count": 627,
"max_line_length": 161,
"avg_line_length": 23.317384370015947,
"alnum_prop": 0.583515731874145,
"repo_name": "IBVPD/Nuvi",
"id": "6f73e40ba67e1560b0da1cca4589655a59c13561",
"size": "14620",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/NS/SentinelBundle/Entity/BaseCase.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3810"
},
{
"name": "HTML",
"bytes": "534456"
},
{
"name": "JavaScript",
"bytes": "8914"
},
{
"name": "PHP",
"bytes": "2146082"
},
{
"name": "PostScript",
"bytes": "402419"
},
{
"name": "Shell",
"bytes": "1503"
}
],
"symlink_target": ""
} |
const request = require('request');
// NOTE(review): Feedback is required but not referenced in this file — confirm
// whether the model import is still needed before removing it.
const Feedback = require('../models/feedback');
const properties = require('../config/properties'); // fixed: missing semicolon

// Run the API smoke tests, reporting each result string to the console.
testAddFeedback(console.log);
testDeleteFeedback(console.log);
// Verifies that POSTing a feedback entry makes it visible in the admin
// listing, then deletes the created record as cleanup.
function testAddFeedback(callback) {
  let passedMessage = "testAddFeedback passed!";
  let failedMessage = "testAddFeedback failed!";
  addFeedback((err, res, body) => {
    if (err) return callback(failedMessage);
    let feedbackId = res.body._id;
    getAdminToken((token) => {
      getFeedbacks(token, (err, res, body) => {
        if (err) return callback(failedMessage);
        for (let feedback of JSON.parse(res.body)) {
          if (feedback._id == feedbackId) {
            // Bug fix: the old code passed callback(passedMessage) — invoking
            // the callback immediately and handing its return value to
            // removeFeedbackById as the completion callback. Defer it instead.
            return removeFeedbackById(feedbackId, () => callback(passedMessage));
          }
        }
        return callback(failedMessage);
      });
    })
  });
}
// Verifies that a created feedback entry no longer appears in the admin
// listing after it has been deleted through the API.
function testDeleteFeedback(callback) {
  const passed = "testDeleteFeedback passed!";
  const failed = "testDeleteFeedback failed!";
  addFeedback((createErr, createRes) => {
    if (createErr) return callback(failed);
    const createdId = createRes.body._id;
    removeFeedbackById(createdId, () => {
      getAdminToken((token) => {
        getFeedbacks(token, (listErr, listRes) => {
          if (listErr) return callback(failed);
          const stillPresent = JSON.parse(listRes.body)
            .some((entry) => entry._id == createdId);
          return callback(stillPresent ? failed : passed);
        });
      });
    });
  });
}
// POSTs a fixed test feedback entry; forwards the raw request callback.
function addFeedback(callback) {
  request({
    uri: 'http://localhost:3000/api/feedbacks',
    method: 'POST',
    json: {
      "name": "test",
      "email": "test@test.ee",
      "message": "test"
    }
  }, callback);
}
// Fetches the admin feedback listing using the supplied access token;
// forwards the raw request callback.
function getFeedbacks(token, callback) {
  request({
    uri: 'http://localhost:3000/api/feedbacks',
    method: 'GET',
    headers: {
      "x-access-token": token
    }
  }, callback);
}
// Authenticates as the configured admin account and yields the session token.
// Defect fixed: a transport error previously left `res` undefined, so reading
// res.body.token threw; the callback now receives null on failure instead.
function getAdminToken(callback) {
  let postSettings = {
    uri: 'http://localhost:3000/api/users/authenticate',
    method: 'POST',
    json: {
      "email": properties.admin.email,
      "password": properties.admin.password
    }
  };
  request(postSettings, (err, res, body) => {
    if (err || !res || !res.body) return callback(null);
    callback(res.body.token);
  });
}
// Deletes one feedback entry by id using a freshly obtained admin token;
// forwards the raw request callback.
function removeFeedbackById(feedbackId, callback) {
  getAdminToken((token) => {
    request({
      uri: 'http://localhost:3000/api/feedbacks/' + feedbackId,
      method: 'DELETE',
      headers: {
        "x-access-token": token
      }
    }, callback);
  });
}
"content_hash": "0374544fe2a8d59e0981c8e1311ed73a",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 87,
"avg_line_length": 25.41322314049587,
"alnum_prop": 0.5216260162601626,
"repo_name": "karlveskus/moodlebox",
"id": "53c137ace7ec574c40ab6dd812d7300b25e3cb8b",
"size": "3075",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/apiTest.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "16358"
},
{
"name": "HTML",
"bytes": "16764"
},
{
"name": "JavaScript",
"bytes": "26402"
},
{
"name": "TypeScript",
"bytes": "53878"
}
],
"symlink_target": ""
} |
package ru.stqa.pft.addressbook.tests;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import ru.stqa.pft.addressbook.model.ContactData;
import java.util.Arrays;
import java.util.stream.Collectors;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
/**
* Created by User on 08.05.2016.
*/
public class ContactEmailTests extends TestBase {

  @BeforeMethod
  public void ensurePreconditions() {
    // Seed a single fully-populated contact so the email comparison has data.
    if (app.db().contacts().isEmpty()) {
      app.goTo().homePage();
      app.contact().create(new ContactData().withFirstName("Elena").withLastName("Nevzorova")
              .withEmail("elena@gmail.com").withEmail2("elena2@gmail.com").withEmail3("elena3@gmail.com")
              .withHomePhone("(123)").withMobilePhone("22-22").withWorkPhone("33 33").withGroup("test1")
              .withAddress("SUITE 5A-1204 799 E DRAGRAM TUCSON AZ 85705 USA"));
    }
  }

  @Test
  public void testContactEmails() {
    // The emails shown on the list page must equal the merged edit-form emails.
    app.goTo().homePage();
    ContactData listEntry = app.contact().all().iterator().next();
    ContactData editFormEntry = app.contact().infoFromEditForm(listEntry);
    assertThat(listEntry.getAllEmails(), equalTo(mergeEmails(editFormEntry)));
  }

  // Joins the three email fields with newlines, skipping blank entries.
  private String mergeEmails(ContactData contact) {
    return java.util.stream.Stream.of(contact.getEmail(), contact.getEmail2(), contact.getEmail3())
            .filter(address -> !address.isEmpty())
            .collect(Collectors.joining("\n"));
  }
}
| {
"content_hash": "8f5ddbc1dc0212cf9ea31e1770c47112",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 111,
"avg_line_length": 37.785714285714285,
"alnum_prop": 0.665406427221172,
"repo_name": "HelenNevzorova/java_pft",
"id": "5f31147bda1711bf08fe7efb6e4546a0a34ea737",
"size": "1587",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "addressbook-web-tests/src/test/java/ru/stqa/pft/addressbook/tests/ContactEmailTests.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "128930"
},
{
"name": "PHP",
"bytes": "242"
}
],
"symlink_target": ""
} |
package com.mooregreatsoftware.gradle.util;
import lombok.val;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayDeque;
import java.util.Optional;
import java.util.function.Function;
@SuppressWarnings("Convert2MethodRef")
public final class FileUtils {
private FileUtils() {
}
/**
* Search the path tree breadth-first, returning the first path that meets "matcher."
*
* @param sourcePath where to start the search
* @param matcher the predicate to match
* @return empty() if nothing matches
*/
public static Optional<Path> findBreadthFirst(Path sourcePath, Function<Path, Boolean> matcher) throws IOException {
if (Files.notExists(sourcePath)) return Optional.empty();
val dirQueue = new ArrayDeque<Path>();
dirQueue.add(sourcePath);
while (!dirQueue.isEmpty()) {
try (val pathStream = Files.list(dirQueue.removeFirst())) {
val foundPath = pathStream.
peek(path -> {
if (Files.isDirectory(path)) dirQueue.add(path);
}).
filter(path -> matcher.apply(path)).findAny();
if (foundPath.isPresent()) return foundPath;
}
}
return Optional.empty();
}
}
| {
"content_hash": "436ddbdb4e545e463ad1f2679badfdaa",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 120,
"avg_line_length": 28.76595744680851,
"alnum_prop": 0.6198224852071006,
"repo_name": "jdigger/gradle-defaults",
"id": "a8385f58e79c96b18b9fb89c36d6066af35c7a5e",
"size": "1972",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/kotlin/com/mooregreatsoftware/gradle/util/FileUtils.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Groovy",
"bytes": "86017"
},
{
"name": "Java",
"bytes": "109023"
},
{
"name": "Kotlin",
"bytes": "42224"
}
],
"symlink_target": ""
} |
package com.renren.mobile.chat.base.views;
import android.content.Context;
import android.util.AttributeSet;
import android.view.View;
import android.view.animation.AlphaAnimation;
import android.view.animation.Animation;
import android.view.animation.AnimationSet;
import android.view.animation.TranslateAnimation;
import android.widget.Button;
/**
* @author dingwei.chen
* @说明 可移动button
* */
public class MoveButton extends Button{
    // Slide-and-fade animations, built once and shared by every instance.
    static AnimationSet sLeftDissmissAnim = null;
    static AnimationSet sRightDissmissAnim = null;
    static AnimationSet sLeftShowAnim = null;
    static AnimationSet sRightShowAnim = null;
    static{
        // NOTE(review): the two alpha animations are shared between several
        // AnimationSets — confirm this is safe on the targeted Android versions.
        Animation alpha_to_dissmiss = new AlphaAnimation(1f,0);
        Animation alpha_to_show = new AlphaAnimation(0f,1);
        sLeftDissmissAnim = new AnimationSet(false);
        sLeftDissmissAnim.addAnimation(new TranslateAnimation(0f, -50f, 0, 0));
        sLeftDissmissAnim.addAnimation(alpha_to_dissmiss);
        sLeftDissmissAnim.setDuration(200);
        sLeftShowAnim = new AnimationSet(false);
        sLeftShowAnim.addAnimation(new TranslateAnimation(50f, 0f, 0, 0));
        sLeftShowAnim.addAnimation(alpha_to_show);
        sLeftShowAnim.setDuration(200);
        sRightShowAnim = new AnimationSet(false);
        sRightShowAnim.addAnimation(new TranslateAnimation(-50f, 0f, 0, 0));
        sRightShowAnim.addAnimation(alpha_to_show);
        sRightShowAnim.setDuration(200);
        sRightDissmissAnim = new AnimationSet(false);
        sRightDissmissAnim.addAnimation(new TranslateAnimation(0f, 50f, 0, 0));
        sRightDissmissAnim.addAnimation(alpha_to_dissmiss);
        // Bug fix: this set previously had no duration, so the right-dismiss
        // animation did not use the same 200ms timing as its three siblings.
        sRightDissmissAnim.setDuration(200);
    }
    public MoveButton(Context context, AttributeSet attrs) {
        super(context, attrs);
    }
    /** Direction the button travels during show/dismiss. */
    public enum DIRECTION{
        LEFT,
        RIGHT
    }
    /**
     * Animates the button out in the given direction, then hides it (GONE).
     * @param dir 代表的是控件移动朝向 (direction of travel)
     */
    public void moveToDissmiss(DIRECTION dir){
        switch(dir){
        case LEFT:
            this.startAnimation(sLeftDissmissAnim);
            break;
        case RIGHT:
            this.startAnimation(sRightDissmissAnim);
            break;
        }
        this.setVisibility(View.GONE);
    }
    /**
     * Animates the button in from the given direction, then shows it (VISIBLE).
     * @param dir 代表的是控件移动朝向 (direction of travel)
     */
    public void moveToShow(DIRECTION dir){
        switch(dir){
        case LEFT:
            this.startAnimation(sLeftShowAnim);
            break;
        case RIGHT:
            this.startAnimation(sRightShowAnim);
            break;
        }
        this.setVisibility(View.VISIBLE);
    }
}
| {
"content_hash": "086f3f18c45f3136bfa8abf2340d0afa",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 64,
"avg_line_length": 26.933333333333334,
"alnum_prop": 0.7215346534653465,
"repo_name": "MichaelSun/SiXin",
"id": "48a51bcf0f56406ac8e6f7c7602e465ae7b26dca",
"size": "2474",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Sixin_Newest/src/base/com/renren/mobile/chat/base/views/MoveButton.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "2463"
},
{
"name": "Java",
"bytes": "3919516"
}
],
"symlink_target": ""
} |
using Xunit;
namespace Take.Elephant.Tests.Sql.SqlServer
{
/// <summary>
/// Runs the shared "SQL item set with identity" test suite against a SQL Server
/// instance supplied by the <see cref="SqlServerFixture"/> collection fixture.
/// </summary>
[Collection(nameof(SqlServer)), Trait("Category", nameof(SqlServer))]
public class SqlServerItemSetWithIdentityFacts : SqlItemSetWithIdentityFacts
{
/// <summary>Forwards the shared fixture to the base test class.</summary>
public SqlServerItemSetWithIdentityFacts(SqlServerFixture serverFixture) : base(serverFixture)
{
}
}
}
| {
"content_hash": "3703ac09908daf01d133e5b30d29af34",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 102,
"avg_line_length": 29.416666666666668,
"alnum_prop": 0.7280453257790368,
"repo_name": "takenet/elephant",
"id": "30202685e640ebbfded582ed0422db00181ab5c6",
"size": "355",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Take.Elephant.Tests/Sql/SqlServer/SqlServerItemSetWithIdentityFacts.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "927326"
}
],
"symlink_target": ""
} |
<?php
namespace common\modules\tracking\models;
use Yii;
/**
* This is the model class for table "tracking.driver".
*
* @property string $Firstname
* @property string $Lastname
* @property string $Address
* @property string $Phonr
* @property string $IMEI
* @property string $Gender
* @property string $Ambulance_Number
* @property string $id
*
* @property Location[] $locations
* @property Coordinate $coordinate
* @property Status $status
*/
class Driver extends \yii\db\ActiveRecord
{
    /**
     * Relations that may be requested through the REST "expand" parameter.
     *
     * @return string[]
     */
    public function extraFields()
    {
        return ['locations', 'coordinate', 'status'];
    }

    /**
     * @inheritdoc
     */
    public static function tableName()
    {
        return 'tracking.driver';
    }

    /**
     * @inheritdoc
     */
    public function rules()
    {
        return [
            [['Firstname', 'Lastname', 'Address', 'Phonr', 'IMEI', 'Gender'], 'required'],
            [['Firstname', 'Lastname', 'Address', 'Phonr', 'IMEI'], 'string', 'max' => 255],
            [['Gender'], 'string', 'max' => 20],
            [['Ambulance_Number'], 'string', 'max' => 25],
            [['IMEI'], 'unique']
        ];
    }

    /**
     * @inheritdoc
     */
    public function attributeLabels()
    {
        return [
            'Firstname' => Yii::t('app', 'Firstname'),
            'Lastname' => Yii::t('app', 'Lastname'),
            'Address' => Yii::t('app', 'Address'),
            'Phonr' => Yii::t('app', 'Phonr'),
            'IMEI' => Yii::t('app', 'Imei'),
            'Gender' => Yii::t('app', 'Gender'),
            'Ambulance_Number' => Yii::t('app', 'Ambulance Number'),
            'id' => Yii::t('app', 'ID'),
        ];
    }

    /**
     * Location history rows keyed by this driver's IMEI.
     *
     * @return \yii\db\ActiveQuery
     */
    public function getLocations()
    {
        return $this->hasMany(Location::className(), ['device_id' => 'IMEI']);
    }

    /**
     * Latest coordinate row keyed by this driver's IMEI.
     *
     * @return \yii\db\ActiveQuery
     */
    public function getCoordinate()
    {
        return $this->hasOne(Coordinate::className(), ['device_id' => 'IMEI']);
    }

    /**
     * Device status row keyed by this driver's IMEI.
     *
     * @return \yii\db\ActiveQuery
     */
    public function getStatus()
    {
        return $this->hasOne(Status::className(), ['IMEI' => 'IMEI']);
    }

    /**
     * @inheritdoc
     */
    public function behaviors()
    {
        return [
            [
                'class' => 'mdm\behaviors\ar\RelatedBehavior',
            ],
            [
                'class' => 'mdm\behaviors\ar\RelationBehavior',
            ],
        ];
    }

    // https://github.com/yiisoft/yii2/issues/1282
    /**
     * Collects the relation query objects declared on $this->modelClass by
     * invoking each probe-safe "get*" method and keeping relation results.
     *
     * NOTE(review): $this->modelClass is not a property of this ActiveRecord —
     * this helper looks copied from a data-provider/controller context; confirm
     * the intended receiver.
     *
     * @return \yii\db\ActiveQueryInterface[]
     */
    public function getModelRelations()
    {
        $reflector = new \ReflectionClass($this->modelClass);
        $model = new $this->modelClass;
        $stack = array();
        foreach ($reflector->getMethods() as $method) {
            if (substr($method->name, 0, 3) !== 'get') continue;
            if ($method->name === 'getRelation') continue;
            if ($method->name === 'getBehavior') continue;
            if ($method->name === 'getFirstError') continue;
            if ($method->name === 'getAttribute') continue;
            if ($method->name === 'getAttributeLabel') continue;
            if ($method->name === 'getOldAttribute') continue;
            $relation = call_user_func(array($model, $method->name));
            // Bug fix: the old check used the unqualified name
            // "yii\db\ActiveRelation", which resolves inside this namespace and
            // matches nothing, so no relation was ever collected. Yii2 relation
            // getters return ActiveQuery objects, which implement
            // \yii\db\ActiveQueryInterface.
            if ($relation instanceof \yii\db\ActiveQueryInterface) {
                $stack[] = $relation;
            }
        }
        return $stack;
    }
}
| {
"content_hash": "f81acea2189c27fbfd2d2573e2c6b3dc",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 92,
"avg_line_length": 27.306451612903224,
"alnum_prop": 0.5162433549911399,
"repo_name": "girc/dmis",
"id": "63fe9c4cecdc0ae04dfb3aadc7c980e71f63f920",
"size": "3386",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "common/modules/tracking/models/Driver.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "838"
},
{
"name": "CSS",
"bytes": "220710"
},
{
"name": "Emacs Lisp",
"bytes": "2410"
},
{
"name": "HTML",
"bytes": "1616675"
},
{
"name": "JavaScript",
"bytes": "33905102"
},
{
"name": "PHP",
"bytes": "1202081"
},
{
"name": "Python",
"bytes": "76407"
},
{
"name": "Shell",
"bytes": "3444"
}
],
"symlink_target": ""
} |
<?php
declare(strict_types=1);
namespace Sphp\Tests\Html\Attributes;
use PHPUnit\Framework\TestCase;
use Sphp\Html\Attributes\PropertyParser;
use Sphp\Html\Attributes\Exceptions\AttributeException;
use Sphp\Exceptions\BadMethodCallException;
use Sphp\Exceptions\InvalidArgumentException;
/**
 * Tests for PropertyParser: parsing "key<delim>value<sep>..." strings and
 * key/value arrays into normalized property maps.
 */
class PropertyParserTest extends TestCase {

    /**
     * CSS-style inputs paired with their expected parsed property maps.
     *
     * @return iterable<array{string, array<string, string>}>
     */
    public function cssToArrayMap(): iterable {
        yield ['a:url(ftp://a.b);c:#777;', ['a' => 'url(ftp://a.b)', 'c' => '#777']];
        yield ['c:#777;a:url(ftp://a.b);', ['c' => '#777', 'a' => 'url(ftp://a.b)']];
        yield ['c:#777;a: url(ftp://a.b);', ['c' => '#777', 'a' => 'url(ftp://a.b)']];
    }

    /**
     * Inline CSS parses identically via parse() and parseStringToProperties().
     *
     * @dataProvider cssToArrayMap
     *
     * @param string $css
     * @param array $props
     * @return void
     */
    public function testInlineCSSToCases(string $css, array $props): void {
        $parser = new PropertyParser(':', ';');
        $this->assertSame($props, $parser->parse($css));
        $this->assertSame($props, $parser->parseStringToProperties($css));
    }

    /**
     * Valid (delimiter, separator) constructor pairs.
     *
     * @return array<array{string, string}>
     */
    public function constructorParameters(): array {
        return [
            ['=', ','],
            [':', ';'],
        ];
    }

    /**
     * The configured delimiter/separator drive string parsing.
     *
     * @dataProvider constructorParameters
     *
     * @param string $delim
     * @param string $sep
     * @return void
     */
    public function testConstructorWithParams(string $delim, string $sep): void {
        $value = 'a' . $delim . 'b' . $sep . 'c' . $delim . 'd';
        $parser = new PropertyParser($delim, $sep);
        $this->assertEquals(['a' => 'b', 'c' => 'd'], $parser->parseStringToProperties($value));
    }

    /**
     * Invalid (delimiter, separator) constructor pairs.
     *
     * @return array<array{string, string}>
     */
    public function invalidConstructorParameters(): array {
        return [
            [' ', ' '],
            ['', ';'],
            [':', ''],
        ];
    }

    /**
     * Blank or empty delimiter/separator values are rejected.
     *
     * @dataProvider invalidConstructorParameters
     *
     * @param string $delim
     * @param string $sep
     * @return void
     */
    public function testConstructorWithInvalidParams(string $delim, string $sep): void {
        $this->expectException(InvalidArgumentException::class);
        new PropertyParser($delim, $sep);
    }

    /**
     * Well-formed property strings with their expected maps.
     *
     * @return array<array{array<string, string>, string}>
     */
    public function validStrings(): array {
        return [
            [['a' => 'b', 'c' => 'd'], 'a:b;c:d;'],
            [['a' => 'b', 'c' => 'd'], ';a:b;c:d;'],
            [['a' => 'b'], 'a:b'],
            [['a' => 'b'], 'a:b;'],
        ];
    }

    /**
     * @dataProvider validStrings
     *
     * @param array $expected
     * @param string $value
     * @return void
     */
    public function testParseStringToProperties(array $expected, string $value): void {
        $parser = new PropertyParser();
        $this->assertEquals($expected, $parser->parseStringToProperties($value));
    }

    /**
     * Malformed property strings.
     *
     * @return iterable<array{string}>
     */
    public function invalidStrings(): iterable {
        yield ['a;b'];
        yield ['a'];
        yield [':a'];
        yield ['a:'];
        yield ['a:;;b'];
        yield [':;'];
        yield [':'];
        yield [';'];
    }

    /**
     * Malformed property strings are rejected.
     * (Docblock fixed: this test has no $expected parameter.)
     *
     * @dataProvider invalidStrings
     *
     * @param string $value
     * @return void
     */
    public function testParseInvalidStringToProperties(string $value): void {
        $parser = new PropertyParser();
        $this->expectException(AttributeException::class);
        // The call must throw; the old debug print_r() after it was unreachable
        // and has been removed.
        $parser->parseStringToProperties($value);
    }

    /**
     * Arrays that do not form valid property maps.
     *
     * @return array<array{array}>
     */
    public function invalidArrays(): array {
        return [
            [['arr' => []]],
            [['' => '0']],
            [['obj' => new \stdClass()]],
        ];
    }

    /**
     * Invalid arrays are rejected by parse().
     * (Docblock fixed: this test has no $expected parameter.)
     *
     * @dataProvider invalidArrays
     *
     * @param array $value
     * @return void
     */
    public function testParseInvalidArrayToProperties(array $value): void {
        $parser = new PropertyParser();
        $this->expectException(AttributeException::class);
        $parser->parse($value);
    }

    /**
     * Values (string or array) that parse to the same property map.
     *
     * @return iterable<array{string|array}>
     */
    public function validValues(): iterable {
        yield [['a' => 'b', 'c' => 'd']];
        yield ['a:b;c:d;'];
        yield ['a:b;c:d;'];
    }

    /**
     * @dataProvider validValues
     *
     * @param string|array $value
     * @return void
     */
    public function testValidParsing($value): void {
        $parser = new PropertyParser();
        $this->assertEquals($parser->parse($value), ['a' => 'b', 'c' => 'd']);
    }

    /**
     * String/array values with empty keys or values.
     *
     * @return iterable<array{string|array}>
     */
    public function invalidValues(): iterable {
        yield [['a' => '', '' => 'd']];
        yield ['a:;:d;'];
        yield [';ab;'];
    }

    /**
     * @dataProvider invalidValues
     *
     * @param string|array $value
     * @return void
     */
    public function testInvalidParsing($value): void {
        $parser = new PropertyParser();
        $this->expectException(AttributeException::class);
        $parser->parse($value);
    }

    /**
     * Values of types parse() never accepts.
     *
     * @return iterable<array{mixed}>
     */
    public function alwaysInvalidValues(): iterable {
        yield [new \stdClass()];
        yield [true];
        yield [false];
        yield [1];
        yield [1.2];
    }

    /**
     * @dataProvider alwaysInvalidValues
     *
     * @param mixed $value
     * @return void
     */
    public function testAlwaysInvalidParsing($value): void {
        $parser = new PropertyParser();
        $this->expectException(AttributeException::class);
        $parser->parse($value);
    }

    /**
     * singelton() caches one instance per (delimiter, separator) pair.
     * (The misspelled method name is the library's public API — kept as-is.)
     */
    public function testSingeltons() {
        $p1 = PropertyParser::singelton();
        $this->assertSame($p1, PropertyParser::singelton());
        $p2 = PropertyParser::singelton('a', 'b');
        $this->assertSame($p2, PropertyParser::singelton('a', 'b'));
    }
}
| {
"content_hash": "a276a7abdb31ebd6cc81e9154b0555ae",
"timestamp": "",
"source": "github",
"line_count": 217,
"max_line_length": 92,
"avg_line_length": 24.258064516129032,
"alnum_prop": 0.5718085106382979,
"repo_name": "samhol/SPHP-framework",
"id": "afadda2c8a7560c328bbecb2d723a91ce4691c37",
"size": "5555",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sphp/php/tests/classes/Sphp/Tests/Html/Attributes/PropertyParserTest.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Hack",
"bytes": "19"
},
{
"name": "JavaScript",
"bytes": "22252"
},
{
"name": "PHP",
"bytes": "3138045"
},
{
"name": "SCSS",
"bytes": "34618"
}
],
"symlink_target": ""
} |
/* This class creates a gameboard/grid that allows objects to plot themselves
   and allows it to be printed to the console. */
// NOTE(review): this #include sits outside the include guard below; it is
// re-processed on every inclusion — consider moving it inside the guard.
#include "GameObject.h"
#ifndef View_h
#define View_h
// Maximum supported board dimension; the grid allocates a 2-cell border.
const int view_maxsize = 20;
class View
{
private:
// Board dimension currently in use (must not exceed view_maxsize).
int size;
// World-units-per-cell scaling factor applied when plotting.
double scale;
// World coordinate mapped to the grid's origin cell.
CartPoint origin;
// Character cells of the board, including the border rows/columns.
char grid[view_maxsize + 2][view_maxsize + 2][2];
//Gets the row and column that the location passed should be plotted in the array
bool get_subscripts(int &ix, int &iy, CartPoint location);
public:
//Constructor sets the size, scale and origin of the view.
View();
//Sets all value of the view array to either '.' or ' ' depending on the location
void clear();
//Gets objects to draw themselves into the spot in the array
void plot(GameObject* ptr);
//Prints the view to the console
void draw();
};
#endif | {
"content_hash": "cc04efb9c7ecc52f9f14f0ec37d844fd",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 82,
"avg_line_length": 22.857142857142858,
"alnum_prop": 0.7225,
"repo_name": "saribe0/Sea-",
"id": "e9729b0355f13cccaca0db911c322b35b3500699",
"size": "800",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Header Files/View.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "61914"
}
],
"symlink_target": ""
} |
namespace llvm {
class MCAsmBackend;
class MCAsmLayout;
class MCContext;
class MCCodeEmitter;
class MCFragment;
class MCObjectWriter;
class MCSection;
class MCValue;
// FIXME: This really doesn't belong here. See comments below.
// Pairs an indirect symbol with the section that references it (Mach-O).
struct IndirectSymbolData {
MCSymbol *Symbol;
MCSection *Section;
};
// FIXME: Ditto this. Purely so the Streamer and the ObjectWriter can talk
// to one another.
// Describes a data-in-code region delimited by two assembler symbols.
struct DataRegionData {
// This enum should be kept in sync w/ the mach-o definition in
// llvm/Object/MachOFormat.h.
enum KindTy { Data = 1, JumpTable8, JumpTable16, JumpTable32 } Kind;
// Symbols marking where the region starts and ends.
MCSymbol *Start;
MCSymbol *End;
};
/// Collects the sections, symbols, fixups and target-version metadata
/// produced while assembling, performs layout/relaxation, and hands the
/// result to the configured MCObjectWriter.
class MCAssembler {
  friend class MCAsmLayout;

public:
  using SectionListType = std::vector<MCSection *>;
  using SymbolDataListType = std::vector<const MCSymbol *>;

  using const_iterator = pointee_iterator<SectionListType::const_iterator>;
  using iterator = pointee_iterator<SectionListType::iterator>;

  using const_symbol_iterator =
      pointee_iterator<SymbolDataListType::const_iterator>;
  using symbol_iterator = pointee_iterator<SymbolDataListType::iterator>;

  using symbol_range = iterator_range<symbol_iterator>;
  using const_symbol_range = iterator_range<const_symbol_iterator>;

  using const_indirect_symbol_iterator =
      std::vector<IndirectSymbolData>::const_iterator;
  using indirect_symbol_iterator = std::vector<IndirectSymbolData>::iterator;

  using const_data_region_iterator =
      std::vector<DataRegionData>::const_iterator;
  using data_region_iterator = std::vector<DataRegionData>::iterator;

  /// MachO specific deployment target version info.
  // A Major version of 0 indicates that no version information was supplied
  // and so the corresponding load command should not be emitted.
  using VersionInfoType = struct {
    bool EmitBuildVersion;
    union {
      MCVersionMinType Type;        ///< Used when EmitBuildVersion==false.
      MachO::PlatformType Platform; ///< Used when EmitBuildVersion==true.
    } TypeOrPlatform;
    unsigned Major;
    unsigned Minor;
    unsigned Update;
  };

private:
  MCContext &Context;

  std::unique_ptr<MCAsmBackend> Backend;
  std::unique_ptr<MCCodeEmitter> Emitter;
  std::unique_ptr<MCObjectWriter> Writer;

  SectionListType Sections;
  SymbolDataListType Symbols;

  std::vector<IndirectSymbolData> IndirectSymbols;
  std::vector<DataRegionData> DataRegions;

  /// The list of linker options to propagate into the object file.
  std::vector<std::vector<std::string>> LinkerOptions;

  /// List of declared file names
  std::vector<std::string> FileNames;

  MCDwarfLineTableParams LTParams;

  /// The set of function symbols for which a .thumb_func directive has
  /// been seen.
  //
  // FIXME: We really would like this in target specific code rather than
  // here. Maybe when the relocation stuff moves to target specific,
  // this can go with it? The streamer would need some target specific
  // refactoring too.
  mutable SmallPtrSet<const MCSymbol *, 32> ThumbFuncs;

  /// The bundle alignment size currently set in the assembler.
  ///
  /// By default it's 0, which means bundling is disabled.
  unsigned BundleAlignSize;

  bool RelaxAll : 1;
  bool SubsectionsViaSymbols : 1;
  bool IncrementalLinkerCompatible : 1;

  /// ELF specific e_header flags
  // It would be good if there were an MCELFAssembler class to hold this.
  // ELF header flags are used both by the integrated and standalone assemblers.
  // Access to the flags is necessary in cases where assembler directives affect
  // which flags to be set.
  unsigned ELFHeaderEFlags;

  /// Used to communicate Linker Optimization Hint information between
  /// the Streamer and the .o writer
  MCLOHContainer LOHContainer;

  VersionInfoType VersionInfo;

  /// Evaluate a fixup to a relocatable expression and the value which should be
  /// placed into the fixup.
  ///
  /// \param Layout The layout to use for evaluation.
  /// \param Fixup The fixup to evaluate.
  /// \param DF The fragment the fixup is inside.
  /// \param Target [out] On return, the relocatable expression the fixup
  /// evaluates to.
  /// \param Value [out] On return, the value of the fixup as currently laid
  /// out.
  /// \param WasForced [out] On return, the value in the fixup is set to the
  /// correct value if WasForced is true, even if evaluateFixup returns false.
  /// \return Whether the fixup value was fully resolved. This is true if the
  /// \p Value result is fixed, otherwise the value may change due to
  /// relocation.
  bool evaluateFixup(const MCAsmLayout &Layout, const MCFixup &Fixup,
                     const MCFragment *DF, MCValue &Target,
                     uint64_t &Value, bool &WasForced) const;

  /// Check whether a fixup can be satisfied, or whether it needs to be relaxed
  /// (increased in size, in order to hold its value correctly).
  bool fixupNeedsRelaxation(const MCFixup &Fixup, const MCRelaxableFragment *DF,
                            const MCAsmLayout &Layout) const;

  /// Check whether the given fragment needs relaxation.
  bool fragmentNeedsRelaxation(const MCRelaxableFragment *IF,
                               const MCAsmLayout &Layout) const;

  /// Perform one layout iteration and return true if any offsets
  /// were adjusted.
  bool layoutOnce(MCAsmLayout &Layout);

  /// Perform one layout iteration of the given section and return true
  /// if any offsets were adjusted.
  bool layoutSectionOnce(MCAsmLayout &Layout, MCSection &Sec);

  // Relaxation helpers; each returns true if the fragment's size changed.
  bool relaxInstruction(MCAsmLayout &Layout, MCRelaxableFragment &IF);
  bool relaxPaddingFragment(MCAsmLayout &Layout, MCPaddingFragment &PF);
  bool relaxLEB(MCAsmLayout &Layout, MCLEBFragment &IF);
  bool relaxDwarfLineAddr(MCAsmLayout &Layout, MCDwarfLineAddrFragment &DF);
  bool relaxDwarfCallFrameFragment(MCAsmLayout &Layout,
                                   MCDwarfCallFrameFragment &DF);
  bool relaxCVInlineLineTable(MCAsmLayout &Layout,
                              MCCVInlineLineTableFragment &DF);
  bool relaxCVDefRange(MCAsmLayout &Layout, MCCVDefRangeFragment &DF);

  /// finishLayout - Finalize a layout, including fragment lowering.
  void finishLayout(MCAsmLayout &Layout);

  std::tuple<MCValue, uint64_t, bool>
  handleFixup(const MCAsmLayout &Layout, MCFragment &F, const MCFixup &Fixup);

public:
  std::vector<std::pair<StringRef, const MCSymbol *>> Symvers;

  /// Construct a new assembler instance.
  //
  // FIXME: How are we going to parameterize this? Two obvious options are stay
  // concrete and require clients to pass in a target like object. The other
  // option is to make this abstract, and have targets provide concrete
  // implementations as we do with AsmParser.
  MCAssembler(MCContext &Context, std::unique_ptr<MCAsmBackend> Backend,
              std::unique_ptr<MCCodeEmitter> Emitter,
              std::unique_ptr<MCObjectWriter> Writer);
  MCAssembler(const MCAssembler &) = delete;
  MCAssembler &operator=(const MCAssembler &) = delete;
  ~MCAssembler();

  /// Compute the effective fragment size assuming it is laid out at the given
  /// \p SectionAddress and \p FragmentOffset.
  uint64_t computeFragmentSize(const MCAsmLayout &Layout,
                               const MCFragment &F) const;

  /// Find the symbol which defines the atom containing the given symbol, or
  /// null if there is no such symbol.
  const MCSymbol *getAtom(const MCSymbol &S) const;

  /// Check whether a particular symbol is visible to the linker and is required
  /// in the symbol table, or whether it can be discarded by the assembler. This
  /// also effects whether the assembler treats the label as potentially
  /// defining a separate atom.
  bool isSymbolLinkerVisible(const MCSymbol &SD) const;

  /// Emit the section contents to \p OS.
  void writeSectionData(raw_ostream &OS, const MCSection *Section,
                        const MCAsmLayout &Layout) const;

  /// Check whether a given symbol has been flagged with .thumb_func.
  bool isThumbFunc(const MCSymbol *Func) const;

  /// Flag a function symbol as the target of a .thumb_func directive.
  void setIsThumbFunc(const MCSymbol *Func) { ThumbFuncs.insert(Func); }

  /// ELF e_header flags
  unsigned getELFHeaderEFlags() const { return ELFHeaderEFlags; }
  void setELFHeaderEFlags(unsigned Flags) { ELFHeaderEFlags = Flags; }

  /// MachO deployment target version information.
  const VersionInfoType &getVersionInfo() const { return VersionInfo; }
  void setVersionMin(MCVersionMinType Type, unsigned Major, unsigned Minor,
                     unsigned Update) {
    VersionInfo.EmitBuildVersion = false;
    VersionInfo.TypeOrPlatform.Type = Type;
    VersionInfo.Major = Major;
    VersionInfo.Minor = Minor;
    VersionInfo.Update = Update;
  }
  void setBuildVersion(MachO::PlatformType Platform, unsigned Major,
                       unsigned Minor, unsigned Update) {
    VersionInfo.EmitBuildVersion = true;
    VersionInfo.TypeOrPlatform.Platform = Platform;
    VersionInfo.Major = Major;
    VersionInfo.Minor = Minor;
    VersionInfo.Update = Update;
  }

  /// Reuse an assembler instance
  ///
  void reset();

  MCContext &getContext() const { return Context; }

  MCAsmBackend *getBackendPtr() const { return Backend.get(); }
  MCCodeEmitter *getEmitterPtr() const { return Emitter.get(); }
  MCObjectWriter *getWriterPtr() const { return Writer.get(); }

  MCAsmBackend &getBackend() const { return *Backend; }
  MCCodeEmitter &getEmitter() const { return *Emitter; }
  MCObjectWriter &getWriter() const { return *Writer; }

  MCDwarfLineTableParams getDWARFLinetableParams() const { return LTParams; }
  void setDWARFLinetableParams(MCDwarfLineTableParams P) { LTParams = P; }

  /// Finish - Do final processing and write the object to the output stream
  /// of the MCObjectWriter this assembler was constructed with.
  void Finish();

  // Layout all section and prepare them for emission.
  void layout(MCAsmLayout &Layout);

  // FIXME: This does not belong here.
  bool getSubsectionsViaSymbols() const { return SubsectionsViaSymbols; }
  void setSubsectionsViaSymbols(bool Value) { SubsectionsViaSymbols = Value; }

  bool isIncrementalLinkerCompatible() const {
    return IncrementalLinkerCompatible;
  }
  void setIncrementalLinkerCompatible(bool Value) {
    IncrementalLinkerCompatible = Value;
  }

  bool getRelaxAll() const { return RelaxAll; }
  void setRelaxAll(bool Value) { RelaxAll = Value; }

  bool isBundlingEnabled() const { return BundleAlignSize != 0; }

  unsigned getBundleAlignSize() const { return BundleAlignSize; }

  void setBundleAlignSize(unsigned Size) {
    assert((Size == 0 || !(Size & (Size - 1))) &&
           "Expect a power-of-two bundle align size");
    BundleAlignSize = Size;
  }

  /// \name Section List Access
  /// @{

  iterator begin() { return Sections.begin(); }
  const_iterator begin() const { return Sections.begin(); }

  iterator end() { return Sections.end(); }
  const_iterator end() const { return Sections.end(); }

  size_t size() const { return Sections.size(); }

  /// @}
  /// \name Symbol List Access
  /// @{

  symbol_iterator symbol_begin() { return Symbols.begin(); }
  const_symbol_iterator symbol_begin() const { return Symbols.begin(); }

  symbol_iterator symbol_end() { return Symbols.end(); }
  const_symbol_iterator symbol_end() const { return Symbols.end(); }

  symbol_range symbols() { return make_range(symbol_begin(), symbol_end()); }
  const_symbol_range symbols() const {
    return make_range(symbol_begin(), symbol_end());
  }

  size_t symbol_size() const { return Symbols.size(); }

  /// @}
  /// \name Indirect Symbol List Access
  /// @{

  // FIXME: This is a total hack, this should not be here. Once things are
  // factored so that the streamer has direct access to the .o writer, it can
  // disappear.
  std::vector<IndirectSymbolData> &getIndirectSymbols() {
    return IndirectSymbols;
  }

  indirect_symbol_iterator indirect_symbol_begin() {
    return IndirectSymbols.begin();
  }
  const_indirect_symbol_iterator indirect_symbol_begin() const {
    return IndirectSymbols.begin();
  }

  indirect_symbol_iterator indirect_symbol_end() {
    return IndirectSymbols.end();
  }
  const_indirect_symbol_iterator indirect_symbol_end() const {
    return IndirectSymbols.end();
  }

  size_t indirect_symbol_size() const { return IndirectSymbols.size(); }

  /// @}
  /// \name Linker Option List Access
  /// @{

  std::vector<std::vector<std::string>> &getLinkerOptions() {
    return LinkerOptions;
  }

  /// @}
  /// \name Data Region List Access
  /// @{

  // FIXME: This is a total hack, this should not be here. Once things are
  // factored so that the streamer has direct access to the .o writer, it can
  // disappear.
  std::vector<DataRegionData> &getDataRegions() { return DataRegions; }

  data_region_iterator data_region_begin() { return DataRegions.begin(); }
  const_data_region_iterator data_region_begin() const {
    return DataRegions.begin();
  }

  data_region_iterator data_region_end() { return DataRegions.end(); }
  const_data_region_iterator data_region_end() const {
    return DataRegions.end();
  }

  size_t data_region_size() const { return DataRegions.size(); }

  /// @}
  /// \name Linker Optimization Hint (LOH) Access
  /// @{

  // FIXME: This is a total hack, this should not be here. Once things are
  // factored so that the streamer has direct access to the .o writer, it can
  // disappear.
  MCLOHContainer &getLOHContainer() { return LOHContainer; }
  const MCLOHContainer &getLOHContainer() const {
    return const_cast<MCAssembler *>(this)->getLOHContainer();
  }

  struct CGProfileEntry {
    const MCSymbolRefExpr *From;
    const MCSymbolRefExpr *To;
    uint64_t Count;
  };
  std::vector<CGProfileEntry> CGProfile;

  /// @}
  /// \name Backend Data Access
  /// @{

  bool registerSection(MCSection &Section);

  void registerSymbol(const MCSymbol &Symbol, bool *Created = nullptr);

  ArrayRef<std::string> getFileNames() { return FileNames; }

  void addFileName(StringRef FileName) {
    if (!is_contained(FileNames, FileName))
      FileNames.push_back(FileName);
  }

  /// Write the necessary bundle padding to \p OS.
  /// Expects a fragment \p F containing instructions and its size \p FSize.
  void writeFragmentPadding(raw_ostream &OS, const MCEncodedFragment &F,
                            uint64_t FSize) const;

  /// @}

  void dump() const;
};
/// Compute the amount of padding required before the fragment \p F to
/// obey bundling restrictions, where \p FOffset is the fragment's offset in
/// its section and \p FSize is the fragment's size.
/// \return The number of padding bytes to insert before \p F.
uint64_t computeBundlePadding(const MCAssembler &Assembler,
                              const MCEncodedFragment *F, uint64_t FOffset,
                              uint64_t FSize);
} // end namespace llvm
#endif // LLVM_MC_MCASSEMBLER_H
| {
"content_hash": "761bb3844f480e5b31086c129779562a",
"timestamp": "",
"source": "github",
"line_count": 430,
"max_line_length": 80,
"avg_line_length": 34.81395348837209,
"alnum_prop": 0.7061456245824983,
"repo_name": "endlessm/chromium-browser",
"id": "0f9499d705e4666a33b650be21c932e5a2e71fb7",
"size": "15920",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "third_party/swiftshader/third_party/llvm-7.0/llvm/include/llvm/MC/MCAssembler.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
layout: post
title: How to install Netgear A6100 USB Wifi-adapter on Ubuntu 16.04
---
The Netgear A6100 USB Wifi-adapter doesn't work by default on Linux. There are already quite a lot of resources about how to install it. I add just one more article on the subject for those on Ubuntu 16.04 who are not sure if it still works. Short answer: yes, everything works fine! While the installation process can seem quite intimidating for those who have never seen `modprobe` used, or who have never compiled from source, it is simple and works well.


I shamelessly stole those pictures from that article on the same topic [http://www.seanbreeden.com/netgear-a6100-wifi-usb-mini-adapter-install-for-ubuntu-lubuntu/](http://www.seanbreeden.com/netgear-a6100-wifi-usb-mini-adapter-install-for-ubuntu-lubuntu/).
The driver for this Wifi adapter has the barbaric name **Realtek 8812AU**, or more shortly **rtl8812AU** or just **8812au**.
## 1. Find a way to get the code on your computer
As you most likely don't have a Wifi connection on this computer, you can either use an Ethernet cable to download the code, or use a USB key to transfer it from another computer or OS where you have internet access.
I used rtl8812AU-driver-4.3.14 which seems to be working fine.
You can either download it from the Web interface https://github.com/diederikdehaas/rtl8812AU `Clone or download > Download ZIP`.
Or you can download it from the terminal with `git clone https://github.com/diederikdehaas/rtl8812AU.git`.
## 2. Install some basic requirements (just in case)
On my computer it was already installed, but in some tutorials they advise you to check that linux-headers and build-essentials are installed. gcc-5 is the default C compiler on Ubuntu 16.04, so it should also already be installed.
```
sudo apt-get install linux-headers-$(uname -r) build-essential gcc-5
```
## 3. Compile and install the driver
Unzip the file. Then in the terminal, move to the code folder with `cd`. Then build the binary using `make`. Then install using `make install`. Then add the module to your kernel using `modprobe`. Then add **8812au** to the end of the file /etc/modules with `tee -a` to tell the OS to load the driver when it boots (I am not sure this is mandatory, but do it anyway, it cannot be bad!).
```
cd rtl8812AU-driver-4.3.14
make CC=/usr/bin/gcc-5
sudo make install
sudo modprobe 8812au
echo 8812au | sudo tee -a /etc/modules
```
That's it. It seems frightening if you have never done it before, but no, there shouldn't be any issue.
The usual Wifi icon should appear on the top-right corner of your screen. You can add and edit your Wifi password in `Edit Connections...` if necessary.
## Bonus: Keep the files in case of kernel update
After an Ubuntu kernel update, the driver was deleted. I reran the previous steps and it came back (the Wifi parameters were still in memory).
So keep the code on your computer just in case! And create a text file with the commands to run, to be able to copy/paste it easily offline. By then, you will most likely have forgotten!
Linux wouldn't be Linux if it was easy!
## Bonus 2017: If you want a test of 2017 Wifi-adapters, look here
This article is getting old, if you are looking to buy a more up to date Wifi-adapter, here is a test: [http://www.2kreviews.com/best-usb-wifi-adapter/](http://www.2kreviews.com/best-usb-wifi-adapter/)
That being said, the drivers may be different, maybe it will be managed by default, maybe not. You never know what you will get, that's part of the magic of Linux!
| {
"content_hash": "9d75a2908b63bbdf2e4fe07d376e72e4",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 444,
"avg_line_length": 60.131147540983605,
"alnum_prop": 0.7614503816793893,
"repo_name": "MarCnu/marcnu.github.io",
"id": "d7a82b443590d2c63e39b57c3c994a962e22d8ef",
"size": "3672",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_posts/2016-09-10-How-to-install-Netgear-A6100-USB-Wifi-adapter-on-Ubuntu-16.04.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "63882"
},
{
"name": "HTML",
"bytes": "6335"
}
],
"symlink_target": ""
} |
#instagram-plugin-instructions dl dt {
clear: both;
font-weight: bold;
margin-top: 5px;
}
#instagram-plugin-instructions dl dd {
margin: 0;
padding: 0;
font-style: italic;
}
#instagram-plugin-instructions dl {
clear: both;
overflow: hidden;
}
#instagram-plugin-instructions dl dd textarea {
width: 98%;
height: 100px;
}
#instagram-plugin-instructions li {
padding: 20px 0;
border-bottom: 1px solid #bbb;
}
.lboxWrapper {
position: fixed;
left: 0px;
right: 0px;
bottom: 0px;
top: 0px;
opacity: 0;
filter: alpha(opacity=0);
display: none;
}
.lboxWrapper.visible {
display: block;
opacity: 1;
filter: alpha(opacity=100);
z-index: 999999;
}
.lboxWrapper .lboxOverlay, #lboxWrapper .lboxOverlay {
position: fixed;
top: 0px;
left: 0px;
z-index: 10000;
background: #000000;
opacity: 0.6;
filter: alpha(opacity=60);
right: 0px;
bottom: 0px;
cursor: pointer;
}
.lboxWrapper .lboxFrame, #lboxWrapper .lboxFrame {
position: fixed;
margin: 0px;
width: 500px;
z-index: 100001;
height: 440px;
padding: 20px;
left: 50%;
margin-left: -280px;
background: #FFF;
border: 1px solid #E5E5E5;
box-shadow: 0px 1px 1px rgba(0,0,0,0.04);
}
.lboxWrapper .lboxFrame iframe, #lboxWrapper .lboxFrame iframe {
width: 800px;
height: 400px;
border: 0px none;
overflow: hidden;
}
.lboxWrapper .lboxClose, #lboxWrapper .lboxClose {
position: absolute;
top: -30px;
line-height: 25px;
font-size: 25px;
font-weight: bold;
right: 0px;
color: #FFF;
text-shadow: 0px 1px 1px rgba(0,0,0,0.04);
cursor: pointer;
}
.simpleSetupButton {
width: 100%;
text-align: center;
padding-left: 0px !important;
padding-right: 0px !important;
margin: 0px 0px 15px 0px !important;
font-weight: bold;
text-shadow: 0px 1px 1px rgba(0,0,0,0.04);
}
.simpleSetupButton.button-secondary,
.simpleSetupButton.button-secondary:hover {
background: #FF4545;
color: #000;
font-weight: bold;
text-shadow: none;
}
hr.divider {
border: 0px none;
border-top: 1px solid #CCC;
}
.instagram-widget-admin-form .widefat.short {
width: 100px;
}
.instagram-widget-admin-form .widefat.half {
width: 49%;
box-sizing: border-box;
margin-bottom: 10px;
}
.instagram-widget-admin-form .widefat.half:nth-of-type(2n+1) {
margin-right: 1%;
}
.instagram-widget-admin-form label {
display: block;
font-size: 14px;
position: relative;
padding-bottom: 4px;
}
.instagram-widget-admin-form p:first-of-type {
margin-top: 0px;
}
.instagram-widget-admin-form label .help-icon {
position: absolute;
right: 0px;
top: 0px;
bottom: 0px;
width: 20px;
text-align: right;
overflow: hidden;
}
.instagram-widget-admin-form label .help-icon .block {
display: block;
opacity: 0;
transition: opacity 0.3s;
width: 250px;
padding: 8px 10px 10px 10px;
color: #FFF;
text-align: left;
background: rgba(0,0,0,0.7);
font-size: 11px;
border-radius: 3px;
position: absolute;
right: -5px;
top: 26px;
}
.instagram-widget-admin-form label .help-icon .block .block-arrow {
border-left: 8px solid transparent;
border-right: 8px solid transparent;
border-bottom: 8px solid rgba(0,0,0,0.7);
position: absolute;
right: 8px;
top: -8px;
}
.instagram-widget-admin-form label .help-icon:hover {
overflow: visible;
}
.instagram-widget-admin-form label .help-icon:hover .block {
opacity: 1;
}
.instagram-widget-admin-form input[type="button"] {
position: absolute;
bottom: 10px;
right: 10px;
}
.tabs-panel {
top: 1px;
position: relative;
display: none;
border: 1px solid #DFDFDF;
background: #FDFDFD;
z-index: 1;
padding: 10px;
margin-bottom: 7px;
height: 345px;
}
.tabs-panel.active {
display: block;
}
li.tabber a:focus {
box-shadow: none;
}
li.tabber {
border: 1px solid transparent;
}
li.tabber.active {
border: 1px #DFDFDF solid;
background: #FDFDFD;
position: relative;
z-index: 2;
border-bottom: 0px none;
}
li.tabber.error {
background: #EACCCC;
}
li.tabber.error a {
color: #a94442;
}
.instagram-widget-admin-form p.error input, .instagram-widget-admin-form p.error select {
background: #EACCCC;
color: #a94442;
}
.instagram-widget-admin-form p .errorMessage {
display: none;
position: absolute;
background: #EACCCC;
color: #A94442;
padding: 5px;
right: 100%;
width: 200px;
border-radius: 3px;
margin-top: 23px;
margin-right: 5px;
}
.instagram-widget-admin-form p .errorMessage .block-arrow {
border-top: 8px solid transparent;
border-bottom: 8px solid transparent;
border-left: 8px solid #EACCCC;
position: absolute;
right: -8px;
top: 8px;
}
.instagram-widget-admin-form p.error .errorMessage {
display: block;
}
.instagram-widget-admin-form .linkBar {
float: right;
width: 40%;
height: 285px;
border: 1px solid #e5e5e5;
border-radius: 3px;
padding: 10px;
background: #fafafa;
}
.instagram-widget-admin-form .contactMessage {
width: 50%;
}
.instagram-widget-admin-form .linkBar ul {
margin-top: 10px;
margin-bottom: 0px;
}
.instagram-widget-admin-form .linkBar li.break {
padding-top: 3px;
margin-bottom: 6px;
border-bottom: 1px solid #e5e5e5;
}
.instagram-widget-admin-form .linkBar li:first-of-type {
font-weight: bold;
}
.lboxContent h2 {
margin-top: 0px;
}
.instagram-widget-powered-by {
float: right;
}
.instagram-widget-powered-by a {
background: url('img/logo.png') no-repeat center right;
background-size: contain;
width: 100px;
height: 24px;
display: block;
}
/* Nasty hack: hide the standard "Save" button for up to 27 instances of the
   wpinstagram widget by enumerating every possible wrapper id. It would be
   nice if WordPress put a class with the widget name onto the wrapper for
   each widget, so a single selector would do. */
#widget-wpinstagram-widget-1-savewidget,
#widget-wpinstagram-widget-2-savewidget,
#widget-wpinstagram-widget-3-savewidget,
#widget-wpinstagram-widget-4-savewidget,
#widget-wpinstagram-widget-5-savewidget,
#widget-wpinstagram-widget-6-savewidget,
#widget-wpinstagram-widget-7-savewidget,
#widget-wpinstagram-widget-8-savewidget,
#widget-wpinstagram-widget-9-savewidget,
#widget-wpinstagram-widget-10-savewidget,
#widget-wpinstagram-widget-11-savewidget,
#widget-wpinstagram-widget-12-savewidget,
#widget-wpinstagram-widget-13-savewidget,
#widget-wpinstagram-widget-14-savewidget,
#widget-wpinstagram-widget-15-savewidget,
#widget-wpinstagram-widget-16-savewidget,
#widget-wpinstagram-widget-17-savewidget,
#widget-wpinstagram-widget-18-savewidget,
#widget-wpinstagram-widget-19-savewidget,
#widget-wpinstagram-widget-20-savewidget,
#widget-wpinstagram-widget-21-savewidget,
#widget-wpinstagram-widget-22-savewidget,
#widget-wpinstagram-widget-23-savewidget,
#widget-wpinstagram-widget-24-savewidget,
#widget-wpinstagram-widget-25-savewidget,
#widget-wpinstagram-widget-26-savewidget,
#widget-wpinstagram-widget-27-savewidget {
  display: none;
}
#otherUserResults, #locationResults {
position: absolute;
border: 1px solid #DDD;
box-shadow: inset 0px 1px 2px rgba(0,0,0,0.07);
color: #333;
border-top: 0px none;
top: 52px;
left: 0px;
right: 0px;
display: none;
background: #FFF;
z-index: 10;
max-height: 189px;
overflow: hidden;
}
#otherUserResults div, #locationResults div {
padding: 3px 5px;
font-size: 14px;
}
#otherUserResults div:hover, #locationResults div:hover {
background: #2ea2cc;
cursor: pointer;
color: #FFF;
}
#otherUserResults.visible, #locationResults.visible {
display: block;
}
#anotherUser, #location {
position: relative;
}
/* Yellow warning/notice box. The gradient is declared once per legacy vendor
   prefix so old browsers still get it; the plain `background` first line is
   the no-gradient fallback. */
.wpinstagram-error {
  margin-top: 15px;
  text-align: center;
  background: #fcf8e3; /* Old browsers */
  background: -moz-linear-gradient(top, #fcf8e3 0%, #f8efc0 100%); /* FF3.6+ */
  background: -webkit-gradient(linear, left top, left bottom, color-stop(0%,#fcf8e3), color-stop(100%,#f8efc0)); /* Chrome,Safari4+ */
  background: -webkit-linear-gradient(top, #fcf8e3 0%,#f8efc0 100%); /* Chrome10+,Safari5.1+ */
  background: -o-linear-gradient(top, #fcf8e3 0%,#f8efc0 100%); /* Opera 11.10+ */
  background: -ms-linear-gradient(top, #fcf8e3 0%,#f8efc0 100%); /* IE10+ */
  background: linear-gradient(to bottom, #fcf8e3 0%,#f8efc0 100%); /* W3C */
  filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#fcf8e3', endColorstr='#f8efc0',GradientType=0 ); /* IE6-9 */
  color: #000;
  border: 1px solid #f5e79e;
  border-radius: 3px;
  padding: 10px;
}
.wpinstagram-error b {
  color: #000;
  font-size: 110%;
}
.wpinstagram-error-description {
margin-bottom: 0px !important;
}
.wpinstagram_widget_loader {
background: url('img/ajax-loader.gif') no-repeat center center;
width: 16px;
height: 16px;
position: absolute;
right: 5px;
top: 30px;
display: none;
}
.wploading .wpinstagram_widget_loader {
display: block;
}
.instagram_activate {
border: 1px solid #111;
border-radius: 3px;
background: #212121;
padding: 5px 15px 5px 5px;
margin: 15px 0px;
position: relative;
color: #FFF;
}
.instagram_activate .instagram_bg {
background: url('img/logo-white.png') no-repeat center right;
background-size: auto 75%;
}
.instagram_activate .instagram_button {
background: #517FA4;
font-weight: bold;
font-size: 15px;
text-align: center;
padding: 9px 0px 8px 0px;
color: #FFF;
border-radius: 3px;
border: 5px solid #555;
width: 266px;
cursor: pointer;
display: inline-block;
}
.instagram_activate .instagram_description {
display: inline-block;
font-size: 15px;
padding-left: 35px;
}
.instagram_setup_instructions {
position: relative;
}
.instagram_setup_img {
overflow: hidden;
position: absolute;
width: 45%;
left: 0px;
top: 0px;
bottom: 0px;
height: 410px;
transition: width 0.4s ease;
z-index: 2;
border: 1px solid #DDD;
border-radius: 3px;
}
.instagram_setup_img img {
position: absolute;
left: 0px;
top: 0px;
bottom: 0px;
height: 410px;
}
.instagram_setup_img:hover {
width: 100%;
}
.instagram_setup_img_message {
z-index: 10;
opacity: 1;
background: rgba(0,0,0,0.6);
position: absolute;
top: 0px;
left: 0px;
right: 0px;
bottom: 0px;
display: table-cell;
text-align: center;
color: #FFF;
transition: opacity 0.4s ease;
}
.instagram_setup_img_message div {
position: absolute;
top: 50%;
margin-top: -10px;
text-align: center;
left: 0px;
right: 0px;
}
.instagram_setup_img:hover .instagram_setup_img_message {
opacity: 0;
}
.instagram_setup_text {
z-index: 1;
position: absolute;
left: 48%;
right: 0px;
top: 0px;
height: 410px;
}
.instagram_setup_text p:first-of-type {
margin-top: 0px;
padding-top: 0px;
}
.instagram_setup_text a.instagram_setup_button {
background: #517FA4;
font-weight: bold;
font-size: 15px;
text-align: center;
padding: 9px 0px 8px 0px;
color: #FFF;
border-radius: 3px;
cursor: pointer;
text-decoration: none;
display: block;
}
.instagram_activate .instagram_button.instagram_review_button {
background-image: url('img/star.png');
background-position: -20px center;
background-repeat: no-repeat;
background-size: 60px auto;
}
.instagram_review .instagram_hide_review {
float: right;
padding-top: 23px;
padding-right: 145px;
}
.instagram_review .instagram_hide_review a {
font-size: 12px;
color: #EEE;
text-decoration: underline;
}
.instagram_review .instagram_hide_review a:hover {
text-decoration: none;
color: #FFF;
}
.instagram_setup_instructions p.code {
padding: 10px;
background: #DDD;
border: 1px solid #D1D1D1;
border-radius: 4px;
display: block;
width: 480px;
height: 250px;
overflow: auto;
} | {
"content_hash": "53ad97068ada35424a3be787329b4442",
"timestamp": "",
"source": "github",
"line_count": 565,
"max_line_length": 140,
"avg_line_length": 20.198230088495574,
"alnum_prop": 0.7008412197686645,
"repo_name": "mandino/www.bloggingshakespeare.com",
"id": "4b0f70ab0b9106973a6d6c3fb434260542670fac",
"size": "11412",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "reviewing-shakespeare/wp-content/plugins/instagram-for-wordpress/trunk/wpinstagram-admin.css",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5211957"
},
{
"name": "CoffeeScript",
"bytes": "552"
},
{
"name": "HTML",
"bytes": "525757"
},
{
"name": "JavaScript",
"bytes": "6055696"
},
{
"name": "Modelica",
"bytes": "10338"
},
{
"name": "PHP",
"bytes": "44197755"
},
{
"name": "Perl",
"bytes": "2554"
},
{
"name": "Ruby",
"bytes": "3917"
},
{
"name": "Smarty",
"bytes": "27821"
},
{
"name": "XSLT",
"bytes": "34552"
}
],
"symlink_target": ""
} |
package org.efix.util.parse;
import org.efix.util.BenchmarkUtil;
import org.efix.util.MutableInt;
import org.efix.util.buffer.Buffer;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import static org.efix.message.FieldUtil.FIELD_SEPARATOR;
/**
 * JMH benchmark comparing {@code IntParser.parseInt} against the JDK's
 * {@code Integer.parseInt} on a fixed set of signed decimal strings.
 */
@State(Scope.Benchmark)
public class IntParserBenchmark {

    // Reusable parse cursor; reset to 0 before each buffer is parsed.
    private final MutableInt cursor = new MutableInt();

    // Decimal sample strings and their field-terminated buffer encodings.
    private String[] samples;
    private Buffer[] messages;

    /** Builds the sample strings and their "<value>|" message buffers once. */
    @Setup
    public void setup() {
        samples = new String[]{
                "7391639", "-312", "0", "231", "7313", "-654046431",
                "-5743215", "734091", "9192", "-3", "1013", "-5", "8651",
                "33", "-931", "-3031", "981", "10123", "-39187614", "1321311",
                "1321", "7", "-1931913", "-203", "-3495", "-1031", "30313"
        };

        messages = new Buffer[samples.length];
        int index = 0;
        for (String sample : samples) {
            messages[index] = BenchmarkUtil.makeMessage(sample + "|");
            index++;
        }
    }

    /** Empty body: measures pure harness overhead. */
    @Benchmark
    public void baseLine() {
    }

    /** Parses every buffer with the project's IntParser. */
    @Benchmark
    public void parseInt() {
        for (Buffer message : messages) {
            cursor.set(0);
            IntParser.parseInt(FIELD_SEPARATOR, message, cursor, message.capacity());
        }
    }

    /** Parses every sample string with the JDK's Integer.parseInt. */
    @Benchmark
    public void parseIntString() {
        for (String sample : samples) {
            Integer.parseInt(sample);
        }
    }
}
| {
"content_hash": "f5939b73ad9f2fd9033d9e8a2bceec4c",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 83,
"avg_line_length": 27.64814814814815,
"alnum_prop": 0.5974547890154053,
"repo_name": "artyomkorzun/efix",
"id": "a3bc194d08ec9d50e6a94ac92afc1789a77dc534",
"size": "1493",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/jmh/java/org/efix/util/parse/IntParserBenchmark.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "624084"
}
],
"symlink_target": ""
} |
// UILabel subclass that tags each label with a number and a board position.
// NOTE(review): exact semantics of the two tags are not visible here —
// confirm against the code that creates these labels.
@interface CLLuckyLabel : UILabel
// Numeric value associated with this label — presumably the tile's number.
@property(nonatomic)int numberTag;
// Position/slot index of the label — presumably its place in the grid.
@property(nonatomic)int placeTag;
@end
| {
"content_hash": "d5f74eb04bfef31a55fb6f65739b04c4",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 34,
"avg_line_length": 18.333333333333332,
"alnum_prop": 0.8,
"repo_name": "blighli/iPhone2015",
"id": "0e2325272ff448981eb5be1201d961f40dc334e6",
"size": "271",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nb15033王国军/王国军IOS大作业/源代码/My2048/my2048/my2048/CLLuckyLabel.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "57551727"
},
{
"name": "C",
"bytes": "989362"
},
{
"name": "C++",
"bytes": "841211"
},
{
"name": "CMake",
"bytes": "17255"
},
{
"name": "CSS",
"bytes": "45762"
},
{
"name": "GLSL",
"bytes": "4840"
},
{
"name": "HTML",
"bytes": "147506"
},
{
"name": "JavaScript",
"bytes": "545537"
},
{
"name": "Makefile",
"bytes": "5543"
},
{
"name": "Metal",
"bytes": "2713"
},
{
"name": "Objective-C",
"bytes": "21892970"
},
{
"name": "Objective-C++",
"bytes": "263154"
},
{
"name": "Ruby",
"bytes": "1031"
},
{
"name": "Shell",
"bytes": "36609"
},
{
"name": "Swift",
"bytes": "194188"
}
],
"symlink_target": ""
} |
# Image for running buildah inside a container (rootful or rootless).
FROM registry.fedoraproject.org/fedora:latest

# Don't include container-selinux and remove
# directories used by yum that are just taking
# up space.
RUN useradd build; yum -y update; rpm --restore shadow-utils 2>/dev/null; yum -y install cpp buildah fuse-overlayfs xz --exclude container-selinux; rm -rf /var/cache /var/log/dnf* /var/log/yum.*;

# Pull the reference containers.conf shipped with buildah's stable image.
ADD https://raw.githubusercontent.com/containers/buildah/main/contrib/buildahimage/stable/containers.conf /etc/containers/

# Adjust storage.conf to enable Fuse storage.
RUN chmod 644 /etc/containers/containers.conf; sed -i -e 's|^#mount_program|mount_program|g' -e '/additionalimage.*/a "/var/lib/shared",' -e 's|^mountopt[[:space:]]*=.*$|mountopt = "nodev,fsync=0"|g' /etc/containers/storage.conf

# Pre-create the shared additional image stores (and their lock files)
# referenced by the storage.conf edit above.
RUN mkdir -p /var/lib/shared/overlay-images /var/lib/shared/overlay-layers /var/lib/shared/vfs-images /var/lib/shared/vfs-layers; touch /var/lib/shared/overlay-images/images.lock; touch /var/lib/shared/overlay-layers/layers.lock; touch /var/lib/shared/vfs-images/images.lock; touch /var/lib/shared/vfs-layers/layers.lock

# Define uid/gid ranges for our user https://github.com/containers/buildah/issues/3053
RUN echo -e "build:1:999\nbuild:1001:64535" > /etc/subuid; \
 echo -e "build:1:999\nbuild:1001:64535" > /etc/subgid; \
 mkdir -p /home/build/.local/share/containers; \
 chown -R build:build /home/build

# Container storage locations: rootful and the rootless "build" user.
VOLUME /var/lib/containers
VOLUME /home/build/.local/share/containers

# Set an environment variable to default to chroot isolation for RUN
# instructions and "buildah run".
ENV BUILDAH_ISOLATION=chroot
| {
"content_hash": "3025870996ce6b27615083200d2af883",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 320,
"avg_line_length": 62.48,
"alnum_prop": 0.7580025608194623,
"repo_name": "rhatdan/buildah",
"id": "85b5f24c32747ab03ba84cdbb938048533a2291f",
"size": "1865",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "contrib/buildahimage/stable/Dockerfile",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Awk",
"bytes": "512"
},
{
"name": "Dockerfile",
"bytes": "17295"
},
{
"name": "Go",
"bytes": "1157837"
},
{
"name": "Makefile",
"bytes": "8172"
},
{
"name": "Nix",
"bytes": "5882"
},
{
"name": "Perl",
"bytes": "10721"
},
{
"name": "Roff",
"bytes": "485"
},
{
"name": "Shell",
"bytes": "517890"
}
],
"symlink_target": ""
} |
import * as React from 'react'
import {render} from '@testing-library/react'
import Downshift from '../'
// Test double: composite component that attaches the DOM ref it receives via
// an `innerRef` prop (the target of getRootProps({refKey: 'innerRef'})).
const MyDiv = ({innerRef, ...rest}) => <div ref={innerRef} {...rest} />
// Test double: composite component using React.forwardRef, so a plain `ref`
// prop reaches the underlying <div>.
const MyDivWithForwardedRef = React.forwardRef((props, ref) => (
  <div ref={ref} {...props} />
))
// Downshift reports ref-wiring problems through console.error; replace it
// with a jest mock per test so assertions can inspect the calls, and restore
// the real implementation afterwards.
const oldError = console.error
beforeEach(() => {
  console.error = jest.fn()
})
afterEach(() => {
  console.error = oldError
})
// Downshift takes a render-prop child; with none it renders nothing.
test('no children provided renders nothing', () => {
  const MyComponent = () => <Downshift />
  expect(render(<MyComponent />).container).toBeEmptyDOMElement()
})
// A render prop returning null is also a legal "render nothing".
test('returning null renders nothing', () => {
  const MyComponent = () => <Downshift children={() => null} />
  expect(render(<MyComponent />).container).toBeEmptyDOMElement()
})
// Composite (non-DOM) root without getRootProps: Downshift cannot reach a
// DOM node, so rendering throws.
test('returning a composite component without calling getRootProps results in an error', () => {
  const MyComponent = () => <Downshift children={() => <MyDiv />} />
  expect(() => render(<MyComponent />)).toThrowErrorMatchingSnapshot()
})
// Composite root + getRootProps but no refKey: the default `ref` prop never
// reaches a DOM node; reported via console.error (mocked in beforeEach).
test('returning a composite component and calling getRootProps without a refKey results in an error', () => {
  const MyComponent = () => (
    <Downshift children={({getRootProps}) => <MyDiv {...getRootProps()} />} />
  )
  render(<MyComponent />)
  expect(console.error.mock.calls[0][0]).toMatchSnapshot()
})
// DOM root with a custom refKey: plain DOM elements only accept `ref`.
test('returning a DOM element and calling getRootProps with a refKey results in an error', () => {
  const MyComponent = () => (
    <Downshift
      children={({getRootProps}) => <div {...getRootProps({refKey: 'blah'})} />}
    />
  )
  render(<MyComponent />)
  expect(console.error.mock.calls[0][0]).toMatchSnapshot()
})
// Discarding the ref returned by getRootProps is an error too.
test('not applying the ref prop results in an error', () => {
  const MyComponent = () => (
    <Downshift
      children={({getRootProps}) => {
        const {onClick} = getRootProps()
        return <div onClick={onClick} />
      }}
    />
  )
  render(<MyComponent />)
  expect(console.error.mock.calls[0][0]).toMatchSnapshot()
})
// Happy path: composite root wired through refKey produces no errors.
test('renders fine when rendering a composite component and applying getRootProps properly', () => {
  const MyComponent = () => (
    <Downshift
      children={({getRootProps}) => (
        <MyDiv {...getRootProps({refKey: 'innerRef'})} />
      )}
    />
  )
  render(<MyComponent />)
  expect(console.error.mock.calls).toHaveLength(0)
})
// The per-call {suppressRefError: true} option silences each of the three
// error cases exercised above.
test('returning a composite component and calling getRootProps without a refKey does not result in an error if suppressRefError is true', () => {
  const MyComponent = () => (
    <Downshift
      children={({getRootProps}) => (
        <MyDiv {...getRootProps({}, {suppressRefError: true})} />
      )}
    />
  )
  render(<MyComponent />)
  expect(console.error.mock.calls).toHaveLength(0)
})
test('returning a DOM element and calling getRootProps with a refKey does not result in an error if suppressRefError is true', () => {
  const MyComponent = () => (
    <Downshift
      children={({getRootProps}) => (
        <div {...getRootProps({refKey: 'blah'}, {suppressRefError: true})} />
      )}
    />
  )
  render(<MyComponent />)
  expect(console.error.mock.calls).toHaveLength(0)
})
test('not applying the ref prop results in an error does not result in an error if suppressRefError is true', () => {
  const MyComponent = () => (
    <Downshift
      children={({getRootProps}) => {
        const {onClick} = getRootProps({}, {suppressRefError: true})
        return <div onClick={onClick} />
      }}
    />
  )
  render(<MyComponent />)
  expect(console.error.mock.calls).toHaveLength(0)
})
// suppressRefError must not break the correctly-wired case either.
test('renders fine when rendering a composite component and applying getRootProps properly even if suppressRefError is true', () => {
  const MyComponent = () => (
    <Downshift
      children={({getRootProps}) => (
        <MyDiv
          {...getRootProps({refKey: 'innerRef'}, {suppressRefError: true})}
        />
      )}
    />
  )
  render(<MyComponent />)
  expect(console.error.mock.calls).toHaveLength(0)
})
// suppressRefError can also be passed as a prop on Downshift itself.
test('renders fine when rendering a composite component and suppressRefError prop is true', () => {
  const MyComponent = () => (
    <Downshift
      suppressRefError
      children={({getRootProps}) => <MyDiv {...getRootProps()} />}
    />
  )
  render(<MyComponent />)
  expect(console.error.mock.calls).toHaveLength(0)
})
// Components built with React.forwardRef accept the default `ref` directly.
test('renders fine when rendering a composite component that uses refs forwarding', () => {
  const MyComponent = () => (
    <Downshift
      children={({getRootProps}) => (
        <MyDivWithForwardedRef {...getRootProps()} />
      )}
    />
  )
  render(<MyComponent />)
  expect(console.error.mock.calls).toHaveLength(0)
})
// A caller-supplied ref callback passed through getRootProps still receives
// the rendered DOM node.
test('has access to element when a ref is passed to getRootProps', () => {
  const ref = {current: null}
  const MyComponent = () => (
    <Downshift
      children={({getRootProps}) => (
        <MyDivWithForwardedRef
          {...getRootProps({
            ref: e => {
              ref.current = e
            },
          })}
        />
      )}
    />
  )
  render(<MyComponent />)
  expect(ref.current).not.toBeNull()
  expect(ref.current).toBeInstanceOf(HTMLDivElement)
})
| {
"content_hash": "0530c0080ce7852336c78316cba4fb9c",
"timestamp": "",
"source": "github",
"line_count": 173,
"max_line_length": 145,
"avg_line_length": 29.358381502890172,
"alnum_prop": 0.6213821618428824,
"repo_name": "paypal/downshift",
"id": "a8bfd08a175f2de678be7958558ae1e40ea0f870",
"size": "5079",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/__tests__/downshift.get-root-props.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "139596"
},
{
"name": "TypeScript",
"bytes": "3643"
}
],
"symlink_target": ""
} |
<footer class="page-footer">
<div class="footer-copyright">
<div class="container">
{{.Site.Copyright}}
<div class="right">Design <a class="grey-text text-lighten-4" href="http://pdevty.github.io/blog/">pdevty</a></div>
</div>
</div>
</footer>
<script src="https://code.jquery.com/jquery-2.1.4.min.js"></script>
<script src="{{.Site.BaseURL}}/js/materialize.min.js"></script>
<script src="{{.Site.BaseURL}}/js/init.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.6/highlight.min.js"></script>
<script>hljs.initHighlightingOnLoad();</script>
{{with .Site.Params.googleAnalyticsUserID }}
<script>
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','//www.google-analytics.com/analytics.js','ga');
ga('create', '{{.}}', 'auto');
ga('send', 'pageview');
</script>
{{end}}
</body>
</html>
| {
"content_hash": "f24c698a0b1146b0e9096f72093fc1b9",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 121,
"avg_line_length": 42.76923076923077,
"alnum_prop": 0.6276978417266187,
"repo_name": "stremovsky/victor-hugo",
"id": "b9788cb0dc892e128fcb09eca04636ee654a7265",
"size": "1112",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "site/themes/material-design/layouts/partials/footer.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "117330"
},
{
"name": "HTML",
"bytes": "47905"
},
{
"name": "JavaScript",
"bytes": "4687"
}
],
"symlink_target": ""
} |
{-# LANGUAGE RecordWildCards #-}
module Main where
import System.IO
import System.Random
import System.Environment
import System.Console.GetOpt
import System.Exit
import Control.Monad (liftM, liftM2, foldM, forM_, replicateM_)
import Data.Maybe (fromMaybe)
import Text.Printf (printf)
import Data.Function (on)
import Data.IORef
import qualified Data.Map as M
import Data.Map
( Map
, fromList
, elems
, assocs
, empty
, insert
, union
, unionWith
, fold
, foldWithKey
, filterWithKey
, findWithDefault
)
import Opts (simOptions)
import Simulation (SimConfig(..), Statistic(..), PackedStrategy, mergeStats)
-- | Entry point: parse the simulation options from the command line, print a
-- title and the configuration, run the simulations and print the summary
-- report.  Buffering is disabled so the progress bar appears incrementally.
main = do
    hSetBuffering stdout NoBuffering
    (reps, cfg, strategy, strategyName) <- simOptions
    putStr $ showTitle strategyName
    putStr $ showConfig reps cfg
    let summedStats = sumStats reps cfg strategy
    report <- liftM (showReport reps) summedStats
    putStr report
-- | Render the strategy's name as an underlined title framed by blank
-- lines, e.g. @showTitle "X" == "\\nTesting X\\n=========\\n\\n"@.
showTitle :: String -> String
showTitle name = "\n" ++ title ++ "\n" ++ underline ++ "\n\n"
  where
    title     = "Testing " ++ name
    underline = replicate (length title) '='
-- | Show the (partially) provided configuration parameters from the command
-- line in human readable form.  The final "[..........]" line is the empty
-- ten-slot progress bar that 'sumStats' subsequently fills with '^' marks.
showConfig :: Int -> SimConfig -> String
showConfig reps (SimConfig{..}) =
    "Time Limit: " ++ show timeLimit ++ " [s]\n" ++
    "Robot Speed: " ++ show robotSpeed ++ " [mm/s]\n" ++
    "Code Length: " ++ show codeLength ++ "\n" ++
    "CPU Slowness: " ++ show cpuFactor ++ "\n" ++
    "Simulations: " ++ show reps ++ "\n\n" ++
    "[..........]\n "
-- | Show the stats in human readable form: average successes, expected
-- guesses per success, the driving/thinking time split, and a per-round
-- success distribution table in which rounds >= 8 are folded into the last
-- column.
showReport :: Int -> Statistic -> String
showReport reps (Statistic succsPerRound thinkingTime drivingTime) =
    printf "Results\n" ++
    printf "-------\n\n" ++
    printf "Successes: %.5f\n" succsAvg ++
    printf "Expected: %.5f [Guesses/Success]\n\n" expAvg ++
    printf "Driving Time: %6.2f [%%]\n" drivingRel ++
    printf "Thinking Time: %6.2f [%%]\n\n" thinkingRel ++
    printf "| 1 | 2 | 3 | 4 | 5 | 6 | 7 | >=8 | [Round]\n" ++
    printf "|===============================================================|\n" ++
    printf "| " ++ concatMap format (elems relSuccsPerRound) ++ printf "[%%]\n\n"
        -- format: 100% is printed with one decimal so the cell stays 5 wide.
  where format n = printf (if n == 100 then "%05.1f | " else "%05.2f | ") n
        -- Total successes over all rounds; also the divisor for the
        -- per-round percentages below.
        succsSum = sum $ elems succsPerRound
        -- Sum of round-number * successes-in-round, i.e. total guesses spent
        -- on successful games.
        expSum = sum $ map (uncurry (*)) $ assocs succsPerRound
        succsAvg = succsSum /. reps :: Double
        expAvg = expSum /. succsSum :: Double
        -- Time split: (/.) yields 0 on a zero total, so no division guard.
        totalTime = thinkingTime + drivingTime
        thinkingRel = thinkingTime /. totalTime * 100 :: Double
        drivingRel = drivingTime /. totalTime * 100 :: Double
        -- Rounds >= maxRound are collapsed into a single ">=8" bucket.
        maxRound = 8
        succsOverEqMax = sum . elems . filterWithKey (\k _ -> k >= maxRound)
                       $ succsPerRound
        -- Rebuild the map: explicit zeros for empty rounds 1..8, with the
        -- overflow bucket inserted at key maxRound.
        newSuccsPerRound = (`union` (fromList $ zip [1..maxRound] [0,0..]))
                         . insert maxRound succsOverEqMax
                         . filterWithKey (\k _ -> k < maxRound)
                         $ succsPerRound
        relSuccsPerRound = M.map (\s -> s /. succsSum * 100)
                           newSuccsPerRound :: Map Int Double
-- TODO: Is it possible to write that more beautifully?
-- | Repeat a simulation /n/ times, output the progress as ASCII to stdout and
-- finally return the summation of the individual stats for each executed
-- simulation.
sumStats :: Int -> SimConfig -> PackedStrategy -> IO Statistic
sumStats n cfg packedStrategy = do
    -- Width of the progress bar; matches the ten-dot "[..........]" template
    -- printed by showConfig.  Renamed from 'length' so the local binding no
    -- longer shadows Prelude.length.
    let barLength = 10
    printed <- newIORef 0
    acc <- newIORef $ Statistic empty 0 0
    forM_ [0..n-1] $ \i -> do
        -- Number of '^' marks that should be visible after iteration i.
        let toPrint = floor $ i /. n * fromIntegral barLength :: Int
        diff <- liftM (toPrint -) (readIORef printed)
        replicateM_ diff $ putStr "^"
        modifyIORef printed (+diff)
        -- NOTE(review): (stats !! i) makes the loop quadratic overall;
        -- zipping stats into the forM_ list would make it linear.
        acc' <- liftM2 mergeStats (readIORef acc) (stats !! i)
        writeIORef acc acc'
    -- Emit any marks still missing so the bar always ends complete.
    rest <- liftM (barLength -) (readIORef printed)
    replicateM_ rest $ putStr "^"
    putStr "\n\n"
    readIORef acc
  where stats = take n $ map packedStrategy args
        args = iterate updateCfg cfg
        -- Update the generator so that successive simulations do not yield
        -- the same result which would rather diminish the purpose of
        -- repeated simulations.
        updateCfg cfg = cfg { stdGen = fst $ split $ stdGen cfg }
-- | Division helper that hides the @fromIntegral@/@toRational@ conversion
-- noise.  Dividing by zero yields 0 rather than raising an exception.
(/.) :: (Real a, Real b, Fractional c) => a -> b -> c
x /. y
    | y == 0    = fromRational 0
    | otherwise = fromRational (toRational x / toRational y)
| {
"content_hash": "84c67ecca410442f14cf6a279a1097dc",
"timestamp": "",
"source": "github",
"line_count": 128,
"max_line_length": 91,
"avg_line_length": 37.8359375,
"alnum_prop": 0.577534586000413,
"repo_name": "eugenkiss/mastermind",
"id": "d6828e884905cd68e21a891459239f467eea2cf8",
"size": "4894",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "haskell/Main.hs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Haskell",
"bytes": "38801"
},
{
"name": "Java",
"bytes": "34566"
},
{
"name": "Lua",
"bytes": "31417"
}
],
"symlink_target": ""
} |
// Copyright 2014 The Rector & Visitors of the University of Virginia
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using Xamarin.Forms;
using Xamarin.Forms.Platform.iOS;
using UIKit;
using Sensus.iOS.UI;
using Sensus.UI.Inputs;
// register the slider input effect
[assembly: ExportEffect(typeof(HideSliderEffect), SliderWithOptionsInput.EFFECT_RESOLUTION_EFFECT_NAME)]
namespace Sensus.iOS.UI
{
    /// <summary>
    /// Platform effect (registered above for
    /// SliderWithOptionsInput.EFFECT_RESOLUTION_EFFECT_NAME) that hides the
    /// native slider thumb until the user first touches the control, at which
    /// point the thumb is restored and the value is set from the touch
    /// position.
    /// </summary>
    public class HideSliderEffect : PlatformEffect
    {
        // Original thumb image, captured on attach so it can be restored.
        private UIImage _visibleThumbImage;
        // Native UIKit slider backing the Xamarin.Forms Slider.
        private UISlider _nativeSlider;
        protected override void OnAttached()
        {
            _nativeSlider = Control as UISlider;
            Slider formsSlider = Element as Slider;
            if (_visibleThumbImage == null)
            {
                _visibleThumbImage = _nativeSlider.ThumbImage(UIControlState.Normal);
            }
            // Start at the minimum with an invisible (empty-image) thumb.
            _nativeSlider.SetValue(_nativeSlider.MinValue, false);
            _nativeSlider.SetThumbImage(new UIImage(), UIControlState.Normal);
            // MinimumPressDuration = 0 makes the long-press recognizer fire on
            // touch-down, so the first touch reveals the thumb and sets the value.
            _nativeSlider.AddGestureRecognizer(new UILongPressGestureRecognizer(pressRecognizer =>
            {
                // Map the touch X position to a value in [MinValue, MaxValue].
                // NOTE(review): the 25px inset presumably accounts for the thumb
                // width -- confirm against the native control's layout.
                float percent = (float)pressRecognizer.LocationInView(pressRecognizer.View).X / (float)(pressRecognizer.View.Frame.Width - 25);
                float value = _nativeSlider.MinValue + (percent * (_nativeSlider.MaxValue - _nativeSlider.MinValue));
                _nativeSlider.SetThumbImage(_visibleThumbImage, UIControlState.Normal);
                formsSlider.Value = value;
            })
            { MinimumPressDuration = 0 });
        }
        protected override void OnDetached()
        {
            // Restore the original thumb.  NOTE(review): the gesture recognizer
            // added in OnAttached is not removed here.
            _nativeSlider.SetThumbImage(_visibleThumbImage, UIControlState.Normal);
        }
    }
} | {
"content_hash": "d8992b2db4d6402e911505412c47067c",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 131,
"avg_line_length": 32.67741935483871,
"alnum_prop": 0.7532082922013821,
"repo_name": "predictive-technology-laboratory/sensus",
"id": "4765c259e80d7e9f3c5528efb0d418bac46edf28",
"size": "2028",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "Sensus.iOS.Shared/UI/HideSliderEffect.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "2340482"
},
{
"name": "HTML",
"bytes": "109896"
},
{
"name": "JavaScript",
"bytes": "1068"
},
{
"name": "Python",
"bytes": "13651"
},
{
"name": "R",
"bytes": "30597"
},
{
"name": "Shell",
"bytes": "42994"
}
],
"symlink_target": ""
} |
"""
Sub client class for Cinder API v1
"""
from . import snapshot
from . import volume
from . import volume_type
class Client(object):
    """Sub client for the Cinder v1 API.

    Builds the volume-related resource managers and publishes each one both
    as an attribute of this object and as an attribute of the wrapped
    ``client``.
    """

    def __init__(self, client, *args, **kwargs):
        # (attribute name, manager) pairs exposed on self and client alike.
        managers = (
            ('volume', volume.Manager(client)),
            ('volume_snapshot', snapshot.Manager(client)),
            ('volume_type', volume_type.Manager(client)),
        )
        for name, manager in managers:
            setattr(self, name, manager)
            setattr(client, name, manager)
| {
"content_hash": "d4d38ae68d69422cba807ee67207bb4d",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 55,
"avg_line_length": 25.31578947368421,
"alnum_prop": 0.6673596673596673,
"repo_name": "yosshy/osclient2",
"id": "02b706b4d6ea41bfca38ad37753c261c14ab6058",
"size": "1148",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "osclient2/cinder/v1/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "397008"
},
{
"name": "Shell",
"bytes": "398"
}
],
"symlink_target": ""
} |
<?php
// vim: set expandtab tabstop=4 shiftwidth=4 softtabstop=4:
/**
* Html rule end renderer for Xhtml
*
* PHP versions 4 and 5
*
* @category Text
* @package Text_Wiki
* @author Paul M. Jones <pmjones@php.net>
* @license http://www.gnu.org/copyleft/lesser.html LGPL License 2.1
* @version CVS: $Id: Html.php 191862 2005-07-30 08:03:29Z toggg $
* @link http://pear.php.net/package/Text_Wiki
*/
/**
* This class renders preformated html in XHTML.
*
* @category Text
* @package Text_Wiki
* @author Paul M. Jones <pmjones@php.net>
* @license http://www.gnu.org/copyleft/lesser.html LGPL License 2.1
* @version Release: @package_version@
* @link http://pear.php.net/package/Text_Wiki
*/
class Text_Wiki_Render_Xhtml_Html extends Text_Wiki_Render {
    /**
     * Renders a token into text matching the requested format.
     *
     * The "html" rule passes its captured text through verbatim, so the
     * returned markup is NOT escaped; the wiki source is trusted here.
     *
     * @access public
     *
     * @param array $options The "options" portion of the token (second
     * element); only $options['text'] is used.
     *
     * @return string The text rendered from the token options.
     */
    function token($options)
    {
        return $options['text'];
    }
}
?>
| {
"content_hash": "5c588496534915a3c7837ca2bd97ba35",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 72,
"avg_line_length": 24.914893617021278,
"alnum_prop": 0.6242527754056362,
"repo_name": "liuyangning/WX_web",
"id": "7ca4218df64f6f9700e0050f4ed33449d85d9aa5",
"size": "1171",
"binary": false,
"copies": "19",
"ref": "refs/heads/master",
"path": "xampp/php/pear/Text/Wiki/Render/Xhtml/Html.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "40196"
},
{
"name": "Assembly",
"bytes": "1489"
},
{
"name": "Awk",
"bytes": "367"
},
{
"name": "Batchfile",
"bytes": "96192"
},
{
"name": "C",
"bytes": "4114153"
},
{
"name": "C++",
"bytes": "2300829"
},
{
"name": "CSS",
"bytes": "1107659"
},
{
"name": "Forth",
"bytes": "212968"
},
{
"name": "Frege",
"bytes": "2309873"
},
{
"name": "HTML",
"bytes": "6619889"
},
{
"name": "Java",
"bytes": "409346"
},
{
"name": "JavaScript",
"bytes": "5956769"
},
{
"name": "Makefile",
"bytes": "10254"
},
{
"name": "NSIS",
"bytes": "78480"
},
{
"name": "Objective-C",
"bytes": "25832"
},
{
"name": "PHP",
"bytes": "17998453"
},
{
"name": "PLSQL",
"bytes": "598162"
},
{
"name": "Pascal",
"bytes": "728079"
},
{
"name": "Perl",
"bytes": "13786002"
},
{
"name": "Perl 6",
"bytes": "4153046"
},
{
"name": "PostScript",
"bytes": "3228"
},
{
"name": "Prolog",
"bytes": "1099979"
},
{
"name": "Pure Data",
"bytes": "456"
},
{
"name": "Python",
"bytes": "18268"
},
{
"name": "R",
"bytes": "27878"
},
{
"name": "Roff",
"bytes": "40670"
},
{
"name": "SQLPL",
"bytes": "3276"
},
{
"name": "Shell",
"bytes": "53914"
},
{
"name": "Smarty",
"bytes": "6675"
},
{
"name": "TeX",
"bytes": "2582"
},
{
"name": "Visual Basic",
"bytes": "439"
},
{
"name": "XSLT",
"bytes": "110585"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "0bd19f85339d6262435284011731965e",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 10.307692307692308,
"alnum_prop": 0.6940298507462687,
"repo_name": "mdoering/backbone",
"id": "07c9b916afd4a199785bb2249a89891c1133f025",
"size": "187",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Rhodophyta/Florideophyceae/Ceramiales/Rhodomelaceae/Polysiphonia/Polysiphonia caspica/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
namespace PICalculator.Model.Input
{
    /// <summary>
    /// The poverty status of an individual in a given year
    /// </summary>
    public class PovertyStatus
    {
        /// <summary>The calendar year this status refers to.</summary>
        public int Year { get; private set; }

        /// <summary>Whether the individual is classified as poor.</summary>
        public bool IsPoor { get; private set; }

        /// <summary>The poverty gap recorded for the year.</summary>
        public double PovertyGap { get; private set; }

        public PovertyStatus(int year, bool isPoor, double povertyGap)
        {
            Year = year;
            IsPoor = isPoor;
            PovertyGap = povertyGap;
        }
    }
}
| {
"content_hash": "8b04539feaf4f42bbe91b51648940a00",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 72,
"avg_line_length": 28.055555555555557,
"alnum_prop": 0.5801980198019802,
"repo_name": "renatius/PIndexCalculator",
"id": "b9583e184668c62f0340b825c17a5f278f129d9d",
"size": "507",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PIndexCalculator.Model/Input/PovertyStatus.cs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C#",
"bytes": "62223"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>colour.algebra.coordinates — Colour 0.3.1 documentation</title>
<link rel="stylesheet" href="../../../_static/basic.css" type="text/css" />
<link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
<link rel="stylesheet" href="../../../_static/bootswatch-3.1.0/colour/bootstrap.min.css" type="text/css" />
<link rel="stylesheet" href="../../../_static/bootstrap-sphinx.css" type="text/css" />
<link rel="stylesheet" href="../../../_static/styles.css" type="text/css" />
<script type="text/javascript">
var DOCUMENTATION_OPTIONS = {
URL_ROOT: '../../../',
VERSION: '0.3.1',
COLLAPSE_INDEX: false,
FILE_SUFFIX: '.html',
HAS_SOURCE: true
};
</script>
<script type="text/javascript" src="../../../_static/jquery.js"></script>
<script type="text/javascript" src="../../../_static/underscore.js"></script>
<script type="text/javascript" src="../../../_static/doctools.js"></script>
<script type="text/javascript" src="http://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script>
<script type="text/javascript" src="../../../_static/js/jquery-1.11.0.min.js"></script>
<script type="text/javascript" src="../../../_static/js/jquery-fix.js"></script>
<script type="text/javascript" src="../../../_static/bootstrap-3.1.0/js/bootstrap.min.js"></script>
<script type="text/javascript" src="../../../_static/bootstrap-sphinx.js"></script>
<link rel="top" title="Colour 0.3.1 documentation" href="../../../index.html" />
<link rel="up" title="colour.algebra" href="../algebra.html" />
<meta charset='utf-8'>
<meta http-equiv='X-UA-Compatible' content='IE=edge,chrome=1'>
<meta name='viewport' content='width=device-width, initial-scale=1.0, maximum-scale=1'>
<meta name="apple-mobile-web-app-capable" content="yes">
</head>
<body>
<div id="navbar" class="navbar navbar-default navbar-fixed-top">
<div class="container">
<div class="navbar-header">
<!-- .btn-navbar is used as the toggle for collapsed navbar content -->
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target=".nav-collapse">
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="../../../index.html"><img src="../../../_static/Colour_Logo_Icon_001.png">
Colour 0.3</a>
<!--<span class="navbar-text navbar-version pull-left"><b>0.3</b></span>-->
</div>
<div class="collapse navbar-collapse nav-collapse">
<ul class="nav navbar-nav">
<li class="divider-vertical"></li>
<li><a href="http://colour-science.org">colour-science.org</a></li>
<li class="dropdown">
<a href="#" class="dropdown-toggle" data-toggle="dropdown"><i class="fa fa-life-ring"> Documentation</i><b class="caret"></b></a>
<ul class="dropdown-menu">
<li>
<a href="api.html" class="fa fa-life-ring"> API Reference</a>
</li>
<li>
<a href="http://nbviewer.ipython.org/github/colour-science/colour-ipython/blob/master/notebooks/colour.ipynb', True)" class="fa fa-book"> IPython Notebooks</a>
</li>
<li>
<a href="http://colour-science.org/features.php" class="fa fa-lightbulb-o"> Features</a>
</li>
<li>
<a href="http://colour-science.org/contributing.php"><span class="fa fa-gears"> Contributing</span></a>
</li>
</ul>
</li>
</ul>
<form class="navbar-form navbar-right" action="../../../search.html" method="get">
<div class="form-group">
<input type="text" name="q" class="form-control" placeholder="Search" />
</div>
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
</div>
</div>
</div>
<div class="container">
<div class="row">
<div class="col-md-12">
<h1>Source code for colour.algebra.coordinates</h1><div class="highlight"><pre>
<span class="c">#!/usr/bin/env python</span>
<span class="c"># -*- coding: utf-8 -*-</span>
<span class="kn">from</span> <span class="nn">__future__</span> <span class="kn">import</span> <span class="n">absolute_import</span>
<span class="kn">from</span> <span class="nn">.transformations</span> <span class="kn">import</span> <span class="n">cartesian_to_spherical</span><span class="p">,</span> <span class="n">spherical_to_cartesian</span>
<span class="kn">from</span> <span class="nn">.transformations</span> <span class="kn">import</span> <span class="n">cartesian_to_cylindrical</span><span class="p">,</span> <span class="n">cylindrical_to_cartesian</span>
<span class="n">__all__</span> <span class="o">=</span> <span class="p">[</span><span class="s">'cartesian_to_spherical'</span><span class="p">,</span>
<span class="s">'spherical_to_cartesian'</span><span class="p">,</span>
<span class="s">'cartesian_to_cylindrical'</span><span class="p">,</span>
<span class="s">'cylindrical_to_cartesian'</span><span class="p">]</span>
</pre></div>
</div>
</div>
</div>
<footer class="footer">
<div class="container">
<p class="pull-right">
<a href="#">Back to top</a>
</p>
<p>
© Copyright 2013 - 2014, Colour Developers.<br/>
Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.2.2.<br/>
</p>
</div>
</footer>
</body>
</html> | {
"content_hash": "b11f33f288b23af412233549211378ad",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 220,
"avg_line_length": 44.364285714285714,
"alnum_prop": 0.562228304620834,
"repo_name": "colour-science/colour-website",
"id": "9dc44fb5505fa16670426b832676f6b9e05d7b6d",
"size": "6211",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "api/0.3.1/html/_modules/colour/algebra/coordinates.html",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "44"
},
{
"name": "CSS",
"bytes": "585578"
},
{
"name": "HTML",
"bytes": "165359615"
},
{
"name": "JavaScript",
"bytes": "358954"
},
{
"name": "Makefile",
"bytes": "1224"
},
{
"name": "PHP",
"bytes": "213935"
},
{
"name": "Python",
"bytes": "8858"
},
{
"name": "Ruby",
"bytes": "470"
}
],
"symlink_target": ""
} |
// GUIController.java
// Copyright (c) 2010 William Whitney
// All rights reserved.
// This software is released under the BSD license.
// Please see the accompanying LICENSE.txt for details.
package net.sourceforge.javaocr.gui;
import java.awt.geom.AffineTransform;
import java.awt.image.AffineTransformOp;
import java.awt.image.BufferedImage;
import java.io.File;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.imageio.ImageIO;
import javax.swing.UIManager;
import net.sourceforge.javaocr.gui.characterTracer.TracerFrame;
import net.sourceforge.javaocr.gui.handwritingRecognizer.ConfigPanel;
import net.sourceforge.javaocr.gui.handwritingRecognizer.HandWritingFormProcessor;
import net.sourceforge.javaocr.gui.handwritingRecognizer.ProcessPanel;
import net.sourceforge.javaocr.gui.handwritingRecognizer.TrainingPanel;
import net.sourceforge.javaocr.gui.meanSquareOCR.TrainingImageSpec;
import net.sourceforge.javaocr.ocrPlugins.mseOCR.OCRScanner;
import net.sourceforge.javaocr.ocrPlugins.mseOCR.TrainingImageLoader;
import net.sourceforge.javaocr.ocrPlugins.charExtractor.CharacterExtractor;
import net.sourceforge.javaocr.ocrPlugins.charTracer.CharacterTracer;
import net.sourceforge.javaocr.ocrPlugins.lineExtractor.LineExtractor;
import net.sourceforge.javaocr.ocrPlugins.handWriting.HandwritingOCR;
import net.sourceforge.javaocr.ocrPlugins.mseOCR.CharacterRange;
import net.sourceforge.javaocr.scanner.FoundWord;
import net.sourceforge.javaocr.scanner.TrainingImage;
/**
* Allows all GUI elements to be controlled.
* @author William Whitney
*/
public class GUIController
{
private MainFrame mainFrame;
private TrainingPanel handWriteTrainingPanel;
private ConfigPanel handWriteConfigPanel;
private ProcessPanel handWriteProcess;
public GUIController()
{
setLookandFeel();
handWriteTrainingPanel = new TrainingPanel(this);
handWriteConfigPanel = new ConfigPanel(this);
handWriteProcess = new ProcessPanel(this);
mainFrame = new MainFrame(this);
}
public String performMSEOCR(ArrayList<TrainingImageSpec> imgs, String targImageLoc) throws Exception
{
OCRScanner ocrScanner = new OCRScanner();
HashMap<Character, ArrayList<TrainingImage>> trainingImages = getTrainingImageMap(imgs);
ocrScanner.addTrainingImages(trainingImages);
BufferedImage targetImage = ImageIO.read(new File(targImageLoc));
List<FoundWord> words = ocrScanner.scan(targetImage, 0, 0, 0, 0, null);
StringBuilder text = new StringBuilder();
for (FoundWord word : words) {
text.append(word.getRecognizedString());
text.append(' ');
}
return text.toString();
}
public void traceChars(File imageFile)
{
CharacterTracer tracer = new CharacterTracer();
BufferedImage img = tracer.getTracedImage(imageFile);
int width = img.getWidth();
if (width > 1000)
{
//Make image always 1000px wide
double scaleAmount = 1000.0 / width;
AffineTransform tx = new AffineTransform();
tx.scale(scaleAmount, scaleAmount);
AffineTransformOp op = new AffineTransformOp(tx, AffineTransformOp.TYPE_BILINEAR);
img = op.filter(img, null);
}
TracerFrame tFrame = new TracerFrame(img);
tFrame.showFrame();
}
public void extractLines(File imageFile, File outDir)
{
LineExtractor slicer = new LineExtractor();
slicer.slice(imageFile, outDir);
}
public void extractChars(File inputImage, File outputDir, int std_width, int std_height)
{
CharacterExtractor slicer = new CharacterExtractor();
slicer.slice(inputImage, outputDir, std_width, std_height);
}
private HashMap<Character, ArrayList<TrainingImage>> getTrainingImageMap(ArrayList<TrainingImageSpec> imgs) throws Exception
{
TrainingImageLoader loader = new TrainingImageLoader();
HashMap<Character, ArrayList<TrainingImage>> trainingImageMap = new HashMap<Character, ArrayList<TrainingImage>>();
for (int i = 0; i < imgs.size(); i++)
{
loader.load(
imgs.get(i).getFileLocation(),
imgs.get(i).getCharRange(),
trainingImageMap);
}
return trainingImageMap;
}
public String processHandwriting(File sourceImage, File targetImage, HandWritingFormProcessor form) throws Exception
{
//Make temp directory
File tempDir = new File("./temp");
if (!tempDir.mkdir())
{
removeAllFiles(tempDir.listFiles());
}
//Extract all the lines to it
LineExtractor lineExtractor = new LineExtractor();
lineExtractor.slice(sourceImage, tempDir);
ArrayList<TrainingImageSpec> imgs = new ArrayList<TrainingImageSpec>();
File[] files = sortFiles(tempDir.listFiles());
int linesUsed = 0;
if (form.isLearnZeroToNine())
{
TrainingImageSpec trainImage = new TrainingImageSpec();
trainImage.setFileLocation(files[linesUsed].getAbsolutePath());
trainImage.setCharRange(new CharacterRange((int) '0', (int) '9'));
imgs.add(trainImage);
linesUsed++;
}
if (form.isLearnLowerAtoZ())
{
TrainingImageSpec trainImage = new TrainingImageSpec();
trainImage.setFileLocation(files[linesUsed].getAbsolutePath());
trainImage.setCharRange(new CharacterRange((int) 'a', (int) 'z'));
imgs.add(trainImage);
linesUsed++;
}
if (form.isLearnUpperAtoZ())
{
TrainingImageSpec trainImage = new TrainingImageSpec();
trainImage.setFileLocation(files[linesUsed].getAbsolutePath());
trainImage.setCharRange(new CharacterRange((int) 'A', (int) 'Z'));
imgs.add(trainImage);
linesUsed++;
}
HashMap<Character, ArrayList<TrainingImage>> trainingImages = getTrainingImageMap(imgs);
HandwritingOCR handwritingOCR = new HandwritingOCR(trainingImages);
handwritingOCR.setEnableMSEOCR(form.isMSEOCR());
handwritingOCR.setEnableAspectOCR(form.isAspectOCR());
BufferedImage targetBfImage = ImageIO.read(targetImage);
String text = handwritingOCR.scan(targetBfImage);
removeAllFiles(files);
tempDir.delete();
return text;
}
public void showGUI()
{
mainFrame.setVisible(true);
}
public TrainingPanel getHandWriteTrainingPanel()
{
return handWriteTrainingPanel;
}
public ConfigPanel getHandWriteConfigPanel()
{
return handWriteConfigPanel;
}
public ProcessPanel getHandWriteProcess()
{
return handWriteProcess;
}
public void repaint()
{
mainFrame.repaint();
}
private void setLookandFeel()
{
try
{
UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
}
catch (Exception ex)
{
Logger.getLogger(GUIController.class.getName()).log(Level.SEVERE, null, ex);
}
}
/**
 * Sorts the given files in place by file name (lexicographic order) and
 * returns the same array.
 *
 * @param files the files to sort; the array is modified in place
 * @return the input array, sorted by file name
 */
private File[] sortFiles(File[] files)
{
// Sort the array directly; the previous implementation copied into an
// ArrayList, sorted it, and copied back element by element.
Arrays.sort(files, new Comparator<File>()
{
public int compare(File f1, File f2)
{
return f1.getName().compareTo(f2.getName());
}
});
return files;
}
/**
 * Deletes every file in the supplied array. The return value of
 * {@link File#delete()} is intentionally ignored, matching the original
 * best-effort behaviour.
 *
 * @param files the files to remove
 */
private void removeAllFiles(File[] files)
{
for (File file : files)
{
file.delete();
}
}
private static final Logger LOG = Logger.getLogger(GUIController.class.getName());
}
| {
"content_hash": "94557ab36fa643f3533fc754bd7f6751",
"timestamp": "",
"source": "github",
"line_count": 251,
"max_line_length": 128,
"avg_line_length": 32.51394422310757,
"alnum_prop": 0.6594780051464282,
"repo_name": "ceejii/cwjonsson-receipt-ocr",
"id": "55fd0314fccbe3569637872b4756097390481310",
"size": "8161",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tools/src/main/java/net/sourceforge/javaocr/gui/GUIController.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Java",
"bytes": "247346"
}
],
"symlink_target": ""
} |
/*
var Account: any = function(ledgers) {
if (!ledgers) ledgers = [];
Ledgers <- ledgers;
}
*/
/*
var Account = DCI.Context.extend(function() {
this.bindRoles = function(ledgers) {
if (!ledgers) ledgers = [];
Ledgers <- ledgers;
};
this.increaseBalance = function(amount) {
Ledgers.addEntry('depositing', amount);
};
this.decreaseBalance = function(amount) {
Ledgers.addEntry('withdrawing', 0 - amount);
};
this.getBalance = function() {
return Ledgers.getBalance();
}
//TypeScript doesn't support this syntax yet.
//If it did, in ES5 environments, a native-like getter could be created:
//get balance() {
// return Ledgers.getBalance();
//}
role Ledgers {
addEntry(message, amount) {
Ledgers.push(new LedgerEntry(message, amount));
}
getBalance() {
var sum = 0;
Ledgers.forEach(function(ledgerEntry) {
sum += ledgerEntry.amount;
});
return sum;
}
}
});
function LedgerEntry(message, amount) {
this.message = message;
this.amount = amount;
}
//export the LedgerEntry constructor
Account.LedgerEntry = LedgerEntry;
*/
/*
function Account(initialBalance) {
this._balance = initialBalance || 0;
}
Account.prototype = {
constructor: Account,
increaseBalance: function(amount) {
this._balance += amount;
},
decreaseBalance: function(amount) {
this._balance -= amount;
},
getBalance: function() {
return this._balance;
}
//In ES5 environments, a more natural getter could be created:
//(TypeScript doesn't support this syntax yet)
// get balance() {
// return this._balance;
// }
}
*/ | {
"content_hash": "cd7fb9136c45489bcc4c78435ecc6370",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 73,
"avg_line_length": 18.987951807228917,
"alnum_prop": 0.6675126903553299,
"repo_name": "mbrowne/typescript-dci",
"id": "f28f3d52c88187149cf92cb28dadcc111e58230a",
"size": "1576",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "samples/dci/js/TransferMoney/Account--tmp.ts",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Elixir",
"bytes": "3294"
},
{
"name": "HTML",
"bytes": "7930"
},
{
"name": "JavaScript",
"bytes": "23053088"
},
{
"name": "Shell",
"bytes": "386"
},
{
"name": "TypeScript",
"bytes": "16490923"
}
],
"symlink_target": ""
} |
// "Santa picture" camera game: overlays a santa-hat image on the live
// camera preview, lets the user take a photo, position the hat, and save.
var win = Ti.UI.currentWindow;
// Santa-hat image shown on the camera overlay so the user can line up the shot.
var santa = Titanium.UI.createImageView({
image:'../images/santahat.png',
height:350,
width:250,
top:20
});
// NOTE(review): this box view is created but never added to any parent --
// confirm whether it is dead code or used elsewhere.
var box = Ti.UI.createView({
borderRadius:2,
borderColor:'red',
height:190,
width:170
})
// Shutter button displayed at the bottom of the camera overlay.
var button = Titanium.UI.createButton({
bottom:5,
right:10,
height:40,
left:10,
font:{fontSize:20,fontWeight:'bold',fontFamily:'Helvetica Neue'},
title:'Take Picture'
});
// Custom overlay passed to showCamera below; replaces the system camera controls.
var overlay = Titanium.UI.createView();
overlay.add(santa);
overlay.add(button);
// Capture a still frame when the shutter button is tapped.
button.addEventListener('click',function()
{
//Ti.Media.vibrate();
Ti.Media.takePicture();
});
// Launch the device camera with the custom overlay defined above. The
// success callback receives the captured image; cancel/error handle the
// other outcomes.
Titanium.Media.showCamera({
success:function(event)
{
alert('Great Picture, now just move the santa hat in place and save the photo.');
// Fresh, draggable santa hat placed over the captured photo.
var santa2 = Titanium.UI.createImageView({
image:'../images/santahat.png',
height:350,
width:250,
center:{x:Ti.Platform.displayCaps.platformWidth/2, y:195}
});
var view = Ti.UI.createView({top:0, left:0, right:0, bottom:0});
// place our picture into our window
var imageView = Ti.UI.createImageView({
image:event.media,
width:win.width,
height:win.height
});
view.add(imageView);
//var santaZoom = Ti.UI.createScrollView({minZoomScale:1.0, maxZoomScale:3.0, height:350, width:250});
//santaZoom.add(santa);
//view.add(santaZoom);
// Drag the hat around by following the touch position.
santa2.addEventListener('touchmove', function(e){
santa2.center = e.globalPoint;
});
view.add(santa2);
win.add(view);
// programatically hide the camera
//Ti.Media.hideCamera();
// "Save" flattens the composed view to an image and stores it in the gallery.
var save = Ti.UI.createButton({
title:'Save',
bottom:5,
right:5,
width:60,
height:40
});
// NOTE(review): the save button is only attached on non-Android platforms
// (as the right nav button); confirm how Android users are expected to save.
if(Ti.Platform.osname != 'android'){
win.rightNavButton = save;
}
save.addEventListener('click', function(){
Titanium.Media.saveToPhotoGallery(view.toImage());
alert('Image saved to Gallery');
win.close();
});
},
cancel:function()
{
},
error:function(error)
{
// Surface camera errors to the user in an alert dialog.
var a = Titanium.UI.createAlertDialog({title:'Camera'});
if (error.code == Titanium.Media.NO_CAMERA)
{
a.setMessage('Sorry, you need a camera.');
}
else
{
a.setMessage('Unexpected error: ' + error.code);
}
a.show();
},
overlay:overlay,
showControls:false, // don't show system controls
mediaTypes:Ti.Media.MEDIA_TYPE_PHOTO,
autohide:true // hide the camera UI automatically once the picture is taken
});
| {
"content_hash": "d4a17e5fb0e91efe3d809fa554880d9e",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 104,
"avg_line_length": 19.899159663865547,
"alnum_prop": 0.6659628378378378,
"repo_name": "alanleard/CascadeChristmas",
"id": "e7c122b597cf51f18375d7fce269b938df0e8677",
"size": "2368",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Resources/games/santaPic.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "143650"
},
{
"name": "C++",
"bytes": "52682"
},
{
"name": "D",
"bytes": "577253"
},
{
"name": "JavaScript",
"bytes": "83370"
},
{
"name": "Matlab",
"bytes": "1981"
},
{
"name": "Objective-C",
"bytes": "2992039"
},
{
"name": "Shell",
"bytes": "141"
}
],
"symlink_target": ""
} |
"""
configure tests - returns a reference to the app
"""
import run
import pytest
import os
from json import dumps
@pytest.fixture
def app(request):
    """Expose the Flask application under test.

    ``PRESERVE_CONTEXT_ON_EXCEPTION`` is disabled so request contexts are
    torn down normally when a test triggers an exception.
    """
    flask_app = run.app
    flask_app.config['PRESERVE_CONTEXT_ON_EXCEPTION'] = False
    return flask_app
@pytest.fixture
def client(app):
    """Flask test client bound to the ``app`` fixture."""
    return app.test_client()
# Bearer token cached at module level so the login request below is only
# performed once per test session.
GLOBAL_ID_TOKEN = None

# Credentials are read from the environment, e.g.:
# export TEST_OS_PASSWORD="password"
# export TEST_OS_USERNAME="test_user"
# export TEST_OS_USER_DOMAIN_NAME="testing"


@pytest.fixture(autouse=True)
def myglobal(app, request):
    """Log in once and inject ``global_id_token`` into each test's globals.

    Runs automatically for every test (autouse). On first use it POSTs the
    environment-supplied credentials to ``/api/v1/ohsulogin`` and caches the
    returned id_token; subsequent tests reuse the cached token.
    """
    global GLOBAL_ID_TOKEN
    if GLOBAL_ID_TOKEN:
        # NOTE(review): ``func_globals`` is the Python 2 attribute name; on
        # Python 3 it is ``__globals__`` -- confirm the target interpreter.
        request.function.func_globals['global_id_token'] = GLOBAL_ID_TOKEN
        return
    # login and return bearer token
    client = app.test_client()
    r = client.post('/api/v1/ohsulogin',
                    data=dumps({"domain":
                                os.environ.get('TEST_OS_USER_DOMAIN_NAME'),
                                "user": os.environ.get('TEST_OS_USERNAME'),
                                "password": os.environ.get('TEST_OS_PASSWORD')
                                }),
                    content_type='application/json')
    assert r.status_code == 200
    assert r.json['id_token']
    # Make the token visible inside the test function and cache it.
    request.function.func_globals['global_id_token'] = r.json['id_token']
    GLOBAL_ID_TOKEN = r.json['id_token']
# import run
# import pytest
# import elastic_client
#
# @pytest.fixture
# def app(request, test_users):
# # get app from main
# app = run.app
#
# # Establish an application context before running the tests.
# ctx = app.app_context()
# ctx.push()
#
# # Add setup here if necessary
# # ...
#
# # remove that context when done
# def teardown():
# # clean up data tests have created
# db = app.data.driver.db
# collections = ['file']
# for collection in collections:
# db[collection].delete_many({})
# elastic_client.drop_index('aggregated_resource')
# ctx.pop()
#
# request.addfinalizer(teardown)
# return app
# @pytest.fixture() # pragma nocoverage
# def db(app, request):
# return app.data.driver.db
# @pytest.fixture(scope="session")
# def test_users(request):
# app = run.app
#
# # Establish an application context before running the tests.
# ctx = app.app_context()
# ctx.push()
#
# db = app.data.driver.db
# db.auth_roles.insert_one({'mail': 'tesla@ldap.forumsys.com',
# 'roles': ['admin']})
#
# # remove that context when done
# def teardown():
# # clean up data tests have created
# # clean up data tests have created
# db.auth_roles.delete_many({})
# ctx.pop()
#
# request.addfinalizer(teardown)
| {
"content_hash": "542bc84d472368c942d7be0164e6e42f",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 78,
"avg_line_length": 26.307692307692307,
"alnum_prop": 0.5910087719298246,
"repo_name": "ohsu-computational-biology/euler",
"id": "b7a44633bbcf4d4781b7dc243c2d082f73e73f8c",
"size": "2758",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "services/api/conftest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "711"
},
{
"name": "HTML",
"bytes": "1111"
},
{
"name": "Python",
"bytes": "66130"
},
{
"name": "Shell",
"bytes": "6206"
}
],
"symlink_target": ""
} |
/**
*
* @file quark.h
*
* Dynamic scheduler functions
*
* PLASMA is a software package provided by Univ. of Tennessee,
* Univ. of California Berkeley and Univ. of Colorado Denver
*
* @version 2.4.2
* @author Asim YarKhan
* @date 2010-11-15
*
**/
#ifndef QUARK_H
#define QUARK_H
#include <limits.h>
#include <stdio.h>
#if defined( _WIN32 )
/* This must be included before INPUT is defined below, otherwise we
have a name clash/problem */
#include <windows.h>
#include <limits.h>
#else
#include <inttypes.h>
#endif
#include "quark_unpack_args.h"
#if defined(c_plusplus) || defined(__cplusplus)
extern "C" {
#endif
#ifdef DBGQUARK
/* #define DBGPRINTF(str, ...) { fprintf(stderr, "%s:%d: [%s] " str, __FILE__, __LINE__, __FUNCTION__, __VA_ARGS__); } */
#define DBGPRINTF(...) { fprintf(stderr, __VA_ARGS__); }
#else
#define DBGPRINTF(...) if (0) {};
#endif
#define QUARK_SUCCESS 0
#define QUARK_ERR -1
#define QUARK_ERR_UNEXPECTED -1
#define QUARK_ERR_NOT_SUPPORTED -2
/* A bitmask of 8 bits to hold region markers */
#define QUARK_REGION_BITMASK 0x0000FF
#define QUARK_REGION_ALL 0x0FF
typedef enum { QUARK_REGION_0=1<<0, QUARK_REGION_1=1<<1, QUARK_REGION_2=1<<2, QUARK_REGION_3=1<<3,
QUARK_REGION_4=1<<4, QUARK_REGION_5=1<<5, QUARK_REGION_6=1<<6, QUARK_REGION_7=1<<7 } quark_data_region_t;
typedef enum { QUARK_REGION_L=QUARK_REGION_0|QUARK_REGION_1|QUARK_REGION_2,
QUARK_REGION_D=QUARK_REGION_3|QUARK_REGION_4,
QUARK_REGION_U=QUARK_REGION_5|QUARK_REGION_6|QUARK_REGION_7 } quark_ldu_region_t;
/* Data items can be: */
/* INPUT, OUTPUT, INOUT: these data items create dependencies */
/* VALUE: these data items get copied over */
/* NODEP: these data items get copied over, and are not used for dependencies */
/* SCRATCH: these data items can be allocated (and deallocted) by the scheduler when tasks execute */
#define QUARK_DIRECTION_BITMASK 0x000F00
typedef enum { QINPUT=0x100, OUTPUT=0x200, INOUT=0x300, VALUE=0x400, NODEP=0x500, SCRATCH=0x600} quark_direction_t;
#define INPUT 0x100
#define QUARK_VALUE_FLAGS_BITMASK 0xFFF000
/* Data locality flag; ie keep data on the same core if possible */
#define LOCALITY ( 1 << 12 )
#define NOLOCALITY 0x00
/* A data address with a sequence of ACCUMULATOR dependencies will allow the related tasks to be reordered */
#define ACCUMULATOR ( 1 << 13 )
#define NOACCUMULATOR 0x00
/* A data address with a sequence of GATHERV dependencies will allow the related tasks to be run in parallel */
#define GATHERV ( 1 << 14 )
#define NOGATHERV 0x00
/* The following are task level flags, that can be either provided as additional arguments to the task, or via SET functions */
/* The task label; should be provided as a null terminated string */
#define TASK_LABEL ( 1 << 15 )
#define TASKLABEL TASK_LABEL /* depreciated label */
/* The task color; should be provided as a null terminated string */
#define TASK_COLOR ( 1 << 16 )
#define TASKCOLOR TASK_COLOR /* depreciated label */
/* The priority of the task, provided as an integer */
#define TASK_PRIORITY ( 1 << 17 )
/* Lock the task to a specific thread number (0 ... NTHREADS-1), provided as an integer */
#define TASK_LOCK_TO_THREAD ( 1 << 18 )
/* The sequence pointer to be associated with the task, provided as a pointer */
#define TASK_SEQUENCE ( 1 << 19 )
/* An integer specifying the number of threads required */
#define TASK_THREAD_COUNT ( 1 << 20 )
/* The thread that runs this task should have manual scheduling enabled (1) or disabled (0) */
#define THREAD_SET_TO_MANUAL_SCHEDULING ( 1 << 21 )
/* Lock the task to a thread mask (0 ... NTHREADS-1) bits long, provided as a character array (byte array) */
#define TASK_LOCK_TO_THREAD_MASK ( 1 << 22 )
/* The range for priority values */
#define QUARK_TASK_MIN_PRIORITY 0
#define QUARK_TASK_MAX_PRIORITY INT_MAX
/* Definition of structure holding scheduler information */
typedef struct quark_s Quark;
/* Structure holding task information */
typedef struct quark_task_s Quark_Task;
/* Create a type for setting task flags */
struct quark_task_flags_s {
int task_priority;
int task_lock_to_thread;
char *task_color;
char *task_label;
void *task_sequence;
int task_thread_count;
int thread_set_to_manual_scheduling;
unsigned char *task_lock_to_thread_mask;
};
typedef struct quark_task_flags_s Quark_Task_Flags;
/* Static initializer for Quark_Task_Flags_t */
#define Quark_Task_Flags_Initializer { (int)0, (int)-1, (char *)NULL, (char *)NULL, (void *)NULL, (int)1, (int)-1, (unsigned char *)NULL }
/* Set up scheduler data structures; assumes threads are managed separately */
Quark *QUARK_Setup(int num_threads);
/* Setup scheduler data structures, spawn worker threads, start the workers working */
Quark *QUARK_New(int num_threads);
/* Add a task, called by the master process (thread_rank 0) */
unsigned long long QUARK_Insert_Task(Quark * quark, void (*function) (Quark *), Quark_Task_Flags *task_flags, ...);
/* Main work loop, called externally by everyone but the master
* (master manages this internally to the insert_task and waitall
* routines). Each worker thread can call work_main_loop( quark,
* thread_rank), where thread rank is 1...NUMTHREADS ) */
void QUARK_Worker_Loop(Quark *quark, int thread_rank);
/* Finish work and return. Workers do not exit */
void QUARK_Barrier(Quark * quark);
/* Just wait for current tasks to complete; the scheduler and
 * structures remain as is, which should allow for repeated use of the
 * scheduler. The workers return from their loops. */
void QUARK_Waitall(Quark * quark);
/* Delete scheduler, shutdown threads, finish everything, free structures */
void QUARK_Delete(Quark * quark);
/* Free scheduling data structures */
void QUARK_Free(Quark * quark);
/* Cancel a specific task */
int QUARK_Cancel_Task(Quark *quark, unsigned long long taskid);
/* Returns a pointer to the list of arguments, used when unpacking the
arguments; returns a pointer to icl_list_t, so icl_list.h will need
to be included if you use this function */
void *QUARK_Args_List(Quark *quark);
/* Returns the rank of a thread in a parallel task */
int QUARK_Get_RankInTask(Quark *quark);
/* Return a pointer to an argument. The variable last_arg should be
NULL on the first call; each subsequent call will use
last_arg to get the next argument. */
void *QUARK_Args_Pop( void *args_list, void **last_arg);
/* Utility function returning rank of the current thread */
int QUARK_Thread_Rank(Quark *quark);
/* Packed task interface */
/* Create a task data structure to hold arguments */
Quark_Task *QUARK_Task_Init(Quark * quark, void (*function) (Quark *), Quark_Task_Flags *task_flags );
/* Add (or pack) the arguments into a task data structure (make sure of the correct order) */
void QUARK_Task_Pack_Arg( Quark *quark, Quark_Task *task, int arg_size, void *arg_ptr, int arg_flags );
/* Insert the packed task data strucure into the scheduler for execution */
unsigned long long QUARK_Insert_Task_Packed(Quark * quark, Quark_Task *task );
/* Unsupported function for debugging purposes; execute task AT ONCE */
unsigned long long QUARK_Execute_Task(Quark * quark, void (*function) (Quark *), Quark_Task_Flags *task_flags, ...);
/* Get the label (if any) associated with the current task; used for printing and debugging */
char *QUARK_Get_Task_Label(Quark *quark);
/* Method for setting task flags */
Quark_Task_Flags *QUARK_Task_Flag_Set( Quark_Task_Flags *flags, int flag, intptr_t val );
/* Type for task sequences */
typedef struct Quark_sequence_s Quark_Sequence;
/* Create a sequence structure, to hold sequences of tasks */
Quark_Sequence *QUARK_Sequence_Create( Quark *quark );
/* Called by worker, cancel any pending tasks, and mark sequence so that it does not accept any more tasks */
int QUARK_Sequence_Cancel( Quark *quark, Quark_Sequence *sequence );
/* Destroy a sequence structure, cancelling any pending tasks */
Quark_Sequence *QUARK_Sequence_Destroy( Quark *quark, Quark_Sequence *sequence );
/* Wait for a sequence of tasks to complete */
int QUARK_Sequence_Wait( Quark *quark, Quark_Sequence *sequence );
/* Get the sequence information associated the current task/worker, this was provided when the tasks was created */
Quark_Sequence *QUARK_Get_Sequence(Quark *quark);
/* Get the priority associated the current task/worker */
int QUARK_Get_Priority(Quark *quark);
/* Get information associated the current task and worker thread;
* Callable from within a task, since it works on the currently
* executing task */
intptr_t QUARK_Task_Flag_Get( Quark *quark, int flag );
/* Enable and disable DAG generation via API. Only makes sense after
* a sync, such as QUARK_Barrier. */
void QUARK_DOT_DAG_Enable( Quark *quark, int boolean_value );
/* Get the number_th bit in a bitset (unsigned char *); useful for QUARK_LOCK_TO_THREAD_MASK flag */
/* Read the number_th bit of the bitset (MSB-first within each byte).
 * Returns 0 or 1. Useful with the QUARK_LOCK_TO_THREAD_MASK flag. */
static inline int QUARK_Bit_Get(unsigned char *set, int number)
{
    unsigned char byte = set[number / 8];
    int bit_position = 7 - (number % 8);
    return (byte >> bit_position) & 1;
}
/* Set the number_th bit in a bitset (unsigned char *) to value (0 or 1); useful for QUARK_LOCK_TO_THREAD_MASK flag */
/* Write the number_th bit of the bitset (MSB-first within each byte) to
 * value (0 or 1). Useful with the QUARK_LOCK_TO_THREAD_MASK flag. */
static inline void QUARK_Bit_Set(unsigned char *set, int number, int value)
{
    unsigned char *byte = set + number / 8;
    unsigned char mask = (unsigned char)(1 << (7 - (number % 8)));
    if (value)
        *byte |= mask;            /* set bit */
    else
        *byte &= (unsigned char)~mask; /* clear bit */
}
#if defined(c_plusplus) || defined(__cplusplus)
}
#endif
#endif /* QUARK.H */
| {
"content_hash": "1fe884e35cbb6700b7d1d76889fcb173",
"timestamp": "",
"source": "github",
"line_count": 245,
"max_line_length": 138,
"avg_line_length": 39.00408163265306,
"alnum_prop": 0.7045835077438258,
"repo_name": "joao-lima/plasma-kaapi",
"id": "6e590d59d3e7177b23833a62334e2f108251ceab",
"size": "9556",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "quark/quark.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "8491080"
},
{
"name": "C++",
"bytes": "100356"
},
{
"name": "FORTRAN",
"bytes": "2823643"
},
{
"name": "JavaScript",
"bytes": "20219"
},
{
"name": "Objective-C",
"bytes": "31848"
},
{
"name": "Python",
"bytes": "7172"
},
{
"name": "Ruby",
"bytes": "6767"
},
{
"name": "Shell",
"bytes": "20350"
}
],
"symlink_target": ""
} |
<linker>
<assembly fullname="GoogleMobileAds.iOS" ignoreIfMissing="1">
<namespace fullname="GoogleMobileAds" preserve="all" />
<namespace fullname="GoogleMobileAds.iOS" preserve="all" />
</assembly>
<assembly fullname="GoogleMobileAds.Android" ignoreIfMissing="1">
<namespace fullname="GoogleMobileAds" preserve="all" />
<namespace fullname="GoogleMobileAds.Android" preserve="all" />
</assembly>
</linker>
| {
"content_hash": "31cbeddd5926052bbeb5de6c48e3c2d3",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 67,
"avg_line_length": 42.9,
"alnum_prop": 0.7319347319347319,
"repo_name": "googlecodelabs/admob-appopen-unity",
"id": "0b1e5fc1c72886827b01553cf8d88e0e2e7ca188",
"size": "429",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "complete/Assets/GoogleMobileAds/link.xml",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "23599"
},
{
"name": "Objective-C",
"bytes": "254"
}
],
"symlink_target": ""
} |
// Originally from UpdatedComponentTests/ResponseTypes/REST/Rest_Post_ListResponse_XML_Empty.xls;
package com.betfair.cougar.tests.updatedcomponenttests.responsetypes.rest;
import com.betfair.testing.utils.cougar.misc.XMLHelpers;
import com.betfair.testing.utils.JSONHelpers;
import com.betfair.testing.utils.cougar.assertions.AssertionUtils;
import com.betfair.testing.utils.cougar.beans.HttpCallBean;
import com.betfair.testing.utils.cougar.beans.HttpResponseBean;
import com.betfair.testing.utils.cougar.manager.CougarManager;
import com.betfair.testing.utils.cougar.manager.RequestLogRequirement;
import org.json.JSONObject;
import org.testng.annotations.Test;
import org.w3c.dom.Document;
import javax.xml.parsers.DocumentBuilderFactory;
import java.io.ByteArrayInputStream;
import java.sql.Timestamp;
/**
* Ensure that when a Rest (XML) Post operation is performed against Cougar, passing a Set with a blank entry in the post body, it is correctly de-serialized, processed, returned the correct response.
*/
public class RestPostListResponseXMLEmptyTest {
    /**
     * Posts an empty XML list body ({@code <inputList/>}) to the
     * simpleListGet operation and verifies that both the XML and JSON
     * responses contain an empty list with an HTTP 200 status, and that both
     * calls appear in the request log.
     */
    @Test
    public void doTest() throws Exception {
        // Fixed: removed a no-op self-assignment of the manager variable.
        CougarManager cougarManager = CougarManager.getInstance();
        HttpCallBean callBean = cougarManager.getNewHttpCallBean();
        callBean.setOperationName("TestSimpleListGet", "simpleListGet");
        callBean.setServiceName("baseline", "cougarBaseline");
        callBean.setVersion("v2");
        // An empty <inputList/> element models an empty list in the POST body.
        callBean.setRestPostQueryObjects(DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(new ByteArrayInputStream("<inputList/>".getBytes())));
        Timestamp requestTime = new Timestamp(System.currentTimeMillis());
        // Exercise the operation once expecting XML and once expecting JSON.
        cougarManager.makeRestCougarHTTPCall(callBean, com.betfair.testing.utils.cougar.enums.CougarMessageProtocolRequestTypeEnum.RESTXML, com.betfair.testing.utils.cougar.enums.CougarMessageContentTypeEnum.XML);
        cougarManager.makeRestCougarHTTPCall(callBean, com.betfair.testing.utils.cougar.enums.CougarMessageProtocolRequestTypeEnum.RESTXML, com.betfair.testing.utils.cougar.enums.CougarMessageContentTypeEnum.JSON);
        // Expected payloads: an empty response element and an empty JSON array.
        XMLHelpers xmlHelpers = new XMLHelpers();
        Document expectedXml = xmlHelpers.getXMLObjectFromString("<TestSimpleListGetResponse></TestSimpleListGetResponse>");
        JSONHelpers jsonHelpers = new JSONHelpers();
        JSONObject expectedJson = jsonHelpers.createAsJSONObject(new JSONObject("{\"response\": []}"));
        HttpResponseBean xmlResponse = callBean.getResponseObjectsByEnum(com.betfair.testing.utils.cougar.enums.CougarMessageProtocolResponseTypeEnum.RESTXMLXML);
        AssertionUtils.multiAssertEquals(expectedXml, xmlResponse.getResponseObject());
        AssertionUtils.multiAssertEquals((int) 200, xmlResponse.getHttpStatusCode());
        AssertionUtils.multiAssertEquals("OK", xmlResponse.getHttpStatusText());
        HttpResponseBean jsonResponse = callBean.getResponseObjectsByEnum(com.betfair.testing.utils.cougar.enums.CougarMessageProtocolResponseTypeEnum.RESTXMLJSON);
        AssertionUtils.multiAssertEquals(expectedJson, jsonResponse.getResponseObject());
        AssertionUtils.multiAssertEquals((int) 200, jsonResponse.getHttpStatusCode());
        AssertionUtils.multiAssertEquals("OK", jsonResponse.getHttpStatusText());
        // generalHelpers.pauseTest(500L);
        // Both invocations should be present in the request log.
        cougarManager.verifyRequestLogEntriesAfterDate(requestTime, new RequestLogRequirement("2.8", "testSimpleListGet"), new RequestLogRequirement("2.8", "testSimpleListGet"));
    }
}
| {
"content_hash": "38443597d821b07f2803871dfe31576c",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 226,
"avg_line_length": 53.7887323943662,
"alnum_prop": 0.7703587326525269,
"repo_name": "olupas/cougar",
"id": "93aa0f0fccd1ead7bf1e2a39aa500ddcbce2c871",
"size": "4432",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cougar-test/cougar-normal-code-tests/src/test/java/com/betfair/cougar/tests/updatedcomponenttests/responsetypes/rest/RestPostListResponseXMLEmptyTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "83394"
},
{
"name": "HTML",
"bytes": "23595"
},
{
"name": "Java",
"bytes": "9535986"
},
{
"name": "Shell",
"bytes": "19139"
},
{
"name": "XSLT",
"bytes": "57361"
}
],
"symlink_target": ""
} |
---
type: post102
title: JDBC Driver
categories: XAP102
parent: other-data-access-apis.html
weight: 100
---
{{% ssummary%}}{{% /ssummary %}}
The XAP JDBC interface allows database-driven applications to interact with spaces via SQL queries and commands. A query processor transparently translates SQL queries into legal space operations. No integration is required - all you need to do is point the application to the GigaSpaces JDBC driver like any other JDBC driver.
Applications can access the XAP Data Grid using the JDBC API; data written to the IMDG using the JDBC API can also be accessed using other APIs.
{{% note %}}
An alternative way of querying the space using SQL syntax is the [SQLQuery](./query-sql.html) class. This class allows you to perform SQL queries directly against space objects, without adding O/R mapping complexity.
{{% /note %}}
JDBC support in XAP is centered around the Space-Based Architecture - its main motivation is to enable more sophisticated querying of the space, beyond the template matching provided by the [The GigaSpace Interface](./the-gigaspace-interface.html).
GigaSpaces is not a full-fledged relational database and it does not support the full SQL92 standard (see [JDBC Supported Features](#supported-features)). However, the existing SQL support is extremely useful for applications that need to execute queries on a space for real-time queries.
{{% tip %}}
You can use the [SQL Command Line]({{%currentadmurl%}}/space-gigaspaces-cli.html) to query and fetch data from the IMDG. The SQL Command Line using the GigaSpaces JDBC Driver when accessing the IMDG.
{{% /tip %}}
## Using Existing SQL Code and Porting to External Systems
The JDBC interface is mostly used to enable access to the space through standard SQL tools and programming interfaces. You can write SQL commands against the space, and the same code will in many (simple) cases be compatible with other SQL implementations.
Porting existing JDBC code to the space is certainly doable (but would require some level of adaptation depending on the specifics of the case and the complexity of the SQL queries. For legacy applications, it may still be easier than porting existing code to leverage the space technology directly. Since the SQL support is limited, this path should be taken with caution.
# Getting the GigaSpaces JDBC connection
In order to get the XAP JDBC connection you should use the following JDBC Driver classname:
```java
Class.forName("com.j_spaces.jdbc.driver.GDriver");
```
The connection URL should include :`jdbc:gigaspaces:url:<Space URL>` -- e.g.:
```java
Connection con = DriverManager.getConnection("jdbc:gigaspaces:url:jini://*/*/mySpace");
```
{{% tip %}}
You may use the GigaSpaces JDBC driver with a remote or an embedded space.
{{% /tip %}}
{{% refer %}}For more details on the Space URL, refer to the [Space URL](./the-space-configuration.html) section.{{% /refer %}}
Example:
```java
Connection conn;
Class.forName("com.j_spaces.jdbc.driver.GDriver").newInstance();
String url = "jdbc:gigaspaces:url:jini://*/*/mySpace";
conn = DriverManager.getConnection(url);
Statement st = conn.createStatement();
String createTable = "CREATE TABLE COFFEES (COF_NAME VARCHAR(32) INDEX,SUP_ID INTEGER INDEX, " +
"PRICE FLOAT INDEX,SALES INTEGER INDEX,TOTAL INTEGER)";
st.executeUpdate(createTable);
String query = "SELECT COF_NAME, PRICE FROM COFFEES";
st = conn.createStatement();
ResultSet rs = st.executeQuery(query);
while (rs.next()) {
String s = rs.getString("COF_NAME");
float n = rs.getFloat("PRICE");
System.out.println(s + " " + n);
}
```
{{% tip %}}
There is no need to deal with JDBC connection pooling when using the GigaSpaces JDBC driver.
{{% /tip %}}
### Embedding the Query Processor within the application
By default, the Query Processor is running server side.
It is possible to set the Query Processor to run embedded within the application by passing a parameter to the JDBC driver:
```java
Class.forName("com.j_spaces.jdbc.driver.GDriver").newInstance();
String url = "jdbc:gigaspaces:url:jini://*/*/mySpace";
Properties properties = new Properties();
properties.put("com.gs.embeddedQP.enabled", "true");
conn = DriverManager.getConnection(url, properties);
```
{{% tip %}}
It is also possible to set the "com.gs.embeddedQP.enabled" connection property as a System property (connection property overrides the system property).
{{% /tip %}}
# Transaction Support
GigaSpaces JDBC Driver supports the following transaction managers:
- Local Transaction Manager
- Distributed embedded Jini Transaction Manager (default)
- Lookup Distributed Transaction Manager
The transaction manager type can be configured via JDBC's connection properties (there are additional properties for lookup distributed tx manager):
|Property|Description|
|:-------|:----------|
| gs.tx_manager_type | Transaction manager type: "local"/"distributed"/"lookup_distributed" |
| gs.lookup_tx.name | Lookup service name |
| gs.lookup_tx.timeout | Lookup timeout (default=3000) |
| gs.lookup_tx.groups | Lookup groups |
| gs.lookup_tx.locators | Lookup locators |
Transaction Manager Type Configuration Example:
```java
Class.forName("com.j_spaces.jdbc.driver.GDriver");
Properties props = new Properties();
props.put("gs.tx_manager_type", "distributed");
Connection conn = DriverManager.getConnection("jdbc:gigaspaces:url:jini://*/*/mySpace", props);
```
# Getting JDBC connection from a Space Proxy
You can get a GigaSpaces JDBC connection from a space proxy using the `com.j_spaces.jdbc.driver.GConnection`. See below example:
```java
IJSpace gsSpaceProxy; //your space proxy. You can get it using GigaSpace.getSpace()
GConnection connection = GConnection.getInstance(gsSpaceProxy);
connection.setUseSingleSpace(true); //false = cluster, true=single
```
The `setUseSingleSpace` method allows you to get a JDBC connection that encapsulates a clustered proxy or to an embedded space proxy.
You can also use the following `GConnection` method to set the user and password for a secured space:
```java
public static Connection getInstance(IJSpace space, String username, String password)
throws SQLException
```
# Mixing Space API with the JDBC API
The following example combines the Space API [DistributedTask](./task-execution-over-the-space.html) with the JDBC API. In this example we use a map/reduce approach to query the space using the JDBC API, but we send the JDBC query to be executed within the space. This approach scales very well once the space has multiple partitions, because it avoids the need to retrieve the actual space objects from the space to evaluate the query. Retrieving objects from the space involves network latency and serialization overhead.
With the example below we execute the following query:
```java
Select FIELD from CLASS group by FIELD sort by FIELD
```
The query is executed in two phases:
Step 1. A `DistributedTask` is sent to each partition to execute the following JDBC query:
```java
Select FIELD from CLASS group by FIELD
```
The result is then sent into the reducer running at the client side.
Step 2. The `DistributedTask.reduce` method runs at the client side, aggregating the results from all the partitions and sorting the final set.
```java
/**
 * A DistributedTask that runs a JDBC query on every partition (map phase) and
 * merges the per-partition results into a distinct, sorted array (reduce phase,
 * executed at the client side).
 */
public class JDBCTask implements DistributedTask<String[], String[]>{
    // The JDBC query to execute on each partition; set at the client and serialized to the space.
    String queryStr;

    // Injected by the space at execution time with the collocated proxy; not serialized.
    @TaskGigaSpace
    transient GigaSpace gigaspace;

    public JDBCTask(String queryStr){
        this.queryStr=queryStr;
    }

    /**
     * Executes the query against the collocated partition and returns the values
     * of the first selected column. Resources are released even if the query fails.
     */
    public String[] execute() throws Exception{
        ArrayList<String> result = new ArrayList<String>();
        Connection con = getConnection();
        try {
            Statement stmt = con.createStatement();
            try {
                ResultSet rs = stmt.executeQuery(queryStr);
                while (rs.next()) {
                    result.add(rs.getString(1));
                }
            } finally {
                stmt.close();
            }
        } finally {
            // Always close the connection (the original leaked it on query failure).
            con.close();
        }
        return result.toArray(new String[result.size()]);
    }

    /**
     * Client-side reducer: merges the arrays returned by each partition into a
     * distinct set and returns it sorted.
     */
    public String[] reduce(List<AsyncResult<String[]>> results) throws Exception {
        HashSet<String> merged = new HashSet<String>();
        for (AsyncResult<String[]> partitionResult : results) {
            for (String value : partitionResult.getResult()) {
                merged.add(value);
            }
        }
        String[] fullResult = merged.toArray(new String[merged.size()]);
        Arrays.sort(fullResult);
        return fullResult;
    }

    /**
     * Opens a JDBC connection over the collocated (single/embedded) space proxy.
     * Fails fast on error instead of returning null — the original printed the
     * exception and returned null, which caused a NullPointerException later in
     * execute().
     */
    public Connection getConnection()
    {
        try {
            // Ensure the GigaSpaces JDBC driver class is loaded.
            Class.forName("com.j_spaces.jdbc.driver.GDriver");
            GConnection connection = GConnection.getInstance(gigaspace.getSpace());
            connection.setUseSingleSpace(true); //false = cluster, true=single
            return connection;
        } catch(java.lang.ClassNotFoundException e) {
            throw new IllegalStateException("GigaSpaces JDBC driver not on classpath", e);
        } catch(SQLException ex) {
            throw new IllegalStateException("Failed to create space JDBC connection: " + ex.getMessage(), ex);
        }
    }
}
```
```java
// Connect to the remote space and submit the JDBCTask: the map phase runs the
// "group by" query on every partition and reduce() merges and sorts the results.
GigaSpace gigaspace = new GigaSpaceConfigurer(new SpaceProxyConfigurer("mySpace")).gigaSpace();
AsyncFuture<String[]> result = gigaspace.execute(new JDBCTask("select str from " + MyClass.class.getName() + " group by str"));
String[] result_str = result.get();
System.out.println("The Result:" + Arrays.asList(result_str));
```
# SQL to Java Type Mapping
The GigaSpaces JDBC Driver translates a Space object into a relational table representation at runtime.
- All Java class attributes are translated into their corresponding SQL types.
- Class names are translated into table names.
- Field names are translated into column names.
- Indexed columns are translated into indexed fields. Make sure the `btree` index is turned on allowing fast processing of bigger than/less than queries. For more details, refer to the [Indexing](./indexing.html#Extended Indexing) section.
The following information represents the SQL to Java mapping conducted at runtime when a table is created via the JDBC driver.
|SQL Type|Java Type|
|:-------|:--------|
| VARCHAR, VARCHAR2 | java.lang.String |
| CHAR | java.lang.String |
| DATE | java.sql.Date |
| DATE | java.sql.Timestamp |
| TIME | java.sql.Time |
| FLOAT | java.lang.Float |
| REAL | java.lang.Float |
| NUMERIC | java.math.BigDecimal |
| DECIMAL | java.math.BigDecimal |
| DOUBLE | java.lang.Double |
| BOOLEAN | java.lang.Boolean |
| INTEGER | java.lang.Integer |
| TIMESTAMP | java.sql.Timestamp |
| LONG | java.lang.Long |
| BLOB | com.gigaspaces.jdbc.driver.Blob |
| CLOB | com.gigaspaces.jdbc.driver.Clob |
# Supported Features
**XAP JDBC supports the following**:
- All Basic SQL statements: `SELECT, INSERT, DELETE, UPDATE, CREATE TABLE, DROP TABLE`.
- `AND/OR` operators to join two or more conditions in a `WHERE` clause.
- `NOT IN` and `NOT NULL`
- Aggregate functions: `COUNT`, `MAX`, `MIN`, `SUM`, `AVG`.
- All basic logical operations to create conditions: =, <>, <,>, >=, <=, `[NOT]` like, is `[NOT]` `null`, `IN`, `BETWEEN`.
- Nested fields query - You may use as part of the select statement nested fields within collections (maps) or objects within the Space object.
- Multiple tables `Join` - Join supports the selection of multiple tables. It uses the Cartesian product of the tables data to form the result set. The join will perform well when having tables with small/medium size (up to 1,000,000 rows).
- `ORDER BY` for multiple columns.
- `GROUP BY` for multiple columns.
- `DISTINCT` (although not when used with functions or aggregations)
- Column aliases.
{{%accordion%}}
{{%accord title="Click here for example..."%}}
```java
Connection conn;
Statement stmt = conn.createStatement();
ResultSet rs = stmt.executeQuery("SELECT ID AS Identifier, NAME AS FullName FROM PERSON WHERE Identifier = 210");
```
{{%/accord%}}
{{%/accordion%}}
- Table aliases -- tables are allowed to use aliases throughout the query.
- Using a sub-query in the `FROM` clause.
{{%accordion%}}
{{%accord title="Click here for example..."%}}
```java
Connection conn;
Statement stmt = conn.createStatement();
ResultSet rs = stmt.executeQuery("SELECT * FROM STUDENT WHERE GRADE >= (SELECT AVG(GRADE) FROM STUDENT)");
```
{{%warning%}}
Joined sub-queries are not supported.
{{%/warning%}}
{{%/accord%}}
{{%/accordion%}}
- `sysdate` - a keyword suggesting current time and date.
- `rownum` - a keyword to use in `WHERE` clauses, setting the number of rows to select.
- Select for update -- allowing the locking of rows in order to update them later.
- Remote and embedded query processes configuration -- allows fast access to the space using embedded mode.
- Optimistic locking.
- Batch Processing.
- Increment a field in an UPDATE statement.
{{%accordion%}}
{{%accord title="Click here for example..."%}}
```java
Connection conn;
Statement stmt = conn.createStatement();
int result = stmt.executeUpdate("UPDATE PERSON SET VERSION = VERSION + 1 WHERE ID = 10000");
```
{{%warning%}}
Field incrementing is only supported for `Integer` fields using a '+' operator.
{{%/warning%}}
{{%/accord%}}
{{%/accordion%}}
- A statement caching mechanism is provided to speed up statement parsing.
- Meta Data API.
- Connection pool.
- All JDBC basic types including Blob and Clob.
# Regular Expression
GigaSpaces XAP support query using regular expression. You may use `like` or `rlike` expressions with your JDBC queries.
{{% note %}}
It is important to index `String` type fields used with regular expression queries. Not indexing such fields may result in slow query execution and garbage creation.
{{% /note %}}
When using the SQL `like` operator you may use the following:
`%` - match any string of any length (including zero length)
`_` - match on a single character
Querying the space using the [Java Regular Expression](http://docs.oracle.com/javase/{{%version "java-version"%}}/docs/api/java/util/regex/Pattern.html) provides more options than the SQL `like` operator. The Query syntax is done using the `rlike` operator.
When you search for space objects with String fields that includes a **single quote** your query should use Parameterized Query - with the following we are searching for all `Data` objects that include the value `today's` with their `myTextField`:
```java
PreparedStatement st = con.prepareStatement("select id,text,text2 from MyData WHERE text rlike ?");
st.setString(1, "today\u0027s.*");
ResultSet rs = st.executeQuery();
```
# Indexing
It is highly recommended to use indexes on relevant properties to increase performance when using equality, bigger/less than, BETWEEN, IN, LIKE, NOT LIKE, and IS NULL statements. For more information see [Indexing](./indexing.html). The supported query features above can leverage indexes, except for the `is NOT null` and `NOT IN` statements.
# Partitioning Support
In order to partition the data and route operations to the correct partition you should specify a "routing column" for each table. The "routing column" is specified through one of three mechanisms:
1. A POJO with a [@SpaceRouting](./modeling-your-data.html) field can be sent to the space via the `snapshot` call prior to calling the JDBC API.
2. Create the table through JDBC; the first index as part of the CREATE TABLE statement will be the routing field.
3. If there is no indexed column, the first column as part of the CREATE TABLE will be the routing field.
# Nested Field Query
You may use as part of the JDBC select statement nested fields. These could be a Map type fields or user defined data type fields within the Space object. See below example for a space class with a nested Map and a nested object fields. Both are indexed:
```java
// Nested payload type used inside MyClass, both directly and as map values.
// Implements Serializable so it can be marshalled with the enclosing space object.
public class MyData implements Serializable{
public MyData(){}
String data1;
String data2;
// getter and setter methods
}
```
```java
// Example space class demonstrating nested-field indexing: both the map values
// and the nested MyData object are indexed, so JDBC queries on paths such as
// map.key1.data1 or data.data1 can use the indexes.
@SpaceClass
public class MyClass {
public MyClass (){}
Integer num;
String str;
Integer id;
HashMap<String, MyData> map; // a map within the space object
MyData data; // an object within the space object
// Simple indexed property.
@SpaceIndex (type=SpaceIndexType.BASIC)
public Integer getNum() {
return num;
}
public void setNum(Integer num) {
this.num = num;
}
// Simple indexed property.
@SpaceIndex (type=SpaceIndexType.BASIC)
public String getStr() {
return str;
}
public void setStr(String str) {
this.str = str;
}
// Space ID doubles as the routing field: it determines the partition each
// instance is written to.
@SpaceId (autoGenerate = false)
@SpaceRouting
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
// Index the nested values stored under map keys "key1" and "key2".
@SpaceIndexes({@SpaceIndex(path="key1" , type = SpaceIndexType.BASIC),
@SpaceIndex(path="key2" , type = SpaceIndexType.BASIC)})
public HashMap<String, MyData> getMap() {
return map;
}
public void setMap(HashMap<String, MyData> map) {
this.map = map;
}
// Index the data1/data2 fields of the nested MyData object.
@SpaceIndexes({@SpaceIndex(path="data1" , type = SpaceIndexType.BASIC),
@SpaceIndex(path="data2" , type = SpaceIndexType.BASIC)})
public MyData getData() {
return data;
}
public void setData(MyData data) {
this.data = data;
}
}
```
Here is an example for a JDBC query call you may use with the above Space object. Both the nested Map and nested object fields are used with the JDBC query below:
```sql
String queryString =
"select uid,
map.key1.data1, map.key1.data2,
map.key2.data1, map.key2.data2,
data.data1, data.data2,str
from
org.test.MyClass where
map.key1.data1='aa' and
map.key1.data2='bb' and
map.key2.data1='cc' and
map.key2.data2='dd' and
data.data1 = 'ee' and
data.data2 = 'ff' and
str='ABCD'";
stmt = connection.createStatement();
ResultSet rs = stmt.executeQuery(queryString );
while (rs.next()) {
...
}
```
# Unsupported Features
GigaSpaces JDBC Driver does not support the following:
- The SQL statements: `HAVING`, `VIEW`, `TRIGGERS`, `EXISTS`, `BETWEEN` in collections, `NOT`, `CREATE USER`, `GRANT`, `REVOKE`, `SET PASSWORD`, `CONNECT USER`, `ON`.
- `CREATE` Database.
- `CREATE` Index, `DROP` Index.
- Constraints: `NOT NULL`, `IDENTITY`, `UNIQUE`, `PRIMARY KEY`, Foreign Key `REFERENCES`, `NO ACTION`, `CASCADE`, `SET NULL`, `SET DEFAULT`, `CHECK`.
- Set operations: `Union, Minus, Union All`.
- Aggregate Functions: `STDEV`, `STDEVP`, `VAR`, `VARP`, `FIRST`, `LAST`.
- The `UPDATE` statement does not allow the use of an expression or a `null` value in the `SET` clause.
- Using a constant instead of the column name.
- The `INSERT` statement does not allow the use of an expression in the `VALUES` clause.
- "." used to indicate a double data type.
- Using mathematical expressions in the `WHERE` clause, however the [Aggregators](./aggregators.html) functionality can be used.
- Using a non constant right-hand side comparison operator. This can be implemented via [Custom Aggregation](./aggregators.html#custom-aggregation).
- `LEFT [OUTER] JOIN`
- `RIGHT [OUTER] JOIN`
- `[INNER] JOIN`
- Statement::setFetchSize()
{{% tip %}}
When having `SELECT count (*) FROM myClass` JDBC query -- `myClass` sub classes object count are not taken into consideration when processing the query result. The `SELECT count (*) FROM myClass WHERE X=Y` and `SELECT (*) from myClass` do take into consideration `myClass` sub classes objects when processing the result. Future versions will resolve this inconsistency.
As a workaround, construct a JDBC query that includes a relevant `WHERE` part.
{{% /tip %}}
# JDBC Reserved Words
Here is a list of JDBC reserved keywords, data types, separators and operators:
## Keywords
```java
ALTER ADD AND ASC BETWEEN BY CREATE CALL DROP DEFAULT_NULL DESC DISTINCT
END FROM GROUP IN IS LIKE RLIKE MAX MIN NOT NULL OR ORDER SELECT SUBSTR SUM SYSDATE
UPPER WHERE COUNT DELETE EXCEPTION ROWNUM INDEX INSERT INTO SET TABLE TO_CHAR
TO_NUMBER FOR_UPDATE UPDATE UNION VALUES COMMIT ROLLBACK PRIMARY_KEY UID USING
```
## Data Types
```java
date datetime time float double number decimal boolean integer varchar varchar2
char timestamp long clob blob empty_clob() empty_blob() lob true false
```
## Separators and operators
```java
:= || ; . ROWTYPE ~ < <= > >= => != <> \(+\) ( ) \* / + - ? \{ \}
```
# Configuration
The JDBC Driver should be configured using the following properties. These should be part of the [The Space Component](./the-space-configuration.html#proxy) configuration when deployed:
| Parameter | Description | Default Value |
|:----------|:------------|:--------------|
|space-config.QueryProcessor.space_read_lease_time|Read timeout. Millisec units.|0|
|space-config.QueryProcessor.space_write_lease|Object lease timeout. Millisec units. |9223372036854775807L|
|space-config.QueryProcessor.transaction_timeout|Millisec unit. Transaction Timeout.|30000|
|space-config.QueryProcessor.init_jmx|Expose Tracing via JMX|false|
|space-config.QueryProcessor.trace_exec_time|Enable Tracing |false|
|space-config.QueryProcessor.debug_mode|Boolean value. When true show debug information.|false|
|<nobr>space-config.QueryProcessor.parser_case_sensetivity</nobr>|Boolean value. Determines if Column and Table names are case sensitive.|true|
|space-config.QueryProcessor.auto_commit|Boolean value. Auto Commit mode|true|
|space-config.QueryProcessor.date_format| Date Format|yyyy-MM-dd|
|space-config.QueryProcessor.datetime_format| DateTime Format|yyyy-MM-dd hh:mm:ss|
|space-config.QueryProcessor.time_format| Time Format|hh:mm:ss|
Example:
```xml
<os-core:embedded-space id="space" name="space" >
<os-core:properties>
<props>
<prop key="space-config.QueryProcessor.transaction_timeout">50000</prop>
<prop key="space-config.QueryProcessor.date_format">yyyy-MM-dd</prop>
</props>
</os-core:properties>
</os-core:embedded-space>
```
# JDBC Error Codes
List of JDBC error codes and their descriptions:
```bash
`100`: No (more) data
`0`: Successful Completion
- `101`: Can't alter table
- `102`: Table `<tableName>` does not exist
- `103`: Commit/Rollback failed
- `104`: Can't delete row
- `105`: Table does not exist
- `106`: Remote Exception occurred
- `107`: Failed to drop table
- `108`: All values must be set in a Prepared Statement
- `109`: Prepared value already set!
- `110`: Prepared value missing!
- `111`: Invalid data
- `112`: Invalid type for the specified column
- `113`: Unknown columns
- `114`: Unknown table in condition
- `115`: Can't set same value more than once
- `116`: Unknown column for IN condition
- `117`: Unknown execution type
- `118`: Table already exists
- `119`: Wrong data type in SUM function
- `120`: Error in rownum
- `121`: Select failed
- `122`: The selected column does not exist in the selected tables
- `123`: No such column for given alias
- `124`: Order by column should be in select list
- `125`: Must specify the column to return the sum of.
- `126`: All values must be set
- `127`: Wrong type for given column
- `128`: Incorrect number of values to insert
- `129`: Type mismatch in nested query
- `130`: Can't update row!
- `131`: Blob cannot hold `null` data
- `132`: Command not supported
- `133`: Both parameters should be greater than 1
- `134`: Clob cannot hold `null` data
- `135`: Can't convert clob to ascii, unsupported encoding
- `136`: Substring out of clob's bounds
- `137`: Error creating connection - Unknown host
- `138`: Error creating connection or reading QP properties
- `139`: Cannot commit an autocommit connection
- `140`: Cannot rollback an autocommit connection
- `141`: The given URL is not supported
- `142`: Prepared statement must contain prepared values
- `143`: Cannot call `SELECT` with `executeUpdate`. Use `executeQuery` instead
- `144`: Cannot set a `null` object
- `145`: Cannot set object, unknown type
- `146`: Used `executeQuery` instead of `executeUpdate`
- `147`: Cannot set a value
- `148`: Cannot represent this value as `byte`
- `149`: Cannot represent this value as `double`
- `150`: Cannot represent this value as `float`
- `151`: Cannot represent this value as `int`
- `152`: Cannot represent this value as `long`
- `153`: Cannot represent this value as `short`
- `154`: Cannot represent this value as boolean
- `155`: Column found in result
- `156`: Cannot represent this value as `Blob`
- `157`: Cannot represent this value as `Clob`
- `158`: Cannot represent this value as `Date`
- `159`: Cannot represent this value as `Time`
- `160`: Cannot represent this value as `Timestamp`
- `161`: The `next()` method must be called at least once
- `162`: Exhausted `ResultSet`
- `201`: Invalid SQL syntax
``` | {
"content_hash": "16a1bef4251362eee8c3831dbf4b0fd7",
"timestamp": "",
"source": "github",
"line_count": 663,
"max_line_length": 511,
"avg_line_length": 36.788838612368025,
"alnum_prop": 0.7343282358246894,
"repo_name": "croffler/documentation",
"id": "cc4275082a51142d775f0d40753c0b5479343b7e",
"size": "24391",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sites/xap-docs/content/xap102/jdbc-driver.markdown",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
<!doctype html>
<!--[if lte IE 8]><SCRIPT src='source/excanvas.js'></script><![endif]--><SCRIPT src='../ChartNew.js'></script>
<SCRIPT>
// Canvas dimensions (pixels) consumed by the document.write() calls in the body.
defCanvasWidth=1200;
defCanvasHalfWidth=400;
defCanvasHeight=600;
// One chart segment per dataset entry; a single empty label is shared by all.
var mydata2 = {
labels : [""],
datasets : [
{
data : [30],
fillColor : "#D97041",
title : "data1"
},
{
data : [90],
fillColor : "#C7604C",
title : "data2"
},
{
data : [24],
fillColor : "#21323D",
title : "data3"
},
{
data : [58],
fillColor : "#9D9B7F",
title : "data4"
},
{
data : [82],
fillColor : "#7D4F6D",
title : "data5"
},
{
data : [8],
fillColor : "#584A5E",
title : "data6"
}
]
};
// opt1..opt4 are identical apart from startAngle: each limits the chart to a
// 180-degree amplitude drawn from a different starting angle
// (semantics per ChartNew.js options — confirm against its documentation).
var opt1 = {
inGraphDataShow : true,
canvasBorders : true,
canvasBordersWidth : 3,
canvasBordersColor : "black",
graphTitle : "Graph Title",
startAngle : 180,
totalAmplitude : 180
}
var opt2 = {
inGraphDataShow : true,
canvasBorders : true,
canvasBordersWidth : 3,
canvasBordersColor : "black",
graphTitle : "Graph Title",
startAngle : 0,
totalAmplitude : 180
}
var opt3 = {
inGraphDataShow : true,
canvasBorders : true,
canvasBordersWidth : 3,
canvasBordersColor : "black",
graphTitle : "Graph Title",
startAngle : 90,
totalAmplitude : 180
}
var opt4 = {
inGraphDataShow : true,
canvasBorders : true,
canvasBordersWidth : 3,
canvasBordersColor : "black",
graphTitle : "Graph Title",
startAngle : -90,
totalAmplitude : 180
}
</SCRIPT>
<html>
<meta http-equiv="Content-Type" content="text/html;charset=utf-8" />
<head>
<title>Demo ChartNew.js</title>
</head>
<body>
<!-- <div id="divCursor" style="position:absolute"></div> -->
<center>
<FONT SIZE=6><B>Demo of ChartNew.js !</B></FONT> <BR>
<script>
// Emit the five chart canvases; dimensions come from the defaults defined in the header script.
document.write("<canvas id=\"canvas_polararea\" height=\""+defCanvasHeight+"\" width=\""+defCanvasWidth+"\"></canvas>");
document.write("<canvas id=\"canvas_pie\" height=\""+defCanvasHeight+"\" width=\""+defCanvasWidth+"\"></canvas>");
document.write("<canvas id=\"canvas_doughnut\" height=\""+defCanvasHeight+"\" width=\""+defCanvasWidth+"\"></canvas>");
document.write("<canvas id=\"canvas_pie2\" height=\""+defCanvasHeight+"\" width=\""+defCanvasHalfWidth+"\"></canvas>");
document.write("<canvas id=\"canvas_doughnut2\" height=\""+defCanvasHeight+"\" width=\""+defCanvasHalfWidth+"\"></canvas>");
// Draw all five charts once the page (and ChartNew.js) has loaded.
window.onload = function() {
// NOTE(review): if(1==1) is always true — looks like a leftover debug toggle.
if(1==1) {
var myLine = new Chart(document.getElementById("canvas_polararea").getContext("2d")).PolarArea(mydata2,opt1);
var myLine = new Chart(document.getElementById("canvas_pie").getContext("2d")).Pie(mydata2,opt1);
var myLine = new Chart(document.getElementById("canvas_doughnut").getContext("2d")).Doughnut(mydata2,opt2);
var myLine = new Chart(document.getElementById("canvas_pie2").getContext("2d")).Pie(mydata2,opt3);
var myLine = new Chart(document.getElementById("canvas_doughnut2").getContext("2d")).Doughnut(mydata2,opt4);
}
}
</script>
</body>
</html>
| {
"content_hash": "769f44700572ced33ccb93ec1baeaed4",
"timestamp": "",
"source": "github",
"line_count": 138,
"max_line_length": 130,
"avg_line_length": 24.347826086956523,
"alnum_prop": 0.5699404761904762,
"repo_name": "FVANCOP/ChartNew.js",
"id": "cac6f8c98ce143974e6021b8ca6b0079bb59bafb",
"size": "3360",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Samples/half_pie.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3259"
},
{
"name": "HTML",
"bytes": "16115"
},
{
"name": "JavaScript",
"bytes": "1501237"
}
],
"symlink_target": ""
} |
namespace GuruComponents.Netrix.Events
{
/// <summary>
/// Represents the method that will handle the BeforeSnap event.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">A <see cref="BeforeSnapRectEventArgs"/> that contains the event data.</param>
public delegate void BeforeSnapRectEventHandler(object sender, BeforeSnapRectEventArgs e);
} | {
"content_hash": "481d4f0fdd4bc544b3d786ef266511a7",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 94,
"avg_line_length": 32.57142857142857,
"alnum_prop": 0.7324561403508771,
"repo_name": "joergkrause/netrix",
"id": "d6f06a908739eda5f1d29f750ee85acb1f8c903e",
"size": "228",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Netrix2.0/Core/Events/BeforeSnapRectEventHandler.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "74972"
},
{
"name": "C",
"bytes": "57184"
},
{
"name": "C#",
"bytes": "18966484"
},
{
"name": "C++",
"bytes": "181221"
},
{
"name": "CSS",
"bytes": "7533"
},
{
"name": "Clean",
"bytes": "7372"
},
{
"name": "HTML",
"bytes": "733559"
},
{
"name": "Objective-C",
"bytes": "1404"
},
{
"name": "Pascal",
"bytes": "177323"
},
{
"name": "Smalltalk",
"bytes": "57995"
},
{
"name": "Visual Basic",
"bytes": "502969"
},
{
"name": "XSLT",
"bytes": "685482"
}
],
"symlink_target": ""
} |
# Associations
This section describes the various association types in sequelize. When calling a method such as `User.hasOne(Project)`, we say that the `User` model (the model that the function is being invoked on) is the __source__ and the `Project` model (the model being passed as an argument) is the __target__.
## One-To-One associations
One-To-One associations are associations between exactly two models connected by a single foreign key.
### BelongsTo
BelongsTo associations are associations where the foreign key for the one-to-one relation exists on the **source model**.
A simple example would be a **Player** being part of a **Team** with the foreign key on the player.
```js
const Player = this.sequelize.define('player', {/* attributes */});
const Team = this.sequelize.define('team', {/* attributes */});
Player.belongsTo(Team); // Will add a teamId attribute to Player to hold the primary key value for Team
```
#### Foreign keys
By default the foreign key for a belongsTo relation will be generated from the target model name and the target primary key name.
The default casing is `camelCase` however if the source model is configured with `underscored: true` the foreignKey will be `snake_case`.
```js
const User = this.sequelize.define('user', {/* attributes */})
const Company = this.sequelize.define('company', {/* attributes */});
User.belongsTo(Company); // Will add companyId to user
const User = this.sequelize.define('user', {/* attributes */}, {underscored: true})
const Company = this.sequelize.define('company', {
uuid: {
type: Sequelize.UUID,
primaryKey: true
}
});
User.belongsTo(Company); // Will add company_uuid to user
```
In cases where `as` has been defined it will be used in place of the target model name.
```js
const User = this.sequelize.define('user', {/* attributes */})
const UserRole = this.sequelize.define('userRole', {/* attributes */});
User.belongsTo(UserRole, {as: 'role'}); // Adds roleId to user rather than userRoleId
```
In all cases the default foreign key can be overwritten with the `foreignKey` option.
When the foreign key option is used, Sequelize will use it as-is:
```js
const User = this.sequelize.define('user', {/* attributes */})
const Company = this.sequelize.define('company', {/* attributes */});
User.belongsTo(Company, {foreignKey: 'fk_company'}); // Adds fk_company to User
```
#### Target keys
The target key is the column on the target model that the foreign key column on the source model points to. By default the target key for a belongsTo relation will be the target model's primary key. To define a custom column, use the `targetKey` option.
```js
const User = this.sequelize.define('user', {/* attributes */})
const Company = this.sequelize.define('company', {/* attributes */});
User.belongsTo(Company, {foreignKey: 'fk_companyname', targetKey: 'name'}); // Adds fk_companyname to User
```
### HasOne
HasOne associations are associations where the foreign key for the one-to-one relation exists on the **target model**.
```js
const User = sequelize.define('user', {/* ... */})
const Project = sequelize.define('project', {/* ... */})
// One-way associations
Project.hasOne(User)
/*
In this example hasOne will add an attribute projectId to the User model!
Furthermore, Project.prototype will gain the methods getUser and setUser according
to the first parameter passed to define. If you have underscore style
enabled, the added attribute will be project_id instead of projectId.
The foreign key will be placed on the users table.
You can also define the foreign key, e.g. if you already have an existing
database and want to work on it:
*/
Project.hasOne(User, { foreignKey: 'initiator_id' })
/*
Because Sequelize will use the model's name (first parameter of define) for
the accessor methods, it is also possible to pass a special option to hasOne:
*/
Project.hasOne(User, { as: 'Initiator' })
// Now you will get Project#getInitiator and Project#setInitiator
// Or let's define some self references
const Person = sequelize.define('person', { /* ... */})
Person.hasOne(Person, {as: 'Father'})
// this will add the attribute FatherId to Person
// also possible:
Person.hasOne(Person, {as: 'Father', foreignKey: 'DadId'})
// this will add the attribute DadId to Person
// In both cases you will be able to do:
Person#setFather
Person#getFather
// If you need to join a table twice you can double join the same table
Team.hasOne(Game, {as: 'HomeTeam', foreignKey : 'homeTeamId'});
Team.hasOne(Game, {as: 'AwayTeam', foreignKey : 'awayTeamId'});
Game.belongsTo(Team);
```
Even though it is called a HasOne association, for most 1:1 relations you usually want the BelongsTo association, since BelongsTo will add the foreignKey on the source, whereas HasOne will add it on the target.
### Difference between HasOne and BelongsTo
In Sequelize, a 1:1 relationship can be set using HasOne and BelongsTo. They are suitable for different scenarios. Let's study this difference using an example.
Suppose we have two tables to link: **Player** and **Team**. Let's define their models.
```js
const Player = this.sequelize.define('player', {/* attributes */})
const Team = this.sequelize.define('team', {/* attributes */});
```
When we link two models in Sequelize we can refer them as pairs of **source** and **target** models. Like this
Having **Player** as the **source** and **Team** as the **target**
```js
Player.belongsTo(Team);
//Or
Player.hasOne(Team);
```
Having **Team** as the **source** and **Player** as the **target**
```js
Team.belongsTo(Player);
//Or
Team.hasOne(Player);
```
HasOne and BelongsTo insert the association key in different models. HasOne inserts the association key in the **target** model, whereas BelongsTo inserts the association key in the **source** model.
Here is an example demonstrating use cases of BelongsTo and HasOne.
```js
const Player = this.sequelize.define('player', {/* attributes */})
const Coach = this.sequelize.define('coach', {/* attributes */})
const Team = this.sequelize.define('team', {/* attributes */});
```
Suppose our `Player` model stores information about its team in a `teamId` column, while information about each team's `Coach` is stored in the `Team` model as a `coachId` column. These two scenarios require different kinds of 1:1 relations, because the foreign key is present on a different model in each case.
When the information about the association is present in the **source** model, we can use `belongsTo`. In this case `Player` is suitable for `belongsTo` because it has the `teamId` column.
```js
Player.belongsTo(Team) // `teamId` will be added on Player / Source model
```
When the information about the association is present in the **target** model, we can use `hasOne`. In this case `Coach` is suitable for `hasOne` because the `Team` model stores information about its `Coach` in the `coachId` column.
```js
Coach.hasOne(Team) // `coachId` will be added on Team / Target model
```
## One-To-Many associations
One-To-Many associations are connecting one source with multiple targets. The targets however are again connected to exactly one specific source.
```js
const User = sequelize.define('user', {/* ... */})
const Project = sequelize.define('project', {/* ... */})
// OK. Now things get more complicated (not really visible to the user :)).
// First let's define a hasMany association
Project.hasMany(User, {as: 'Workers'})
```
This will add the attribute `projectId` or `project_id` to User. Instances of Project will get the accessors `getWorkers` and `setWorkers`. We could just leave it the way it is and let it be a one-way association.
But we want more! Let's define it the other way around by creating a many to many association in the next section:
Sometimes you may need to associate records on different columns, you may use `sourceKey` option:
```js
const City = sequelize.define('city', { countryCode: Sequelize.STRING });
const Country = sequelize.define('country', { isoCode: Sequelize.STRING });
// Here we can connect countries and cities base on country code
Country.hasMany(City, {foreignKey: 'countryCode', sourceKey: 'isoCode'});
City.belongsTo(Country, {foreignKey: 'countryCode', targetKey: 'isoCode'});
```
## Belongs-To-Many associations
Belongs-To-Many associations are used to connect sources with multiple targets. Furthermore the targets can also have connections to multiple sources.
```js
Project.belongsToMany(User, {through: 'UserProject'});
User.belongsToMany(Project, {through: 'UserProject'});
```
This will create a new model called UserProject with the equivalent foreign keys `projectId` and `userId`. Whether the attributes are camelcase or not depends on the two models joined by the table (in this case User and Project).
Defining `through` is **required**. Sequelize would previously attempt to autogenerate names but that would not always lead to the most logical setups.
This will add methods `getUsers`, `setUsers`, `addUser`,`addUsers` to `Project`, and `getProjects`, `setProjects`, `addProject`, and `addProjects` to `User`.
Sometimes you may want to rename your models when using them in associations. Let's define users as workers and projects as tasks by using the alias (`as`) option. We will also manually define the foreign keys to use:
```js
User.belongsToMany(Project, { as: 'Tasks', through: 'worker_tasks', foreignKey: 'userId' })
Project.belongsToMany(User, { as: 'Workers', through: 'worker_tasks', foreignKey: 'projectId' })
```
`foreignKey` will allow you to set **source model** key in the **through** relation.
`otherKey` will allow you to set **target model** key in the **through** relation.
```js
User.belongsToMany(Project, { as: 'Tasks', through: 'worker_tasks', foreignKey: 'userId', otherKey: 'projectId'})
```
Of course you can also define self references with belongsToMany:
```js
Person.belongsToMany(Person, { as: 'Children', through: 'PersonChildren' })
// This will create the table PersonChildren which stores the ids of the objects.
```
If you want additional attributes in your join table, you can define a model for the join table in sequelize, before you define the association, and then tell sequelize that it should use that model for joining, instead of creating a new one:
```js
const User = sequelize.define('user', {})
const Project = sequelize.define('project', {})
const UserProjects = sequelize.define('userProjects', {
status: DataTypes.STRING
})
User.belongsToMany(Project, { through: UserProjects })
Project.belongsToMany(User, { through: UserProjects })
```
To add a new project to a user and set its status, you pass extra `options.through` to the setter, which contains the attributes for the join table
```js
user.addProject(project, { through: { status: 'started' }})
```
By default the code above will add projectId and userId to the UserProjects table, and _remove any previously defined primary key attribute_ - the table will be uniquely identified by the combination of the keys of the two tables, and there is no reason to have other PK columns. To enforce a primary key on the `UserProjects` model you can add it manually.
```js
const UserProjects = sequelize.define('userProjects', {
id: {
type: Sequelize.INTEGER,
primaryKey: true,
autoIncrement: true
},
status: DataTypes.STRING
})
```
With Belongs-To-Many you can query based on **through** relation and select specific attributes. For example using `findAll` with **through**
```js
User.findAll({
include: [{
model: Project,
through: {
attributes: ['createdAt', 'startedAt', 'finishedAt'],
where: {completed: true}
}
}]
});
```
## Scopes
This section concerns association scopes. For a definition of association scopes vs. scopes on associated models, see [Scopes](/manual/tutorial/scopes.html).
Association scopes allow you to place a scope (a set of default attributes for `get` and `create`) on the association. Scopes can be placed both on the associated model (the target of the association), and on the through table for n:m relations.
#### 1:m
Assume we have tables Comment, Post, and Image. A comment can be associated to either an image or a post via `commentable_id` and `commentable` - we say that Post and Image are `Commentable`
```js
const Comment = this.sequelize.define('comment', {
title: Sequelize.STRING,
commentable: Sequelize.STRING,
commentable_id: Sequelize.INTEGER
});
Comment.prototype.getItem = function() {
return this['get' + this.get('commentable').substr(0, 1).toUpperCase() + this.get('commentable').substr(1)]();
};
Post.hasMany(Comment, {
foreignKey: 'commentable_id',
constraints: false,
scope: {
commentable: 'post'
}
});
Comment.belongsTo(Post, {
foreignKey: 'commentable_id',
constraints: false,
as: 'post'
});
Image.hasMany(Comment, {
foreignKey: 'commentable_id',
constraints: false,
scope: {
commentable: 'image'
}
});
Comment.belongsTo(Image, {
foreignKey: 'commentable_id',
constraints: false,
as: 'image'
});
```
`constraints: false,` disables references constraints - since the `commentable_id` column references several tables, we cannot add a `REFERENCES` constraint to it. Note that the Image -> Comment and Post -> Comment relations define a scope, `commentable: 'image'` and `commentable: 'post'` respectively. This scope is automatically applied when using the association functions:
```js
image.getComments()
SELECT * FROM comments WHERE commentable_id = 42 AND commentable = 'image';
image.createComment({
title: 'Awesome!'
})
INSERT INTO comments (title, commentable_id, commentable) VALUES ('Awesome!', 42, 'image');
image.addComment(comment);
UPDATE comments SET commentable_id = 42, commentable = 'image'
```
The `getItem` utility function on `Comment` completes the picture - it simply converts the `commentable` string into a call to either `getImage` or `getPost`, providing an abstraction over whether a comment belongs to a post or an image.
#### n:m
Continuing with the idea of a polymorphic model, consider a tag table - an item can have multiple tags, and a tag can be related to several items.
For brevity, the example only shows a Post model, but in reality Tag would be related to several other models.
```js
const ItemTag = sequelize.define('item_tag', {
id : {
type: DataTypes.INTEGER,
primaryKey: true,
autoIncrement: true
},
tag_id: {
type: DataTypes.INTEGER,
unique: 'item_tag_taggable'
},
taggable: {
type: DataTypes.STRING,
unique: 'item_tag_taggable'
},
taggable_id: {
type: DataTypes.INTEGER,
unique: 'item_tag_taggable',
references: null
}
});
const Tag = sequelize.define('tag', {
name: DataTypes.STRING
});
Post.belongsToMany(Tag, {
through: {
model: ItemTag,
unique: false,
scope: {
taggable: 'post'
}
},
foreignKey: 'taggable_id',
constraints: false
});
Tag.belongsToMany(Post, {
through: {
model: ItemTag,
unique: false
},
foreignKey: 'tag_id',
constraints: false
});
```
Notice that the scoped column (`taggable`) is now on the through model (`ItemTag`).
We could also define a more restrictive association, for example, to get all pending tags for a post by applying a scope of both the through model (`ItemTag`) and the target model (`Tag`):
```js
Post.hasMany(Tag, {
through: {
model: ItemTag,
unique: false,
scope: {
taggable: 'post'
}
},
scope: {
status: 'pending'
},
as: 'pendingTags',
foreignKey: 'taggable_id',
constraints: false
});
post.getPendingTags();
```
```sql
SELECT `tag`.* FROM `tags` AS `tag` INNER JOIN `item_tags` AS `item_tag`
ON `tag`.`id` = `item_tag`.`tagId`
AND `item_tag`.`taggable_id` = 42
AND `item_tag`.`taggable` = 'post'
WHERE (`tag`.`status` = 'pending');
```
`constraints: false` disables references constraints on the `taggable_id` column. Because the column is polymorphic, we cannot say that it `REFERENCES` a specific table.
## Naming strategy
By default sequelize will use the model name (the name passed to `sequelize.define`) to figure out the name of the model when used in associations. For example, a model named `user` will add the functions `get/set/add User` to instances of the associated model, and a property named `.user` in eager loading, while a model named `User` will add the same functions, but a property named `.User` (notice the upper case U) in eager loading.
As we've already seen, you can alias models in associations using `as`. In single associations (has one and belongs to), the alias should be singular, while for many associations (has many) it should be plural. Sequelize then uses the [inflection][0] library to convert the alias to its singular form. However, this might not always work for irregular or non-english words. In this case, you can provide both the plural and the singular form of the alias:
```js
User.belongsToMany(Project, { as: { singular: 'task', plural: 'tasks' }})
// Notice that inflection has no problem singularizing tasks, this is just for illustrative purposes.
```
If you know that a model will always use the same alias in associations, you can provide it when creating the model
```js
const Project = sequelize.define('project', attributes, {
name: {
singular: 'task',
plural: 'tasks',
}
})
User.belongsToMany(Project, { through: 'UserProjects' });
```
This will add the functions `add/set/get Tasks` to user instances.
Remember, that using `as` to change the name of the association will also change the name of the foreign key. When using `as`, it is safest to also specify the foreign key.
```js
Invoice.belongsTo(Subscription)
Subscription.hasMany(Invoice)
```
Without `as`, this adds `subscriptionId` as expected. However, if you were to say `Invoice.belongsTo(Subscription, { as: 'TheSubscription' })`, you will have both `subscriptionId` and `theSubscriptionId`, because sequelize is not smart enough to figure out that the calls are two sides of the same relation. Setting `foreignKey` explicitly fixes this problem:
```js
Invoice.belongsTo(Subscription, { as: 'TheSubscription', foreignKey: 'subscription_id' })
Subscription.hasMany(Invoice, { foreignKey: 'subscription_id' })
```
## Associating objects
Because Sequelize is doing a lot of magic, you have to call `Sequelize.sync` after setting the associations! Doing so will allow you the following:
```js
Project.belongsToMany(Task)
Task.belongsToMany(Project)
Project.create()...
Task.create()...
Task.create()...
// save them... and then:
project.setTasks([task1, task2]).then(() => {
// saved!
})
// ok, now they are saved... how do I get them later on?
project.getTasks().then(associatedTasks => {
// associatedTasks is an array of tasks
})
// You can also pass filters to the getter method.
// They are equal to the options you can pass to a usual finder method.
project.getTasks({ where: 'id > 10' }).then(tasks => {
// tasks with an id greater than 10 :)
})
// You can also retrieve only certain fields of an associated object.
project.getTasks({attributes: ['title']}).then(tasks => {
// retrieve tasks with the attributes "title" and "id"
})
```
To remove created associations you can just call the set method without a specific id:
```js
// remove the association with task1
project.setTasks([task2]).then(associatedTasks => {
// you will get task2 only
})
// remove 'em all
project.setTasks([]).then(associatedTasks => {
// you will get an empty array
})
// or remove 'em more directly
project.removeTask(task1).then(() => {
// it's gone
})
// and add 'em again
project.addTask(task1).then(function() {
// it's back again
})
```
You can of course also do it vice versa:
```js
// project is associated with task1 and task2
task2.setProject(null).then(function() {
// and it's gone
})
```
For hasOne/belongsTo it's basically the same:
```js
Task.hasOne(User, {as: "Author"})
Task#setAuthor(anAuthor)
```
Adding associations to a relation with a custom join table can be done in two ways (continuing with the associations defined in the previous chapter):
```js
// Either by adding a property with the name of the join table model to the object, before creating the association
project.UserProjects = {
status: 'active'
}
u.addProject(project)
// Or by providing a second options.through argument when adding the association, containing the data that should go in the join table
u.addProject(project, { through: { status: 'active' }})
// When associating multiple objects, you can combine the two options above. In this case the second argument
// will be treated as a defaults object, that will be used if no data is provided
project1.UserProjects = {
status: 'inactive'
}
u.setProjects([project1, project2], { through: { status: 'active' }})
// The code above will record inactive for project one, and active for project two in the join table
```
When getting data on an association that has a custom join table, the data from the join table will be returned as a DAO instance:
```js
u.getProjects().then(projects => {
const project = projects[0]
if (project.UserProjects.status === 'active') {
// .. do magic
// since this is a real DAO instance, you can save it directly after you are done doing magic
return project.UserProjects.save()
}
})
```
If you only need some of the attributes from the join table, you can provide an array with the attributes you want:
```js
// This will select only name from the Projects table, and only status from the UserProjects table
user.getProjects({ attributes: ['name'], joinTableAttributes: ['status']})
```
## Check associations
You can also check if an object is already associated with another one (N:M only). Here is how you'd do it:
```js
// check if an object is one of associated ones:
Project.create({ /* */ }).then(project => {
return User.create({ /* */ }).then(user => {
return project.hasUser(user).then(result => {
// result would be false
return project.addUser(user).then(() => {
return project.hasUser(user).then(result => {
// result would be true
})
})
})
})
})
// check if all associated objects are as expected:
// let's assume we have already a project and two users
project.setUsers([user1, user2]).then(() => {
return project.hasUsers([user1]);
}).then(result => {
// result would be true, since hasUsers checks that all the given users are associated
return project.hasUsers([user1, user2]);
}).then(result => {
// result would be true
})
```
## Foreign Keys
When you create associations between your models in sequelize, foreign key references with constraints will automatically be created. The setup below:
```js
const Task = this.sequelize.define('task', { title: Sequelize.STRING })
const User = this.sequelize.define('user', { username: Sequelize.STRING })
User.hasMany(Task)
Task.belongsTo(User)
```
Will generate the following SQL:
```sql
CREATE TABLE IF NOT EXISTS `User` (
`id` INTEGER PRIMARY KEY,
`username` VARCHAR(255)
);
CREATE TABLE IF NOT EXISTS `Task` (
`id` INTEGER PRIMARY KEY,
`title` VARCHAR(255),
`user_id` INTEGER REFERENCES `User` (`id`) ON DELETE SET NULL ON UPDATE CASCADE
);
```
The relation between task and user injects the `user_id` foreign key on tasks, and marks it as a reference to the `User` table. By default `user_id` will be set to `NULL` if the referenced user is deleted, and updated if the id of the user is updated. These options can be overridden by passing `onUpdate` and `onDelete` options to the association calls. The validation options are `RESTRICT, CASCADE, NO ACTION, SET DEFAULT, SET NULL`.
For 1:1 and 1:m associations the default option is `SET NULL` for deletion, and `CASCADE` for updates. For n:m, the default for both is `CASCADE`. This means, that if you delete or update a row from one side of an n:m association, all the rows in the join table referencing that row will also be deleted or updated.
Adding constraints between tables means that tables must be created in the database in a certain order, when using `sequelize.sync`. If Task has a reference to User, the User table must be created before the Task table can be created. This can sometimes lead to circular references, where sequelize cannot find an order in which to sync. Imagine a scenario of documents and versions. A document can have multiple versions, and for convenience, a document has a reference to its current version.
```js
const Document = this.sequelize.define('document', {
author: Sequelize.STRING
})
const Version = this.sequelize.define('version', {
timestamp: Sequelize.DATE
})
Document.hasMany(Version) // This adds document_id to version
Document.belongsTo(Version, { as: 'Current', foreignKey: 'current_version_id'}) // This adds current_version_id to document
```
However, the code above will result in the following error: `Cyclic dependency found. 'Document' is dependent of itself. Dependency Chain: Document -> Version => Document`. In order to alleviate that, we can pass `constraints: false` to one of the associations:
```js
Document.hasMany(Version)
Document.belongsTo(Version, { as: 'Current', foreignKey: 'current_version_id', constraints: false})
```
Which will allow us to sync the tables correctly:
```sql
CREATE TABLE IF NOT EXISTS `Document` (
`id` INTEGER PRIMARY KEY,
`author` VARCHAR(255),
`current_version_id` INTEGER
);
CREATE TABLE IF NOT EXISTS `Version` (
`id` INTEGER PRIMARY KEY,
`timestamp` DATETIME,
`document_id` INTEGER REFERENCES `Document` (`id`) ON DELETE SET NULL ON UPDATE CASCADE
);
```
### Enforcing a foreign key reference without constraints
Sometimes you may want to reference another table, without adding any constraints, or associations. In that case you can manually add the reference attributes to your schema definition, and mark the relations between them.
```js
// Series has a trainer_id=Trainer.id foreign reference key after we call Trainer.hasMany(series)
const Series = sequelize.define('series', {
title: DataTypes.STRING,
sub_title: DataTypes.STRING,
description: DataTypes.TEXT,
// Set FK relationship (hasMany) with `Trainer`
trainer_id: {
type: DataTypes.INTEGER,
references: {
model: "trainers",
key: "id"
}
}
})
const Trainer = sequelize.define('trainer', {
first_name: DataTypes.STRING,
last_name: DataTypes.STRING
});
// Video has a series_id=Series.id foreign reference key after we call Series.hasOne(Video)...
const Video = sequelize.define('video', {
title: DataTypes.STRING,
sequence: DataTypes.INTEGER,
description: DataTypes.TEXT,
// set relationship (hasOne) with `Series`
series_id: {
type: DataTypes.INTEGER,
references: {
model: Series, // Can be both a string representing the table name, or a reference to the model
key: "id"
}
}
});
Series.hasOne(Video);
Trainer.hasMany(Series);
```
## Creating with associations
An instance can be created with nested association in one step, provided all elements are new.
### Creating elements of a "BelongsTo", "Has Many" or "HasOne" association
Consider the following models:
```js
const Product = this.sequelize.define('product', {
title: Sequelize.STRING
});
const User = this.sequelize.define('user', {
first_name: Sequelize.STRING,
last_name: Sequelize.STRING
});
const Address = this.sequelize.define('address', {
type: Sequelize.STRING,
line_1: Sequelize.STRING,
line_2: Sequelize.STRING,
city: Sequelize.STRING,
state: Sequelize.STRING,
zip: Sequelize.STRING,
});
Product.User = Product.belongsTo(User);
User.Addresses = User.hasMany(Address);
// Also works for `hasOne`
```
A new `Product`, `User`, and one or more `Address` can be created in one step in the following way:
```js
return Product.create({
title: 'Chair',
user: {
first_name: 'Mick',
last_name: 'Broadstone',
addresses: [{
type: 'home',
line_1: '100 Main St.',
city: 'Austin',
state: 'TX',
zip: '78704'
}]
}
}, {
include: [{
association: Product.User,
include: [ User.Addresses ]
}]
});
```
Here, our user model is called `user`, with a lowercase u - This means that the property in the object should also be `user`. If the name given to `sequelize.define` was `User`, the key in the object should also be `User`. Likewise for `addresses`, except it's pluralized being a `hasMany` association.
### Creating elements of a "BelongsTo" association with an alias
The previous example can be extended to support an association alias.
```js
const Creator = Product.belongsTo(User, {as: 'creator'});
return Product.create({
title: 'Chair',
creator: {
first_name: 'Matt',
last_name: 'Hansen'
}
}, {
include: [ Creator ]
});
```
### Creating elements of a "HasMany" or "BelongsToMany" association
Let's introduce the ability to associate a product with many tags. Setting up the models could look like:
```js
const Tag = this.sequelize.define('tag', {
name: Sequelize.STRING
});
Product.hasMany(Tag);
// Also works for `belongsToMany`.
```
Now we can create a product with multiple tags in the following way:
```js
Product.create({
id: 1,
title: 'Chair',
tags: [
{ name: 'Alpha'},
{ name: 'Beta'}
]
}, {
include: [ Tag ]
})
```
And, we can modify this example to support an alias as well:
```js
const Categories = Product.hasMany(Tag, {as: 'categories'});
Product.create({
id: 1,
title: 'Chair',
categories: [
{id: 1, name: 'Alpha'},
{id: 2, name: 'Beta'}
]
}, {
include: [{
model: Categories,
as: 'categories'
}]
})
```
***
[0]: https://www.npmjs.org/package/inflection
| {
"content_hash": "883ccee55bb3a42330ec8509169c080c",
"timestamp": "",
"source": "github",
"line_count": 862,
"max_line_length": 496,
"avg_line_length": 34.43619489559165,
"alnum_prop": 0.7155033014418541,
"repo_name": "Verdier/sequelize",
"id": "b5855501f13788ca4ef3d3cb28ec0a53d751e67a",
"size": "29718",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "docs/associations.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "2602252"
},
{
"name": "PowerShell",
"bytes": "1468"
}
],
"symlink_target": ""
} |
just a playground
| {
"content_hash": "f2cb95a3ea92cd01f16e2095407ff7fa",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 17,
"avg_line_length": 18,
"alnum_prop": 0.8333333333333334,
"repo_name": "bbasic/workload-planner",
"id": "21aa6f595862798713c93880a2c4d729ae6e7146",
"size": "37",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2605"
},
{
"name": "CoffeeScript",
"bytes": "25219"
},
{
"name": "HTML",
"bytes": "22634"
},
{
"name": "JavaScript",
"bytes": "18081"
}
],
"symlink_target": ""
} |
namespace NLog.Internal
{
    using System;
    using System.Collections.Generic;
    using NLog.Config;
    using NLog.Filters;
    using NLog.Targets;

    /// <summary>
    /// Represents target with a chain of filters which determine
    /// whether logging should happen.
    /// </summary>
    [NLogConfigurationItem]
    internal class TargetWithFilterChain
    {
        // Shared sentinel table meaning "no targets configured for any level".
        internal static readonly TargetWithFilterChain[] NoTargetsByLevel = CreateLoggingConfiguration();

        // Allocates one slot per LogLevel ordinal.
        internal static TargetWithFilterChain[] CreateLoggingConfiguration() => new TargetWithFilterChain[LogLevel.MaxLevel.Ordinal + 2]; // +2 to include LogLevel.Off

        // Lazily-created MRU cache mapping a call-site (member name, file path, line number)
        // to the resolved caller class name. Created on first use in TryLookupCallSiteClassName.
        private MruCache<CallSiteKey, string> _callSiteClassNameCache;

        /// <summary>
        /// Initializes a new instance of the <see cref="TargetWithFilterChain" /> class.
        /// </summary>
        /// <param name="target">The target.</param>
        /// <param name="filterChain">The filter chain.</param>
        /// <param name="filterDefaultAction">Default action if none of the filters match.</param>
        public TargetWithFilterChain(Target target, IList<Filter> filterChain, FilterResult filterDefaultAction)
        {
            Target = target;
            FilterChain = filterChain;
            FilterDefaultAction = filterDefaultAction;
        }

        /// <summary>
        /// Gets the target.
        /// </summary>
        /// <value>The target.</value>
        public Target Target { get; }

        /// <summary>
        /// Gets the filter chain.
        /// </summary>
        /// <value>The filter chain.</value>
        public IList<Filter> FilterChain { get; }

        /// <summary>
        /// Gets or sets the next <see cref="TargetWithFilterChain"/> item in the chain.
        /// </summary>
        /// <value>The next item in the chain.</value>
        /// <example>This is for example the 'target2' logger in writeTo='target1,target2' </example>
        public TargetWithFilterChain NextInChain { get; set; }

        /// <summary>
        /// Gets the stack trace usage.
        /// </summary>
        /// <returns>A <see cref="StackTraceUsage" /> value that determines stack trace handling.</returns>
        public StackTraceUsage StackTraceUsage { get; private set; }

        /// <summary>
        /// Default action if none of the filters match.
        /// </summary>
        public FilterResult FilterDefaultAction { get; }

        /// <summary>
        /// Combines the <see cref="StackTraceUsage"/> of this item's target with that of the
        /// rest of the chain (recursing only while the maximum has not been reached), stores
        /// the result in <see cref="StackTraceUsage"/> and returns it.
        /// </summary>
        internal StackTraceUsage PrecalculateStackTraceUsage()
        {
            var stackTraceUsage = StackTraceUsage.None;
            // find all objects which may need stack trace
            // and determine maximum
            if (Target != null)
            {
                stackTraceUsage = Target.StackTraceUsage;
            }
            //recurse into chain if not max
            if (NextInChain != null && (stackTraceUsage & StackTraceUsage.Max) != StackTraceUsage.Max)
            {
                var stackTraceUsageForChain = NextInChain.PrecalculateStackTraceUsage();
                stackTraceUsage |= stackTraceUsageForChain;
            }
            StackTraceUsage = stackTraceUsage;
            return stackTraceUsage;
        }

        /// <summary>
        /// Returns true when only the call-site class name is wanted (no full stack trace),
        /// caller file-path information is available, and no stack trace has been captured yet —
        /// i.e. the cached class-name lookup can be used instead of capturing a stack trace.
        /// </summary>
        internal bool TryCallSiteClassNameOptimization(StackTraceUsage stackTraceUsage, LogEventInfo logEvent)
        {
            if ((stackTraceUsage & (StackTraceUsage.WithCallSiteClassName | StackTraceUsage.WithStackTrace)) != StackTraceUsage.WithCallSiteClassName)
                return false;

            if (string.IsNullOrEmpty(logEvent.CallSiteInformation?.CallerFilePath))
                return false;

            if (logEvent.HasStackTrace)
                return false;

            return true;
        }

        /// <summary>
        /// Determines whether a stack trace must still be captured for <paramref name="logEvent"/>:
        /// either a full stack trace is requested, or call-site info is requested but neither the
        /// caller method name nor the caller file path is available.
        /// </summary>
        internal bool MustCaptureStackTrace(StackTraceUsage stackTraceUsage, LogEventInfo logEvent)
        {
            if (logEvent.HasStackTrace)
                return false;

            if ((stackTraceUsage & StackTraceUsage.WithStackTrace) != StackTraceUsage.None)
                return true;

            if ((stackTraceUsage & StackTraceUsage.WithCallSite) != StackTraceUsage.None && string.IsNullOrEmpty(logEvent.CallSiteInformation?.CallerMethodName) && string.IsNullOrEmpty(logEvent.CallSiteInformation?.CallerFilePath))
                return true; // We don't have enough CallSiteInformation

            return false;
        }

        /// <summary>
        /// Stores the caller class name resolved for <paramref name="logEvent"/> in the
        /// call-site cache, so later events from the same call-site can skip resolution.
        /// Returns false when there is nothing to cache (no caller file path, no class name,
        /// or the cache has not been created yet).
        /// </summary>
        internal bool TryRememberCallSiteClassName(LogEventInfo logEvent)
        {
            if (string.IsNullOrEmpty(logEvent.CallSiteInformation?.CallerFilePath))
                return false;

            string className = logEvent.CallSiteInformation.GetCallerClassName(null, true, true, true);
            if (string.IsNullOrEmpty(className))
                return false;

            if (_callSiteClassNameCache is null)
                return false;

            string internClassName = logEvent.LoggerName == className ?
                logEvent.LoggerName :
#if !NETSTANDARD1_3 && !NETSTANDARD1_5
                string.Intern(className); // Single string-reference for all logging-locations for the same class
#else
                className;
#endif

            CallSiteKey callSiteKey = new CallSiteKey(logEvent.CallerMemberName, logEvent.CallerFilePath, logEvent.CallerLineNumber);
            return _callSiteClassNameCache.TryAddValue(callSiteKey, internClassName);
        }

        /// <summary>
        /// Tries to resolve the caller class name for <paramref name="logEvent"/>: first from
        /// the event's own call-site information, otherwise from the call-site cache
        /// (creating the cache on first use via a lock-free Interlocked exchange).
        /// </summary>
        internal bool TryLookupCallSiteClassName(LogEventInfo logEvent, out string callSiteClassName)
        {
            callSiteClassName = logEvent.CallSiteInformation?.CallerClassName;
            if (!string.IsNullOrEmpty(callSiteClassName))
                return true;

            if (_callSiteClassNameCache is null)
            {
                System.Threading.Interlocked.CompareExchange(ref _callSiteClassNameCache, new MruCache<CallSiteKey, string>(1000), null);
            }

            CallSiteKey callSiteKey = new CallSiteKey(logEvent.CallerMemberName, logEvent.CallerFilePath, logEvent.CallerLineNumber);
            return _callSiteClassNameCache.TryGetValue(callSiteKey, out callSiteClassName);
        }

        /// <summary>
        /// Cache key identifying a call-site by caller member name, source file name and line number.
        /// </summary>
        struct CallSiteKey : IEquatable<CallSiteKey>
        {
            /// <summary>
            /// Initializes the key; null method/file names are normalized to <see cref="string.Empty"/>.
            /// </summary>
            public CallSiteKey(string methodName, string fileSourceName, int fileSourceLineNumber)
            {
                MethodName = methodName ?? string.Empty;
                FileSourceName = fileSourceName ?? string.Empty;
                FileSourceLineNumber = fileSourceLineNumber;
            }

            public readonly string MethodName;
            public readonly string FileSourceName;
            public readonly int FileSourceLineNumber;

            /// <summary>
            /// Serves as a hash function for a particular type.
            /// </summary>
            public override int GetHashCode()
            {
                return MethodName.GetHashCode() ^ FileSourceName.GetHashCode() ^ FileSourceLineNumber;
            }

            /// <summary>
            /// Determines if two objects are equal in value.
            /// </summary>
            /// <param name="obj">Other object to compare to.</param>
            /// <returns>True if objects are equal, false otherwise.</returns>
            public override bool Equals(object obj)
            {
                return obj is CallSiteKey key && Equals(key);
            }

            /// <summary>
            /// Determines if two objects of the same type are equal in value.
            /// </summary>
            /// <param name="other">Other object to compare to.</param>
            /// <returns>True if objects are equal, false otherwise.</returns>
            public bool Equals(CallSiteKey other)
            {
                return FileSourceLineNumber == other.FileSourceLineNumber
                    && string.Equals(FileSourceName, other.FileSourceName, StringComparison.Ordinal)
                    && string.Equals(MethodName, other.MethodName, StringComparison.Ordinal);
            }
        }
    }
}
| {
"content_hash": "5dfc0671cc950383b52dfe9ddde88fbc",
"timestamp": "",
"source": "github",
"line_count": 197,
"max_line_length": 231,
"avg_line_length": 40.52791878172589,
"alnum_prop": 0.6169839679358717,
"repo_name": "304NotModified/NLog",
"id": "74a0d3f66c52ac5509f6a33dee86d1fcc7354757",
"size": "9650",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "src/NLog/Internal/TargetWithFilterChain.cs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "623"
},
{
"name": "C#",
"bytes": "6867869"
},
{
"name": "CSS",
"bytes": "1033"
},
{
"name": "PowerShell",
"bytes": "6862"
},
{
"name": "XSLT",
"bytes": "10993"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>continuations: Not compatible 👼</title>
<link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" />
<link href="../../../../../bootstrap.min.css" rel="stylesheet">
<link href="../../../../../bootstrap-custom.css" rel="stylesheet">
<link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet">
<script src="../../../../../moment.min.js"></script>
<!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body>
<div class="container">
<div class="navbar navbar-default" role="navigation">
<div class="container-fluid">
<div class="navbar-header">
<a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a>
</div>
<div id="navbar" class="collapse navbar-collapse">
<ul class="nav navbar-nav">
<li><a href="../..">clean / released</a></li>
<li class="active"><a href="">8.7.1 / continuations - 8.9.0</a></li>
</ul>
</div>
</div>
</div>
<div class="article">
<div class="row">
<div class="col-md-12">
<a href="../..">« Up</a>
<h1>
continuations
<small>
8.9.0
<span class="label label-info">Not compatible 👼</span>
</small>
</h1>
<p>📅 <em><script>document.write(moment("2022-10-04 09:52:02 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-10-04 09:52:02 UTC)</em><p>
<h2>Context</h2>
<pre># Packages matching: installed
# Name # Installed # Synopsis
base-bigarray base
base-num base Num library distributed with the OCaml compiler
base-ocamlbuild base OCamlbuild binary and libraries distributed with the OCaml compiler
base-threads base
base-unix base
camlp5 7.14 Preprocessor-pretty-printer of OCaml
conf-findutils 1 Virtual package relying on findutils
conf-perl 2 Virtual package relying on perl
coq 8.7.1 Formal proof management system
num 0 The Num library for arbitrary-precision integer and rational arithmetic
ocaml 4.02.3 The OCaml compiler (virtual package)
ocaml-base-compiler 4.02.3 Official 4.02.3 release
ocaml-config 1 OCaml Switch Configuration
ocamlfind 1.9.5 A library manager for OCaml
# opam file:
opam-version: "2.0"
maintainer: "Hugo.Herbelin@inria.fr"
homepage: "https://github.com/coq-contribs/continuations"
license: "Unknown"
build: [make "-j%{jobs}%"]
install: [make "install"]
remove: ["rm" "-R" "%{lib}%/coq/user-contrib/Continuations"]
depends: [
"ocaml"
"coq" {>= "8.9" & < "8.10~"}
]
tags: [
"keyword: exceptions"
"keyword: monads"
"keyword: continuations"
"keyword: cps"
"category: Computer Science/Semantics and Compilation/Semantics"
"category: Miscellaneous/Extracted Programs/Combinatorics"
]
authors: [
"Jean-François Monin"
]
bug-reports: "https://github.com/coq-contribs/continuations/issues"
dev-repo: "git+https://github.com/coq-contribs/continuations.git"
synopsis: "A toolkit to reason with programs raising exceptions"
description: """
We show a way of developing correct functionnal programs
raising exceptions. This is made possible using a Continuation
Passing Style translation, see the contribution "exceptions" from
P. Casteran at Bordeaux. Things are made easier and more modular using
some general definitions."""
flags: light-uninstall
url {
src: "https://github.com/coq-contribs/continuations/archive/v8.9.0.tar.gz"
checksum: "md5=09c9cc6422b3a07d418fe468138a3489"
}
</pre>
<h2>Lint</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Dry install 🏜️</h2>
<p>Dry install with the current Coq version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam install -y --show-action coq-continuations.8.9.0 coq.8.7.1</code></dd>
<dt>Return code</dt>
<dd>5120</dd>
<dt>Output</dt>
<dd><pre>[NOTE] Package coq is already installed (current version is 8.7.1).
The following dependencies couldn't be met:
- coq-continuations -> coq >= 8.9 -> ocaml >= 4.05.0
base of this switch (use `--unlock-base' to force)
Your request can't be satisfied:
- No available version of coq satisfies the constraints
No solution found, exiting
</pre></dd>
</dl>
<p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-continuations.8.9.0</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Install dependencies</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Install 🚀</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Installation size</h2>
<p>No files were installed.</p>
<h2>Uninstall 🧹</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Missing removes</dt>
<dd>
none
</dd>
<dt>Wrong removes</dt>
<dd>
none
</dd>
</dl>
</div>
</div>
</div>
<hr/>
<div class="footer">
<p class="text-center">
Sources are on <a href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣
</p>
</div>
</div>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
<script src="../../../../../bootstrap.min.js"></script>
</body>
</html>
| {
"content_hash": "86b33665a6bef293dcf094b671e6979b",
"timestamp": "",
"source": "github",
"line_count": 180,
"max_line_length": 159,
"avg_line_length": 41.583333333333336,
"alnum_prop": 0.5605878423513694,
"repo_name": "coq-bench/coq-bench.github.io",
"id": "725f4e10fa1597383a4833ee49122221a427160d",
"size": "7511",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "clean/Linux-x86_64-4.02.3-2.0.6/released/8.7.1/continuations/8.9.0.html",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
"""
Module contains tools for processing files into DataFrames or other objects
"""
from collections import defaultdict
import csv
import datetime
from io import StringIO
import re
import sys
from textwrap import fill
from typing import Any, Dict, Set
import warnings
import numpy as np
import pandas._libs.lib as lib
import pandas._libs.ops as libops
import pandas._libs.parsers as parsers
from pandas._libs.tslibs import parsing
from pandas.errors import (
AbstractMethodError,
EmptyDataError,
ParserError,
ParserWarning,
)
from pandas.util._decorators import Appender
from pandas.core.dtypes.cast import astype_nansafe
from pandas.core.dtypes.common import (
ensure_object,
ensure_str,
is_bool_dtype,
is_categorical_dtype,
is_dtype_equal,
is_extension_array_dtype,
is_float,
is_integer,
is_integer_dtype,
is_list_like,
is_object_dtype,
is_scalar,
is_string_dtype,
pandas_dtype,
)
from pandas.core.dtypes.dtypes import CategoricalDtype
from pandas.core.dtypes.missing import isna
from pandas._typing import FilePathOrBuffer
from pandas.core import algorithms
from pandas.core.arrays import Categorical
from pandas.core.frame import DataFrame
from pandas.core.index import Index, MultiIndex, RangeIndex, ensure_index_from_sequences
from pandas.core.series import Series
from pandas.core.tools import datetimes as tools
from pandas.io.common import (
_NA_VALUES,
BaseIterator,
UnicodeReader,
UTF8Recoder,
_get_handle,
_infer_compression,
_validate_header_arg,
get_filepath_or_buffer,
is_file_like,
)
from pandas.io.date_converters import generic_parser
# BOM character (byte order mark)
# This exists at the beginning of a file to indicate endianness
# of a file (stream). Unfortunately, this marker screws up parsing,
# so we need to remove it if we see it.
# U+FEFF; the parsers strip it from the first field of the first row.
_BOM = "\ufeff"
_doc_read_csv_and_table = (
r"""
{summary}
Also supports optionally iterating or breaking of the file
into chunks.
Additional help can be found in the online docs for
`IO Tools <http://pandas.pydata.org/pandas-docs/stable/user_guide/io.html>`_.
Parameters
----------
filepath_or_buffer : str, path object or file-like object
Any valid string path is acceptable. The string could be a URL. Valid
URL schemes include http, ftp, s3, and file. For file URLs, a host is
expected. A local file could be: file://localhost/path/to/table.csv.
If you want to pass in a path object, pandas accepts any ``os.PathLike``.
By file-like object, we refer to objects with a ``read()`` method, such as
a file handler (e.g. via builtin ``open`` function) or ``StringIO``.
sep : str, default {_default_sep}
Delimiter to use. If sep is None, the C engine cannot automatically detect
the separator, but the Python parsing engine can, meaning the latter will
be used and automatically detect the separator by Python's builtin sniffer
tool, ``csv.Sniffer``. In addition, separators longer than 1 character and
different from ``'\s+'`` will be interpreted as regular expressions and
will also force the use of the Python parsing engine. Note that regex
delimiters are prone to ignoring quoted data. Regex example: ``'\r\t'``.
delimiter : str, default ``None``
Alias for sep.
header : int, list of int, default 'infer'
Row number(s) to use as the column names, and the start of the
data. Default behavior is to infer the column names: if no names
are passed the behavior is identical to ``header=0`` and column
names are inferred from the first line of the file, if column
names are passed explicitly then the behavior is identical to
``header=None``. Explicitly pass ``header=0`` to be able to
replace existing names. The header can be a list of integers that
specify row locations for a multi-index on the columns
e.g. [0,1,3]. Intervening rows that are not specified will be
skipped (e.g. 2 in this example is skipped). Note that this
parameter ignores commented lines and empty lines if
``skip_blank_lines=True``, so ``header=0`` denotes the first line of
data rather than the first line of the file.
names : array-like, optional
List of column names to use. If file contains no header row, then you
should explicitly pass ``header=None``. Duplicates in this list are not
allowed.
index_col : int, str, sequence of int / str, or False, default ``None``
Column(s) to use as the row labels of the ``DataFrame``, either given as
string name or column index. If a sequence of int / str is given, a
MultiIndex is used.
Note: ``index_col=False`` can be used to force pandas to *not* use the first
column as the index, e.g. when you have a malformed file with delimiters at
the end of each line.
usecols : list-like or callable, optional
Return a subset of the columns. If list-like, all elements must either
be positional (i.e. integer indices into the document columns) or strings
that correspond to column names provided either by the user in `names` or
inferred from the document header row(s). For example, a valid list-like
`usecols` parameter would be ``[0, 1, 2]`` or ``['foo', 'bar', 'baz']``.
Element order is ignored, so ``usecols=[0, 1]`` is the same as ``[1, 0]``.
To instantiate a DataFrame from ``data`` with element order preserved use
``pd.read_csv(data, usecols=['foo', 'bar'])[['foo', 'bar']]`` for columns
in ``['foo', 'bar']`` order or
``pd.read_csv(data, usecols=['foo', 'bar'])[['bar', 'foo']]``
for ``['bar', 'foo']`` order.
If callable, the callable function will be evaluated against the column
names, returning names where the callable function evaluates to True. An
example of a valid callable argument would be ``lambda x: x.upper() in
['AAA', 'BBB', 'DDD']``. Using this parameter results in much faster
parsing time and lower memory usage.
squeeze : bool, default False
If the parsed data only contains one column then return a Series.
prefix : str, optional
Prefix to add to column numbers when no header, e.g. 'X' for X0, X1, ...
mangle_dupe_cols : bool, default True
Duplicate columns will be specified as 'X', 'X.1', ...'X.N', rather than
'X'...'X'. Passing in False will cause data to be overwritten if there
are duplicate names in the columns.
dtype : Type name or dict of column -> type, optional
Data type for data or columns. E.g. {{'a': np.float64, 'b': np.int32,
'c': 'Int64'}}
Use `str` or `object` together with suitable `na_values` settings
to preserve and not interpret dtype.
If converters are specified, they will be applied INSTEAD
of dtype conversion.
engine : {{'c', 'python'}}, optional
Parser engine to use. The C engine is faster while the python engine is
currently more feature-complete.
converters : dict, optional
Dict of functions for converting values in certain columns. Keys can either
be integers or column labels.
true_values : list, optional
Values to consider as True.
false_values : list, optional
Values to consider as False.
skipinitialspace : bool, default False
Skip spaces after delimiter.
skiprows : list-like, int or callable, optional
Line numbers to skip (0-indexed) or number of lines to skip (int)
at the start of the file.
If callable, the callable function will be evaluated against the row
indices, returning True if the row should be skipped and False otherwise.
An example of a valid callable argument would be ``lambda x: x in [0, 2]``.
skipfooter : int, default 0
Number of lines at bottom of file to skip (Unsupported with engine='c').
nrows : int, optional
Number of rows of file to read. Useful for reading pieces of large files.
na_values : scalar, str, list-like, or dict, optional
Additional strings to recognize as NA/NaN. If dict passed, specific
per-column NA values. By default the following values are interpreted as
NaN: '"""
+ fill("', '".join(sorted(_NA_VALUES)), 70, subsequent_indent=" ")
+ """'.
keep_default_na : bool, default True
Whether or not to include the default NaN values when parsing the data.
Depending on whether `na_values` is passed in, the behavior is as follows:
* If `keep_default_na` is True, and `na_values` are specified, `na_values`
is appended to the default NaN values used for parsing.
* If `keep_default_na` is True, and `na_values` are not specified, only
the default NaN values are used for parsing.
* If `keep_default_na` is False, and `na_values` are specified, only
the NaN values specified `na_values` are used for parsing.
* If `keep_default_na` is False, and `na_values` are not specified, no
strings will be parsed as NaN.
Note that if `na_filter` is passed in as False, the `keep_default_na` and
`na_values` parameters will be ignored.
na_filter : bool, default True
Detect missing value markers (empty strings and the value of na_values). In
data without any NAs, passing na_filter=False can improve the performance
of reading a large file.
verbose : bool, default False
Indicate number of NA values placed in non-numeric columns.
skip_blank_lines : bool, default True
If True, skip over blank lines rather than interpreting as NaN values.
parse_dates : bool or list of int or names or list of lists or dict, \
default False
The behavior is as follows:
* boolean. If True -> try parsing the index.
* list of int or names. e.g. If [1, 2, 3] -> try parsing columns 1, 2, 3
each as a separate date column.
* list of lists. e.g. If [[1, 3]] -> combine columns 1 and 3 and parse as
a single date column.
* dict, e.g. {{'foo' : [1, 3]}} -> parse columns 1, 3 as date and call
result 'foo'
If a column or index cannot be represented as an array of datetimes,
say because of an unparseable value or a mixture of timezones, the column
or index will be returned unaltered as an object data type. For
non-standard datetime parsing, use ``pd.to_datetime`` after
``pd.read_csv``. To parse an index or column with a mixture of timezones,
specify ``date_parser`` to be a partially-applied
:func:`pandas.to_datetime` with ``utc=True``. See
:ref:`io.csv.mixed_timezones` for more.
Note: A fast-path exists for iso8601-formatted dates.
infer_datetime_format : bool, default False
If True and `parse_dates` is enabled, pandas will attempt to infer the
format of the datetime strings in the columns, and if it can be inferred,
switch to a faster method of parsing them. In some cases this can increase
the parsing speed by 5-10x.
keep_date_col : bool, default False
If True and `parse_dates` specifies combining multiple columns then
keep the original columns.
date_parser : function, optional
Function to use for converting a sequence of string columns to an array of
datetime instances. The default uses ``dateutil.parser.parser`` to do the
conversion. Pandas will try to call `date_parser` in three different ways,
advancing to the next if an exception occurs: 1) Pass one or more arrays
(as defined by `parse_dates`) as arguments; 2) concatenate (row-wise) the
string values from the columns defined by `parse_dates` into a single array
and pass that; and 3) call `date_parser` once for each row using one or
more strings (corresponding to the columns defined by `parse_dates`) as
arguments.
dayfirst : bool, default False
DD/MM format dates, international and European format.
cache_dates : boolean, default True
If True, use a cache of unique, converted dates to apply the datetime
conversion. May produce significant speed-up when parsing duplicate
date strings, especially ones with timezone offsets.
.. versionadded:: 0.25.0
iterator : bool, default False
Return TextFileReader object for iteration or getting chunks with
``get_chunk()``.
chunksize : int, optional
Return TextFileReader object for iteration.
See the `IO Tools docs
<http://pandas.pydata.org/pandas-docs/stable/io.html#io-chunking>`_
for more information on ``iterator`` and ``chunksize``.
compression : {{'infer', 'gzip', 'bz2', 'zip', 'xz', None}}, default 'infer'
For on-the-fly decompression of on-disk data. If 'infer' and
`filepath_or_buffer` is path-like, then detect compression from the
following extensions: '.gz', '.bz2', '.zip', or '.xz' (otherwise no
decompression). If using 'zip', the ZIP file must contain only one data
file to be read in. Set to None for no decompression.
thousands : str, optional
Thousands separator.
decimal : str, default '.'
Character to recognize as decimal point (e.g. use ',' for European data).
lineterminator : str (length 1), optional
Character to break file into lines. Only valid with C parser.
quotechar : str (length 1), optional
The character used to denote the start and end of a quoted item. Quoted
items can include the delimiter and it will be ignored.
quoting : int or csv.QUOTE_* instance, default 0
Control field quoting behavior per ``csv.QUOTE_*`` constants. Use one of
QUOTE_MINIMAL (0), QUOTE_ALL (1), QUOTE_NONNUMERIC (2) or QUOTE_NONE (3).
doublequote : bool, default ``True``
When quotechar is specified and quoting is not ``QUOTE_NONE``, indicate
whether or not to interpret two consecutive quotechar elements INSIDE a
field as a single ``quotechar`` element.
escapechar : str (length 1), optional
One-character string used to escape other characters.
comment : str, optional
Indicates remainder of line should not be parsed. If found at the beginning
of a line, the line will be ignored altogether. This parameter must be a
single character. Like empty lines (as long as ``skip_blank_lines=True``),
fully commented lines are ignored by the parameter `header` but not by
`skiprows`. For example, if ``comment='#'``, parsing
``#empty\\na,b,c\\n1,2,3`` with ``header=0`` will result in 'a,b,c' being
treated as the header.
encoding : str, optional
Encoding to use for UTF when reading/writing (ex. 'utf-8'). `List of Python
standard encodings
<https://docs.python.org/3/library/codecs.html#standard-encodings>`_ .
dialect : str or csv.Dialect, optional
If provided, this parameter will override values (default or not) for the
following parameters: `delimiter`, `doublequote`, `escapechar`,
`skipinitialspace`, `quotechar`, and `quoting`. If it is necessary to
override values, a ParserWarning will be issued. See csv.Dialect
documentation for more details.
error_bad_lines : bool, default True
Lines with too many fields (e.g. a csv line with too many commas) will by
default cause an exception to be raised, and no DataFrame will be returned.
If False, then these "bad lines" will be dropped from the DataFrame that is
returned.
warn_bad_lines : bool, default True
If error_bad_lines is False, and warn_bad_lines is True, a warning for each
"bad line" will be output.
delim_whitespace : bool, default False
Specifies whether or not whitespace (e.g. ``' '`` or ``'\t'``) will be
used as the sep. Equivalent to setting ``sep='\\s+'``. If this option
is set to True, nothing should be passed in for the ``delimiter``
parameter.
low_memory : bool, default True
Internally process the file in chunks, resulting in lower memory use
while parsing, but possibly mixed type inference. To ensure no mixed
types either set False, or specify the type with the `dtype` parameter.
Note that the entire file is read into a single DataFrame regardless,
use the `chunksize` or `iterator` parameter to return the data in chunks.
(Only valid with C parser).
memory_map : bool, default False
If a filepath is provided for `filepath_or_buffer`, map the file object
directly onto memory and access the data directly from there. Using this
option can improve performance because there is no longer any I/O overhead.
float_precision : str, optional
Specifies which converter the C engine should use for floating-point
values. The options are `None` for the ordinary converter,
`high` for the high-precision converter, and `round_trip` for the
round-trip converter.
Returns
-------
DataFrame or TextParser
A comma-separated values (csv) file is returned as two-dimensional
data structure with labeled axes.
See Also
--------
to_csv : Write DataFrame to a comma-separated values (csv) file.
read_csv : Read a comma-separated values (csv) file into DataFrame.
read_fwf : Read a table of fixed-width formatted lines into DataFrame.
Examples
--------
>>> pd.{func_name}('data.csv') # doctest: +SKIP
"""
)
def _validate_integer(name, val, min_val=0):
"""
Checks whether the 'name' parameter for parsing is either
an integer OR float that can SAFELY be cast to an integer
without losing accuracy. Raises a ValueError if that is
not the case.
Parameters
----------
name : string
Parameter name (used for error reporting)
val : int or float
The value to check
min_val : int
Minimum allowed value (val < min_val will result in a ValueError)
"""
msg = "'{name:s}' must be an integer >={min_val:d}".format(
name=name, min_val=min_val
)
if val is not None:
if is_float(val):
if int(val) != val:
raise ValueError(msg)
val = int(val)
elif not (is_integer(val) and val >= min_val):
raise ValueError(msg)
return val
def _validate_names(names):
"""
Check if the `names` parameter contains duplicates.
If duplicates are found, we issue a warning before returning.
Parameters
----------
names : array-like or None
An array containing a list of the names used for the output DataFrame.
Returns
-------
names : array-like or None
The original `names` parameter.
"""
if names is not None:
if len(names) != len(set(names)):
raise ValueError("Duplicate names are not allowed.")
return names
def _read(filepath_or_buffer: FilePathOrBuffer, kwds):
    """Generic reader of line files."""
    # Normalize the encoding name: underscores become hyphens, lowercase.
    encoding = kwds.get("encoding", None)
    if encoding is not None:
        encoding = re.sub("_", "-", encoding).lower()
        kwds["encoding"] = encoding

    compression = _infer_compression(
        filepath_or_buffer, kwds.get("compression", "infer")
    )

    # TODO: get_filepath_or_buffer could return
    # Union[FilePathOrBuffer, s3fs.S3File, gcsfs.GCSFile]
    # though mypy handling of conditional imports is difficult.
    # See https://github.com/python/mypy/issues/1297
    fp_or_buf, _, compression, should_close = get_filepath_or_buffer(
        filepath_or_buffer, encoding, compression
    )
    kwds["compression"] = compression

    # A custom date_parser implies parse_dates unless the caller said more.
    if kwds.get("date_parser", None) is not None and isinstance(
        kwds["parse_dates"], bool
    ):
        kwds["parse_dates"] = True

    # Extract some of the arguments (pass chunksize on).
    wants_iterator = kwds.get("iterator", False)
    chunksize = _validate_integer("chunksize", kwds.get("chunksize", None), 1)
    nrows = kwds.get("nrows", None)

    # Check for duplicates in names.
    _validate_names(kwds.get("names", None))

    # Create the parser.
    reader = TextFileReader(fp_or_buf, **kwds)

    # Streaming use: hand back the reader itself.
    if chunksize or wants_iterator:
        return reader

    try:
        data = reader.read(nrows)
    finally:
        reader.close()

    # Only close handles we opened ourselves.
    if should_close:
        try:
            fp_or_buf.close()
        except ValueError:
            pass

    return data
# Keyword defaults shared by every parser engine; merged with user-supplied
# keywords in TextFileReader._get_options_with_defaults.
_parser_defaults = {
    "delimiter": None,
    "escapechar": None,
    "quotechar": '"',
    "quoting": csv.QUOTE_MINIMAL,
    "doublequote": True,
    "skipinitialspace": False,
    "lineterminator": None,
    "header": "infer",
    "index_col": None,
    "names": None,
    "prefix": None,
    "skiprows": None,
    "skipfooter": 0,
    "nrows": None,
    "na_values": None,
    "keep_default_na": True,
    "true_values": None,
    "false_values": None,
    "converters": None,
    "dtype": None,
    "cache_dates": True,
    "thousands": None,
    "comment": None,
    "decimal": b".",
    # 'engine': 'c',
    "parse_dates": False,
    "keep_date_col": False,
    "dayfirst": False,
    "date_parser": None,
    "usecols": None,
    # 'iterator': False,
    "chunksize": None,
    "verbose": False,
    "encoding": None,
    "squeeze": False,
    "compression": None,
    "mangle_dupe_cols": True,
    "infer_datetime_format": False,
    "skip_blank_lines": True,
}

# Defaults honored only by the C engine; passing a non-default value to
# another engine raises in _get_options_with_defaults.
_c_parser_defaults = {
    "delim_whitespace": False,
    "na_filter": True,
    "low_memory": True,
    "memory_map": False,
    "error_bad_lines": True,
    "warn_bad_lines": True,
    "float_precision": None,
}

# Defaults specific to the fixed-width-field ('python-fwf') engine.
_fwf_defaults = {"colspecs": "infer", "infer_nrows": 100, "widths": None}

# Options that a given engine cannot support at all.
_c_unsupported = {"skipfooter"}
_python_unsupported = {"low_memory", "float_precision"}

# Deprecated arguments and their transitional defaults (currently empty).
_deprecated_defaults = {}  # type: Dict[str, Any]
_deprecated_args = set()  # type: Set[str]
def _make_parser_function(name, default_sep=","):
    """
    Factory building a top-level reader function (read_csv / read_table);
    the produced functions differ only in name and default separator.
    """

    def parser_f(
        filepath_or_buffer: FilePathOrBuffer,
        sep=default_sep,
        delimiter=None,
        # Column and Index Locations and Names
        header="infer",
        names=None,
        index_col=None,
        usecols=None,
        squeeze=False,
        prefix=None,
        mangle_dupe_cols=True,
        # General Parsing Configuration
        dtype=None,
        engine=None,
        converters=None,
        true_values=None,
        false_values=None,
        skipinitialspace=False,
        skiprows=None,
        skipfooter=0,
        nrows=None,
        # NA and Missing Data Handling
        na_values=None,
        keep_default_na=True,
        na_filter=True,
        verbose=False,
        skip_blank_lines=True,
        # Datetime Handling
        parse_dates=False,
        infer_datetime_format=False,
        keep_date_col=False,
        date_parser=None,
        dayfirst=False,
        cache_dates=True,
        # Iteration
        iterator=False,
        chunksize=None,
        # Quoting, Compression, and File Format
        compression="infer",
        thousands=None,
        decimal=b".",
        lineterminator=None,
        quotechar='"',
        quoting=csv.QUOTE_MINIMAL,
        doublequote=True,
        escapechar=None,
        comment=None,
        encoding=None,
        dialect=None,
        # Error Handling
        error_bad_lines=True,
        warn_bad_lines=True,
        # Internal
        delim_whitespace=False,
        low_memory=_c_parser_defaults["low_memory"],
        memory_map=False,
        float_precision=None,
    ):
        # gh-23761
        #
        # When a dialect is passed, it overrides any of the overlapping
        # parameters passed in directly. We don't want to warn if the
        # default parameters were passed in (since it probably means
        # that the user didn't pass them in explicitly in the first place).
        #
        # "delimiter" is the annoying corner case because we alias it to
        # "sep" before doing comparison to the dialect values later on.
        # Thus, we need a flag to indicate that we need to "override"
        # the comparison to dialect values by checking if default values
        # for BOTH "delimiter" and "sep" were provided.
        if dialect is not None:
            sep_override = delimiter is None and sep == default_sep
            kwds = dict(sep_override=sep_override)
        else:
            kwds = dict()

        # Alias sep -> delimiter.
        if delimiter is None:
            delimiter = sep

        # delim_whitespace already implies the separator; a second one is
        # ambiguous.
        if delim_whitespace and delimiter != default_sep:
            raise ValueError(
                "Specified a delimiter with both sep and"
                " delim_whitespace=True; you can only"
                " specify one."
            )

        # Remember whether the engine was user-chosen; fallbacks from "c" to
        # "python" raise instead of silently switching when it was explicit.
        if engine is not None:
            engine_specified = True
        else:
            engine = "c"
            engine_specified = False

        # Collapse every keyword into the single dict TextFileReader expects.
        kwds.update(
            delimiter=delimiter,
            engine=engine,
            dialect=dialect,
            compression=compression,
            engine_specified=engine_specified,
            doublequote=doublequote,
            escapechar=escapechar,
            quotechar=quotechar,
            quoting=quoting,
            skipinitialspace=skipinitialspace,
            lineterminator=lineterminator,
            header=header,
            index_col=index_col,
            names=names,
            prefix=prefix,
            skiprows=skiprows,
            skipfooter=skipfooter,
            na_values=na_values,
            true_values=true_values,
            false_values=false_values,
            keep_default_na=keep_default_na,
            thousands=thousands,
            comment=comment,
            decimal=decimal,
            parse_dates=parse_dates,
            keep_date_col=keep_date_col,
            dayfirst=dayfirst,
            date_parser=date_parser,
            cache_dates=cache_dates,
            nrows=nrows,
            iterator=iterator,
            chunksize=chunksize,
            converters=converters,
            dtype=dtype,
            usecols=usecols,
            verbose=verbose,
            encoding=encoding,
            squeeze=squeeze,
            memory_map=memory_map,
            float_precision=float_precision,
            na_filter=na_filter,
            delim_whitespace=delim_whitespace,
            warn_bad_lines=warn_bad_lines,
            error_bad_lines=error_bad_lines,
            low_memory=low_memory,
            mangle_dupe_cols=mangle_dupe_cols,
            infer_datetime_format=infer_datetime_format,
            skip_blank_lines=skip_blank_lines,
        )

        return _read(filepath_or_buffer, kwds)

    # No docstring is set here on purpose: Appender composes __doc__ later.
    parser_f.__name__ = name

    return parser_f
# Build the public readers from the shared factory; Appender then renders the
# shared docstring template with each reader's name and default separator.
read_csv = _make_parser_function("read_csv", default_sep=",")
read_csv = Appender(
    _doc_read_csv_and_table.format(
        func_name="read_csv",
        summary=("Read a comma-separated values (csv) file " "into DataFrame."),
        _default_sep="','",
    )
)(read_csv)

read_table = _make_parser_function("read_table", default_sep="\t")
read_table = Appender(
    _doc_read_csv_and_table.format(
        func_name="read_table",
        summary="Read general delimited file into DataFrame.",
        _default_sep=r"'\\t' (tab-stop)",
    )
)(read_table)
def read_fwf(
    filepath_or_buffer: FilePathOrBuffer,
    colspecs="infer",
    widths=None,
    infer_nrows=100,
    **kwds
):
    r"""
    Read a table of fixed-width formatted lines into DataFrame.
    Also supports optionally iterating or breaking of the file
    into chunks.
    Additional help can be found in the `online docs for IO Tools
    <http://pandas.pydata.org/pandas-docs/stable/user_guide/io.html>`_.

    Parameters
    ----------
    filepath_or_buffer : str, path object or file-like object
        Any valid string path is acceptable. The string could be a URL. Valid
        URL schemes include http, ftp, s3, and file. For file URLs, a host is
        expected. A local file could be:
        ``file://localhost/path/to/table.csv``.
        If you want to pass in a path object, pandas accepts any
        ``os.PathLike``.
        By file-like object, we refer to objects with a ``read()`` method,
        such as a file handler (e.g. via builtin ``open`` function)
        or ``StringIO``.
    colspecs : list of tuple (int, int) or 'infer', optional
        A list of tuples giving the extents of the fixed-width
        fields of each line as half-open intervals (i.e., [from, to[ ).
        String value 'infer' can be used to instruct the parser to try
        detecting the column specifications from the first 100 rows of
        the data which are not being skipped via skiprows (default='infer').
    widths : list of int, optional
        A list of field widths which can be used instead of 'colspecs' if
        the intervals are contiguous.
    infer_nrows : int, default 100
        The number of rows to consider when letting the parser determine the
        `colspecs`.
        .. versionadded:: 0.24.0
    **kwds : optional
        Optional keyword arguments can be passed to ``TextFileReader``.

    Returns
    -------
    DataFrame or TextParser
        A comma-separated values (csv) file is returned as two-dimensional
        data structure with labeled axes.

    See Also
    --------
    to_csv : Write DataFrame to a comma-separated values (csv) file.
    read_csv : Read a comma-separated values (csv) file into DataFrame.

    Examples
    --------
    >>> pd.read_fwf('data.csv')  # doctest: +SKIP
    """
    # Validate input: at least one of colspecs/widths, but not both.
    if colspecs is None and widths is None:
        raise ValueError("Must specify either colspecs or widths")
    if widths is not None and colspecs not in (None, "infer"):
        raise ValueError("You must specify only one of 'widths' and 'colspecs'")

    # Translate contiguous field widths into explicit half-open spans.
    if widths is not None:
        colspecs = []
        start = 0
        for width in widths:
            colspecs.append((start, start + width))
            start += width

    kwds["colspecs"] = colspecs
    kwds["infer_nrows"] = infer_nrows
    kwds["engine"] = "python-fwf"
    return _read(filepath_or_buffer, kwds)
class TextFileReader(BaseIterator):
"""
Passed dialect overrides any of the related parser options
"""
    def __init__(self, f, engine=None, **kwds):
        """
        Build a reader over ``f``, resolving dialects, engine choice and
        option defaults before constructing the concrete parser engine.
        """
        self.f = f

        # Default to the python engine, remembering whether the caller chose
        # one explicitly (explicit choices disallow silent fallbacks later).
        if engine is not None:
            engine_specified = True
        else:
            engine = "python"
            engine_specified = False
        self._engine_specified = kwds.get("engine_specified", engine_specified)

        if kwds.get("dialect") is not None:
            dialect = kwds["dialect"]
            # A registered dialect name is resolved to its class.
            if dialect in csv.list_dialects():
                dialect = csv.get_dialect(dialect)

            # Any valid dialect should have these attributes.
            # If any are missing, we will raise automatically.
            for param in (
                "delimiter",
                "doublequote",
                "escapechar",
                "skipinitialspace",
                "quotechar",
                "quoting",
            ):
                try:
                    dialect_val = getattr(dialect, param)
                except AttributeError:
                    raise ValueError(
                        "Invalid dialect '{dialect}' provided".format(
                            dialect=kwds["dialect"]
                        )
                    )
                parser_default = _parser_defaults[param]
                provided = kwds.get(param, parser_default)

                # Messages for conflicting values between the dialect
                # instance and the actual parameters provided.
                conflict_msgs = []

                # Don't warn if the default parameter was passed in,
                # even if it conflicts with the dialect (gh-23761).
                if provided != parser_default and provided != dialect_val:
                    msg = (
                        "Conflicting values for '{param}': '{val}' was "
                        "provided, but the dialect specifies '{diaval}'. "
                        "Using the dialect-specified value.".format(
                            param=param, val=provided, diaval=dialect_val
                        )
                    )

                    # Annoying corner case for not warning about
                    # conflicts between dialect and delimiter parameter.
                    # Refer to the outer "_read_" function for more info.
                    if not (param == "delimiter" and kwds.pop("sep_override", False)):
                        conflict_msgs.append(msg)

                if conflict_msgs:
                    warnings.warn(
                        "\n\n".join(conflict_msgs), ParserWarning, stacklevel=2
                    )
                # The dialect value always wins.
                kwds[param] = dialect_val

        if kwds.get("skipfooter"):
            # skipfooter needs the whole file; streaming reads can't honor it.
            if kwds.get("iterator") or kwds.get("chunksize"):
                raise ValueError("'skipfooter' not supported for 'iteration'")
            if kwds.get("nrows"):
                raise ValueError("'skipfooter' not supported with 'nrows'")

        if kwds.get("header", "infer") == "infer":
            # Infer a header row only when no explicit names were given.
            kwds["header"] = 0 if kwds.get("names") is None else None

        self.orig_options = kwds

        # miscellanea
        self.engine = engine
        self._engine = None
        self._currow = 0

        options = self._get_options_with_defaults(engine)

        self.chunksize = options.pop("chunksize", None)
        self.nrows = options.pop("nrows", None)
        self.squeeze = options.pop("squeeze", False)

        # might mutate self.engine
        self.engine = self._check_file_or_buffer(f, engine)
        self.options, self.engine = self._clean_options(options, engine)

        if "has_index_names" in kwds:
            self.options["has_index_names"] = kwds["has_index_names"]

        self._make_engine(self.engine)
    def close(self):
        # Release the underlying engine's resources (open file handles etc.).
        self._engine.close()
def _get_options_with_defaults(self, engine):
kwds = self.orig_options
options = {}
for argname, default in _parser_defaults.items():
value = kwds.get(argname, default)
# see gh-12935
if argname == "mangle_dupe_cols" and not value:
raise ValueError(
"Setting mangle_dupe_cols=False is " "not supported yet"
)
else:
options[argname] = value
for argname, default in _c_parser_defaults.items():
if argname in kwds:
value = kwds[argname]
if engine != "c" and value != default:
if "python" in engine and argname not in _python_unsupported:
pass
elif value == _deprecated_defaults.get(argname, default):
pass
else:
raise ValueError(
"The %r option is not supported with the"
" %r engine" % (argname, engine)
)
else:
value = _deprecated_defaults.get(argname, default)
options[argname] = value
if engine == "python-fwf":
for argname, default in _fwf_defaults.items():
options[argname] = kwds.get(argname, default)
return options
def _check_file_or_buffer(self, f, engine):
# see gh-16530
if is_file_like(f):
next_attr = "__next__"
# The C engine doesn't need the file-like to have the "next" or
# "__next__" attribute. However, the Python engine explicitly calls
# "next(...)" when iterating through such an object, meaning it
# needs to have that attribute ("next" for Python 2.x, "__next__"
# for Python 3.x)
if engine != "c" and not hasattr(f, next_attr):
msg = "The 'python' engine cannot iterate " "through this file buffer."
raise ValueError(msg)
return engine
    def _clean_options(self, options, engine):
        """Reconcile the option dict with the chosen engine.

        Falls back from the 'c' engine to 'python' when an option requires
        it (skipfooter, regex separators, multi-byte separators/quotechars),
        warning on fallback or raising ValueError when the user explicitly
        requested the C engine. Also normalizes index_col, names, converters,
        na_values and skiprows.

        Returns (cleaned_options, engine).
        """
        result = options.copy()
        engine_specified = self._engine_specified
        fallback_reason = None
        sep = options["delimiter"]
        delim_whitespace = options["delim_whitespace"]
        # C engine not supported yet
        if engine == "c":
            if options["skipfooter"] > 0:
                fallback_reason = "the 'c' engine does not support" " skipfooter"
                engine = "python"
        encoding = sys.getfilesystemencoding() or "utf-8"
        if sep is None and not delim_whitespace:
            if engine == "c":
                fallback_reason = (
                    "the 'c' engine does not support"
                    " sep=None with delim_whitespace=False"
                )
                engine = "python"
        elif sep is not None and len(sep) > 1:
            if engine == "c" and sep == r"\s+":
                result["delim_whitespace"] = True
                del result["delimiter"]
            elif engine not in ("python", "python-fwf"):
                # wait until regex engine integrated
                fallback_reason = (
                    "the 'c' engine does not support"
                    " regex separators (separators > 1 char and"
                    r" different from '\s+' are"
                    " interpreted as regex)"
                )
                engine = "python"
        elif delim_whitespace:
            if "python" in engine:
                result["delimiter"] = r"\s+"
        elif sep is not None:
            encodeable = True
            try:
                if len(sep.encode(encoding)) > 1:
                    encodeable = False
            except UnicodeDecodeError:
                encodeable = False
            if not encodeable and engine not in ("python", "python-fwf"):
                fallback_reason = (
                    "the separator encoded in {encoding}"
                    " is > 1 char long, and the 'c' engine"
                    " does not support such separators".format(encoding=encoding)
                )
                engine = "python"
        quotechar = options["quotechar"]
        if quotechar is not None and isinstance(quotechar, (str, bytes)):
            if (
                len(quotechar) == 1
                and ord(quotechar) > 127
                and engine not in ("python", "python-fwf")
            ):
                fallback_reason = (
                    "ord(quotechar) > 127, meaning the "
                    "quotechar is larger than one byte, "
                    "and the 'c' engine does not support "
                    "such quotechars"
                )
                engine = "python"
        # an explicit engine='c' request cannot be honored -> hard error
        if fallback_reason and engine_specified:
            raise ValueError(fallback_reason)
        if engine == "c":
            for arg in _c_unsupported:
                del result[arg]
        if "python" in engine:
            for arg in _python_unsupported:
                if fallback_reason and result[arg] != _c_parser_defaults[arg]:
                    msg = (
                        "Falling back to the 'python' engine because"
                        " {reason}, but this causes {option!r} to be"
                        " ignored as it is not supported by the 'python'"
                        " engine."
                    ).format(reason=fallback_reason, option=arg)
                    raise ValueError(msg)
                del result[arg]
        if fallback_reason:
            warnings.warn(
                (
                    "Falling back to the 'python' engine because"
                    " {0}; you can avoid this warning by specifying"
                    " engine='python'."
                ).format(fallback_reason),
                ParserWarning,
                stacklevel=5,
            )
        index_col = options["index_col"]
        names = options["names"]
        converters = options["converters"]
        na_values = options["na_values"]
        skiprows = options["skiprows"]
        _validate_header_arg(options["header"])
        # accumulate all deprecation messages into a single warning
        depr_warning = ""
        for arg in _deprecated_args:
            parser_default = _c_parser_defaults[arg]
            depr_default = _deprecated_defaults[arg]
            msg = (
                "The '{arg}' argument has been deprecated "
                "and will be removed in a future version.".format(arg=arg)
            )
            if result.get(arg, depr_default) != depr_default:
                # raise Exception(result.get(arg, depr_default), depr_default)
                depr_warning += msg + "\n\n"
            else:
                result[arg] = parser_default
        if depr_warning != "":
            warnings.warn(depr_warning, FutureWarning, stacklevel=2)
        if index_col is True:
            raise ValueError("The value of index_col couldn't be 'True'")
        if _is_index_col(index_col):
            if not isinstance(index_col, (list, tuple, np.ndarray)):
                index_col = [index_col]
        result["index_col"] = index_col
        names = list(names) if names is not None else names
        # type conversion-related
        if converters is not None:
            if not isinstance(converters, dict):
                raise TypeError(
                    "Type converters must be a dict or"
                    " subclass, input was "
                    "a {0!r}".format(type(converters).__name__)
                )
        else:
            converters = {}
        # Converting values to NA
        keep_default_na = options["keep_default_na"]
        na_values, na_fvalues = _clean_na_values(na_values, keep_default_na)
        # handle skiprows; this is internally handled by the
        # c-engine, so only need for python parsers
        if engine != "c":
            if is_integer(skiprows):
                skiprows = list(range(skiprows))
            if skiprows is None:
                skiprows = set()
            elif not callable(skiprows):
                skiprows = set(skiprows)
        # put stuff back
        result["names"] = names
        result["converters"] = converters
        result["na_values"] = na_values
        result["na_fvalues"] = na_fvalues
        result["skiprows"] = skiprows
        return result, engine
def __next__(self):
try:
return self.get_chunk()
except StopIteration:
self.close()
raise
def _make_engine(self, engine="c"):
if engine == "c":
self._engine = CParserWrapper(self.f, **self.options)
else:
if engine == "python":
klass = PythonParser
elif engine == "python-fwf":
klass = FixedWidthFieldParser
else:
raise ValueError(
"Unknown engine: {engine} (valid options are"
' "c", "python", or'
' "python-fwf")'.format(engine=engine)
)
self._engine = klass(self.f, **self.options)
    def _failover_to_python(self):
        # Abstract hook: a subclass supporting a fallback from the C engine
        # to the Python engine must override this.
        raise AbstractMethodError(self)
    def read(self, nrows=None):
        """Read up to ``nrows`` rows through the engine and assemble a
        DataFrame (or a Series when ``squeeze`` is set and exactly one
        column results)."""
        nrows = _validate_integer("nrows", nrows)
        ret = self._engine.read(nrows)
        # May alter columns / col_dict
        index, columns, col_dict = self._create_index(ret)
        if index is None:
            if col_dict:
                # Any column is actually fine:
                new_rows = len(next(iter(col_dict.values())))
                # default integer index continuing from the current row
                index = RangeIndex(self._currow, self._currow + new_rows)
            else:
                new_rows = 0
        else:
            new_rows = len(index)
        df = DataFrame(col_dict, columns=columns, index=index)
        # track position so chunked reads get contiguous default indexes
        self._currow += new_rows
        if self.squeeze and len(df.columns) == 1:
            return df[df.columns[0]].copy()
        return df
def _create_index(self, ret):
index, columns, col_dict = ret
return index, columns, col_dict
def get_chunk(self, size=None):
if size is None:
size = self.chunksize
if self.nrows is not None:
if self._currow >= self.nrows:
raise StopIteration
size = min(size, self.nrows - self._currow)
return self.read(nrows=size)
def _is_index_col(col):
return col is not None and col is not False
def _is_potential_multi_index(columns):
"""
Check whether or not the `columns` parameter
could be converted into a MultiIndex.
Parameters
----------
columns : array-like
Object which may or may not be convertible into a MultiIndex
Returns
-------
boolean : Whether or not columns could become a MultiIndex
"""
return (
len(columns)
and not isinstance(columns, MultiIndex)
and all(isinstance(c, tuple) for c in columns)
)
def _evaluate_usecols(usecols, names):
"""
Check whether or not the 'usecols' parameter
is a callable. If so, enumerates the 'names'
parameter and returns a set of indices for
each entry in 'names' that evaluates to True.
If not a callable, returns 'usecols'.
"""
if callable(usecols):
return {i for i, name in enumerate(names) if usecols(name)}
return usecols
def _validate_usecols_names(usecols, names):
"""
Validates that all usecols are present in a given
list of names. If not, raise a ValueError that
shows what usecols are missing.
Parameters
----------
usecols : iterable of usecols
The columns to validate are present in names.
names : iterable of names
The column names to check against.
Returns
-------
usecols : iterable of usecols
The `usecols` parameter if the validation succeeds.
Raises
------
ValueError : Columns were missing. Error message will list them.
"""
missing = [c for c in usecols if c not in names]
if len(missing) > 0:
raise ValueError(
"Usecols do not match columns, "
"columns expected but not found: {missing}".format(missing=missing)
)
return usecols
def _validate_skipfooter_arg(skipfooter):
"""
Validate the 'skipfooter' parameter.
Checks whether 'skipfooter' is a non-negative integer.
Raises a ValueError if that is not the case.
Parameters
----------
skipfooter : non-negative integer
The number of rows to skip at the end of the file.
Returns
-------
validated_skipfooter : non-negative integer
The original input if the validation succeeds.
Raises
------
ValueError : 'skipfooter' was not a non-negative integer.
"""
if not is_integer(skipfooter):
raise ValueError("skipfooter must be an integer")
if skipfooter < 0:
raise ValueError("skipfooter cannot be negative")
return skipfooter
def _validate_usecols_arg(usecols):
"""
Validate the 'usecols' parameter.
Checks whether or not the 'usecols' parameter contains all integers
(column selection by index), strings (column by name) or is a callable.
Raises a ValueError if that is not the case.
Parameters
----------
usecols : list-like, callable, or None
List of columns to use when parsing or a callable that can be used
to filter a list of table columns.
Returns
-------
usecols_tuple : tuple
A tuple of (verified_usecols, usecols_dtype).
'verified_usecols' is either a set if an array-like is passed in or
'usecols' if a callable or None is passed in.
'usecols_dtype` is the inferred dtype of 'usecols' if an array-like
is passed in or None if a callable or None is passed in.
"""
msg = (
"'usecols' must either be list-like of all strings, all unicode, "
"all integers or a callable."
)
if usecols is not None:
if callable(usecols):
return usecols, None
if not is_list_like(usecols):
# see gh-20529
#
# Ensure it is iterable container but not string.
raise ValueError(msg)
usecols_dtype = lib.infer_dtype(usecols, skipna=False)
if usecols_dtype not in ("empty", "integer", "string", "unicode"):
raise ValueError(msg)
usecols = set(usecols)
return usecols, usecols_dtype
return usecols, None
def _validate_parse_dates_arg(parse_dates):
"""
Check whether or not the 'parse_dates' parameter
is a non-boolean scalar. Raises a ValueError if
that is the case.
"""
msg = (
"Only booleans, lists, and "
"dictionaries are accepted "
"for the 'parse_dates' parameter"
)
if parse_dates is not None:
if is_scalar(parse_dates):
if not lib.is_bool(parse_dates):
raise TypeError(msg)
elif not isinstance(parse_dates, (list, dict)):
raise TypeError(msg)
return parse_dates
class ParserBase:
    """Shared option handling and index/column machinery for the concrete
    parser wrappers (C and Python engines)."""
    def __init__(self, kwds):
        # NOTE: the pops below mutate the ``kwds`` dict passed in.
        self.names = kwds.get("names")
        self.orig_names = None
        self.prefix = kwds.pop("prefix", None)
        self.index_col = kwds.get("index_col", None)
        self.unnamed_cols = set()
        self.index_names = None
        self.col_names = None
        # date-parsing configuration
        self.parse_dates = _validate_parse_dates_arg(kwds.pop("parse_dates", False))
        self.date_parser = kwds.pop("date_parser", None)
        self.dayfirst = kwds.pop("dayfirst", False)
        self.keep_date_col = kwds.pop("keep_date_col", False)
        # NA handling configuration
        self.na_values = kwds.get("na_values")
        self.na_fvalues = kwds.get("na_fvalues")
        self.na_filter = kwds.get("na_filter", False)
        self.keep_default_na = kwds.get("keep_default_na", True)
        self.true_values = kwds.get("true_values")
        self.false_values = kwds.get("false_values")
        self.mangle_dupe_cols = kwds.get("mangle_dupe_cols", True)
        self.infer_datetime_format = kwds.pop("infer_datetime_format", False)
        self.cache_dates = kwds.pop("cache_dates", True)
        self._date_conv = _make_date_converter(
            date_parser=self.date_parser,
            dayfirst=self.dayfirst,
            infer_datetime_format=self.infer_datetime_format,
            cache_dates=self.cache_dates,
        )
        # validate header options for mi
        self.header = kwds.get("header")
        if isinstance(self.header, (list, tuple, np.ndarray)):
            if not all(map(is_integer, self.header)):
                raise ValueError("header must be integer or list of integers")
            if kwds.get("usecols"):
                raise ValueError(
                    "cannot specify usecols when " "specifying a multi-index header"
                )
            if kwds.get("names"):
                raise ValueError(
                    "cannot specify names when " "specifying a multi-index header"
                )
            # validate index_col that only contains integers
            if self.index_col is not None:
                is_sequence = isinstance(self.index_col, (list, tuple, np.ndarray))
                if not (
                    is_sequence
                    and all(map(is_integer, self.index_col))
                    or is_integer(self.index_col)
                ):
                    raise ValueError(
                        "index_col must only contain row numbers "
                        "when specifying a multi-index header"
                    )
        # GH 16338
        elif self.header is not None and not is_integer(self.header):
            raise ValueError("header must be integer or list of integers")
        self._name_processed = False
        self._first_chunk = True
        # GH 13932
        # keep references to file handles opened by the parser itself
        self.handles = []
def close(self):
for f in self.handles:
f.close()
@property
def _has_complex_date_col(self):
return isinstance(self.parse_dates, dict) or (
isinstance(self.parse_dates, list)
and len(self.parse_dates) > 0
and isinstance(self.parse_dates[0], list)
)
def _should_parse_dates(self, i):
if isinstance(self.parse_dates, bool):
return self.parse_dates
else:
if self.index_names is not None:
name = self.index_names[i]
else:
name = None
j = self.index_col[i]
if is_scalar(self.parse_dates):
return (j == self.parse_dates) or (
name is not None and name == self.parse_dates
)
else:
return (j in self.parse_dates) or (
name is not None and name in self.parse_dates
)
    def _extract_multi_indexer_columns(
        self, header, index_names, col_names, passed_names=False
    ):
        """Extract and return (names, index_names, col_names, passed_names)
        for a multi-row header.

        ``header`` is a list-of-lists returned from the parsers; it is
        mutated (the last row is popped as index names).
        """
        if len(header) < 2:
            return header[0], index_names, col_names, passed_names
        # the names are the tuples of the header that are not the index cols
        # 0 is the name of the index, assuming index_col is a list of column
        # numbers
        ic = self.index_col
        if ic is None:
            ic = []
        if not isinstance(ic, (list, tuple, np.ndarray)):
            ic = [ic]
        sic = set(ic)
        # clean the index_names
        index_names = header.pop(-1)
        index_names, names, index_col = _clean_index_names(
            index_names, self.index_col, self.unnamed_cols
        )
        # extract the columns
        field_count = len(header[0])
        def extract(r):
            # keep only non-index positions from a header row
            return tuple(r[i] for i in range(field_count) if i not in sic)
        columns = list(zip(*(extract(r) for r in header)))
        names = ic + columns
        # If we find unnamed columns all in a single
        # level, then our header was too long.
        for n in range(len(columns[0])):
            if all(ensure_str(col[n]) in self.unnamed_cols for col in columns):
                raise ParserError(
                    "Passed header=[{header}] are too many rows for this "
                    "multi_index of columns".format(
                        header=",".join(str(x) for x in self.header)
                    )
                )
        # Clean the column names (if we have an index_col).
        if len(ic):
            col_names = [
                r[0] if (len(r[0]) and r[0] not in self.unnamed_cols) else None
                for r in header
            ]
        else:
            col_names = [None] * len(header)
        passed_names = True
        return names, index_names, col_names, passed_names
    def _maybe_dedup_names(self, names):
        """When mangle_dupe_cols is enabled, rename duplicate labels to
        ``name.1``, ``name.2``, ... (for tuple labels, mangle the last
        element); otherwise return ``names`` unchanged."""
        # see gh-7160 and gh-9424: this helps to provide
        # immediate alleviation of the duplicate names
        # issue and appears to be satisfactory to users,
        # but ultimately, not needing to butcher the names
        # would be nice!
        if self.mangle_dupe_cols:
            names = list(names)  # so we can index
            counts = defaultdict(int)
            is_potential_mi = _is_potential_multi_index(names)
            for i, col in enumerate(names):
                cur_count = counts[col]
                # keep appending suffixes until the mangled name itself is
                # not already in use
                while cur_count > 0:
                    counts[col] = cur_count + 1
                    if is_potential_mi:
                        col = col[:-1] + (
                            "{column}.{count}".format(column=col[-1], count=cur_count),
                        )
                    else:
                        col = "{column}.{count}".format(column=col, count=cur_count)
                    cur_count = counts[col]
                names[i] = col
                counts[col] = cur_count + 1
        return names
def _maybe_make_multi_index_columns(self, columns, col_names=None):
# possibly create a column mi here
if _is_potential_multi_index(columns):
columns = MultiIndex.from_tuples(columns, names=col_names)
return columns
    def _make_index(self, data, alldata, columns, indexnamerow=False):
        """Build the row index from parsed data.

        Returns (index, columns); index is None when no index_col was
        requested."""
        if not _is_index_col(self.index_col) or not self.index_col:
            index = None
        elif not self._has_complex_date_col:
            # plain positional index columns
            index = self._get_simple_index(alldata, columns)
            index = self._agg_index(index)
        elif self._has_complex_date_col:
            # multi-column date index: resolve names first, parse later
            if not self._name_processed:
                (self.index_names, _, self.index_col) = _clean_index_names(
                    list(columns), self.index_col, self.unnamed_cols
                )
                self._name_processed = True
            index = self._get_complex_date_index(data, columns)
            index = self._agg_index(index, try_parse_dates=False)
        # add names for the index
        if indexnamerow:
            coffset = len(indexnamerow) - len(columns)
            index = index.set_names(indexnamerow[:coffset])
        # maybe create a mi on the columns
        columns = self._maybe_make_multi_index_columns(columns, self.col_names)
        return index, columns
_implicit_index = False
def _get_simple_index(self, data, columns):
def ix(col):
if not isinstance(col, str):
return col
raise ValueError("Index {col} invalid".format(col=col))
to_remove = []
index = []
for idx in self.index_col:
i = ix(idx)
to_remove.append(i)
index.append(data[i])
# remove index items from content and columns, don't pop in
# loop
for i in reversed(sorted(to_remove)):
data.pop(i)
if not self._implicit_index:
columns.pop(i)
return index
def _get_complex_date_index(self, data, col_names):
def _get_name(icol):
if isinstance(icol, str):
return icol
if col_names is None:
raise ValueError(
("Must supply column order to use {icol!s} " "as index").format(
icol=icol
)
)
for i, c in enumerate(col_names):
if i == icol:
return c
to_remove = []
index = []
for idx in self.index_col:
name = _get_name(idx)
to_remove.append(name)
index.append(data[name])
# remove index items from content and columns, don't pop in
# loop
for c in reversed(sorted(to_remove)):
data.pop(c)
col_names.remove(c)
return index
    def _agg_index(self, index, try_parse_dates=True):
        """Convert raw index arrays into an Index, applying per-level date
        parsing and NA handling."""
        arrays = []
        for i, arr in enumerate(index):
            if try_parse_dates and self._should_parse_dates(i):
                arr = self._date_conv(arr)
            if self.na_filter:
                col_na_values = self.na_values
                col_na_fvalues = self.na_fvalues
            else:
                col_na_values = set()
                col_na_fvalues = set()
            if isinstance(self.na_values, dict):
                # per-column NA spec: look up this level's name
                col_name = self.index_names[i]
                if col_name is not None:
                    col_na_values, col_na_fvalues = _get_na_values(
                        col_name, self.na_values, self.na_fvalues, self.keep_default_na
                    )
            arr, _ = self._infer_types(arr, col_na_values | col_na_fvalues)
            arrays.append(arr)
        names = self.index_names
        index = ensure_index_from_sequences(arrays, names)
        return index
    def _convert_to_ndarrays(
        self, dct, na_values, na_fvalues, verbose=False, converters=None, dtypes=None
    ):
        """Convert a dict of raw column values into typed arrays, applying
        per-column converters, NA handling, type inference, and dtype casts.

        Returns a dict mapping column label -> converted values.
        """
        result = {}
        for c, values in dct.items():
            conv_f = None if converters is None else converters.get(c, None)
            if isinstance(dtypes, dict):
                cast_type = dtypes.get(c, None)
            else:
                # single dtype or None
                cast_type = dtypes
            if self.na_filter:
                col_na_values, col_na_fvalues = _get_na_values(
                    c, na_values, na_fvalues, self.keep_default_na
                )
            else:
                col_na_values, col_na_fvalues = set(), set()
            if conv_f is not None:
                # conv_f applied to data before inference
                if cast_type is not None:
                    warnings.warn(
                        (
                            "Both a converter and dtype were specified "
                            "for column {0} - only the converter will "
                            "be used"
                        ).format(c),
                        ParserWarning,
                        stacklevel=7,
                    )
                try:
                    values = lib.map_infer(values, conv_f)
                except ValueError:
                    # retry with known NA values masked out of the converter
                    mask = algorithms.isin(values, list(na_values)).view(np.uint8)
                    values = lib.map_infer_mask(values, conv_f, mask)
                cvals, na_count = self._infer_types(
                    values, set(col_na_values) | col_na_fvalues, try_num_bool=False
                )
            else:
                is_str_or_ea_dtype = is_string_dtype(
                    cast_type
                ) or is_extension_array_dtype(cast_type)
                # skip inference if specified dtype is object
                # or casting to an EA
                try_num_bool = not (cast_type and is_str_or_ea_dtype)
                # general type inference and conversion
                cvals, na_count = self._infer_types(
                    values, set(col_na_values) | col_na_fvalues, try_num_bool
                )
                # type specified in dtype param or cast_type is an EA
                if cast_type and (
                    not is_dtype_equal(cvals, cast_type)
                    or is_extension_array_dtype(cast_type)
                ):
                    try:
                        if (
                            is_bool_dtype(cast_type)
                            and not is_categorical_dtype(cast_type)
                            and na_count > 0
                        ):
                            raise ValueError(
                                "Bool column has NA values in "
                                "column {column}".format(column=c)
                            )
                    except (AttributeError, TypeError):
                        # invalid input to is_bool_dtype
                        pass
                    cvals = self._cast_types(cvals, cast_type, c)
            result[c] = cvals
            if verbose and na_count:
                print(
                    "Filled {count} NA values in column {c!s}".format(
                        count=na_count, c=c
                    )
                )
        return result
    def _infer_types(self, values, na_values, try_num_bool=True):
        """
        Infer types of values, possibly casting

        Parameters
        ----------
        values : ndarray
        na_values : set
        try_num_bool : bool, default True
            try to cast values to numeric (first preference) or boolean

        Returns
        -------
        converted : ndarray
        na_count : int
        """
        na_count = 0
        if issubclass(values.dtype.type, (np.number, np.bool_)):
            # already numeric/bool: just blank out the NA sentinels
            mask = algorithms.isin(values, list(na_values))
            na_count = mask.sum()
            if na_count > 0:
                if is_integer_dtype(values):
                    # ints can't hold NaN; upcast to float first
                    values = values.astype(np.float64)
                np.putmask(values, mask, np.nan)
            return values, na_count
        if try_num_bool:
            try:
                result = lib.maybe_convert_numeric(values, na_values, False)
                na_count = isna(result).sum()
            except Exception:
                # numeric conversion failed: fall back to the raw values
                result = values
                if values.dtype == np.object_:
                    na_count = parsers.sanitize_objects(result, na_values, False)
        else:
            result = values
            if values.dtype == np.object_:
                na_count = parsers.sanitize_objects(values, na_values, False)
        if result.dtype == np.object_ and try_num_bool:
            # final attempt: map configured true/false strings to bools
            result = libops.maybe_convert_bool(
                np.asarray(values),
                true_values=self.true_values,
                false_values=self.false_values,
            )
        return result, na_count
    def _cast_types(self, values, cast_type, column):
        """
        Cast values to specified type

        Parameters
        ----------
        values : ndarray
        cast_type : string or np.dtype
            dtype to cast values to
        column : string
            column name - used only for error reporting

        Returns
        -------
        converted : ndarray
        """
        if is_categorical_dtype(cast_type):
            known_cats = (
                isinstance(cast_type, CategoricalDtype)
                and cast_type.categories is not None
            )
            if not is_object_dtype(values) and not known_cats:
                # XXX this is for consistency with
                # c-parser which parses all categories
                # as strings
                values = astype_nansafe(values, str)
            cats = Index(values).unique().dropna()
            values = Categorical._from_inferred_categories(
                cats, cats.get_indexer(values), cast_type, true_values=self.true_values
            )
        # use the EA's implementation of casting
        elif is_extension_array_dtype(cast_type):
            # ensure cast_type is an actual dtype and not a string
            cast_type = pandas_dtype(cast_type)
            array_type = cast_type.construct_array_type()
            try:
                return array_type._from_sequence_of_strings(values, dtype=cast_type)
            except NotImplementedError:
                raise NotImplementedError(
                    "Extension Array: {ea} must implement "
                    "_from_sequence_of_strings in order "
                    "to be used in parser methods".format(ea=array_type)
                )
        else:
            try:
                values = astype_nansafe(values, cast_type, copy=True, skipna=True)
            except ValueError:
                raise ValueError(
                    "Unable to convert column {column} to type "
                    "{cast_type}".format(column=column, cast_type=cast_type)
                )
        return values
def _do_date_conversions(self, names, data):
# returns data, columns
if self.parse_dates is not None:
data, names = _process_date_conversion(
data,
self._date_conv,
self.parse_dates,
self.index_col,
self.index_names,
names,
keep_date_col=self.keep_date_col,
)
return names, data
class CParserWrapper(ParserBase):
    """
    Adapter exposing the C parser (``parsers.TextReader``) through the
    ParserBase interface.
    """
    def __init__(self, src, **kwds):
        self.kwds = kwds
        # copy before ParserBase pops entries out of it
        kwds = kwds.copy()
        ParserBase.__init__(self, kwds)
        if kwds.get("compression") is None and "utf-16" in (kwds.get("encoding") or ""):
            # if source is utf-16 plain text, convert source to utf-8
            if isinstance(src, str):
                src = open(src, "rb")
                self.handles.append(src)
            src = UTF8Recoder(src, kwds["encoding"])
            kwds["encoding"] = "utf-8"
        # #2442
        kwds["allow_leading_cols"] = self.index_col is not False
        # GH20529, validate usecol arg before TextReader
        self.usecols, self.usecols_dtype = _validate_usecols_arg(kwds["usecols"])
        kwds["usecols"] = self.usecols
        self._reader = parsers.TextReader(src, **kwds)
        self.unnamed_cols = self._reader.unnamed_cols
        passed_names = self.names is None
        if self._reader.header is None:
            self.names = None
        else:
            if len(self._reader.header) > 1:
                # we have a multi index in the columns
                self.names, self.index_names, self.col_names, passed_names = self._extract_multi_indexer_columns(  # noqa: E501
                    self._reader.header, self.index_names, self.col_names, passed_names
                )
            else:
                self.names = list(self._reader.header[0])
        if self.names is None:
            # no header: synthesize names from prefix or positions
            if self.prefix:
                self.names = [
                    "{prefix}{i}".format(prefix=self.prefix, i=i)
                    for i in range(self._reader.table_width)
                ]
            else:
                self.names = list(range(self._reader.table_width))
        # gh-9755
        #
        # need to set orig_names here first
        # so that proper indexing can be done
        # with _set_noconvert_columns
        #
        # once names has been filtered, we will
        # then set orig_names again to names
        self.orig_names = self.names[:]
        if self.usecols:
            usecols = _evaluate_usecols(self.usecols, self.orig_names)
            # GH 14671
            if self.usecols_dtype == "string" and not set(usecols).issubset(
                self.orig_names
            ):
                _validate_usecols_names(usecols, self.orig_names)
            if len(self.names) > len(usecols):
                self.names = [
                    n
                    for i, n in enumerate(self.names)
                    if (i in usecols or n in usecols)
                ]
            if len(self.names) < len(usecols):
                _validate_usecols_names(usecols, self.names)
        self._set_noconvert_columns()
        self.orig_names = self.names
        if not self._has_complex_date_col:
            if self._reader.leading_cols == 0 and _is_index_col(self.index_col):
                self._name_processed = True
                (index_names, self.names, self.index_col) = _clean_index_names(
                    self.names, self.index_col, self.unnamed_cols
                )
                if self.index_names is None:
                    self.index_names = index_names
            if self._reader.header is None and not passed_names:
                self.index_names = [None] * len(self.index_names)
        self._implicit_index = self._reader.leading_cols > 0
def close(self):
for f in self.handles:
f.close()
# close additional handles opened by C parser (for compression)
try:
self._reader.close()
except ValueError:
pass
    def _set_noconvert_columns(self):
        """
        Set the columns that should not undergo dtype conversions.

        Currently, any column that is involved with date parsing will not
        undergo such conversions.
        """
        names = self.orig_names
        if self.usecols_dtype == "integer":
            # A set of integers will be converted to a list in
            # the correct order every single time.
            usecols = list(self.usecols)
            usecols.sort()
        elif callable(self.usecols) or self.usecols_dtype not in ("empty", None):
            # The names attribute should have the correct columns
            # in the proper order for indexing with parse_dates.
            usecols = self.names[:]
        else:
            # Usecols is empty.
            usecols = None
        def _set(x):
            # translate a positional or named spec to a reader column index
            if usecols is not None and is_integer(x):
                x = usecols[x]
            if not is_integer(x):
                x = names.index(x)
            self._reader.set_noconvert(x)
        if isinstance(self.parse_dates, list):
            for val in self.parse_dates:
                if isinstance(val, list):
                    for k in val:
                        _set(k)
                else:
                    _set(val)
        elif isinstance(self.parse_dates, dict):
            for val in self.parse_dates.values():
                if isinstance(val, list):
                    for k in val:
                        _set(k)
                else:
                    _set(val)
        elif self.parse_dates:
            # parse_dates=True: protect the index column(s)
            if isinstance(self.index_col, list):
                for k in self.index_col:
                    _set(k)
            elif self.index_col is not None:
                _set(self.index_col)
def set_error_bad_lines(self, status):
self._reader.set_error_bad_lines(int(status))
    def read(self, nrows=None):
        """Read up to ``nrows`` rows from the C reader.

        Returns (index, column labels, data dict). On an immediately
        exhausted reader, the first call returns empty metadata; later
        calls re-raise StopIteration.
        """
        try:
            data = self._reader.read(nrows)
        except StopIteration:
            if self._first_chunk:
                self._first_chunk = False
                names = self._maybe_dedup_names(self.orig_names)
                index, columns, col_dict = _get_empty_meta(
                    names,
                    self.index_col,
                    self.index_names,
                    dtype=self.kwds.get("dtype"),
                )
                columns = self._maybe_make_multi_index_columns(columns, self.col_names)
                if self.usecols is not None:
                    columns = self._filter_usecols(columns)
                # keep only the columns that survived usecols filtering
                col_dict = dict(
                    filter(lambda item: item[0] in columns, col_dict.items())
                )
                return index, columns, col_dict
            else:
                raise
        # Done with first read, next time raise StopIteration
        self._first_chunk = False
        names = self.names
        if self._reader.leading_cols:
            if self._has_complex_date_col:
                raise NotImplementedError("file structure not yet supported")
            # implicit index, no index names
            arrays = []
            for i in range(self._reader.leading_cols):
                if self.index_col is None:
                    values = data.pop(i)
                else:
                    values = data.pop(self.index_col[i])
                values = self._maybe_parse_dates(values, i, try_parse_dates=True)
                arrays.append(values)
            index = ensure_index_from_sequences(arrays)
            if self.usecols is not None:
                names = self._filter_usecols(names)
            names = self._maybe_dedup_names(names)
            # rename dict keys
            data = sorted(data.items())
            data = {k: v for k, (i, v) in zip(names, data)}
            names, data = self._do_date_conversions(names, data)
        else:
            # rename dict keys
            data = sorted(data.items())
            # ugh, mutation
            names = list(self.orig_names)
            names = self._maybe_dedup_names(names)
            if self.usecols is not None:
                names = self._filter_usecols(names)
            # columns as list
            alldata = [x[1] for x in data]
            data = {k: v for k, (i, v) in zip(names, data)}
            names, data = self._do_date_conversions(names, data)
            index, names = self._make_index(data, alldata, names)
        # maybe create a mi on the columns
        names = self._maybe_make_multi_index_columns(names, self.col_names)
        return index, names, data
def _filter_usecols(self, names):
# hackish
usecols = _evaluate_usecols(self.usecols, names)
if usecols is not None and len(names) != len(usecols):
names = [
name for i, name in enumerate(names) if i in usecols or name in usecols
]
return names
def _get_index_names(self):
names = list(self._reader.header[0])
idx_names = None
if self._reader.leading_cols == 0 and self.index_col is not None:
(idx_names, names, self.index_col) = _clean_index_names(
names, self.index_col, self.unnamed_cols
)
return names, idx_names
def _maybe_parse_dates(self, values, index, try_parse_dates=True):
if try_parse_dates and self._should_parse_dates(index):
values = self._date_conv(values)
return values
def TextParser(*args, **kwds):
    """
    Converts lists of lists/tuples into DataFrames with proper type
    inference and optional (e.g. string to datetime) conversion. Also
    enables iterating lazily over chunks of large files.

    Parameters
    ----------
    data : file-like object or list
    delimiter : separator character to use
    dialect : str or csv.Dialect instance, optional
        Ignored if delimiter is longer than 1 character
    names : sequence, default
    header : int, default 0
        Row to use to parse column labels. Defaults to the first row.
        Prior rows will be discarded
    index_col : int or list, optional
        Column or columns to use as the (possibly hierarchical) index
    has_index_names: bool, default False
        True if the cols defined in index_col have an index name and are
        not in the header.
    na_values : scalar, str, list-like, or dict, optional
        Additional strings to recognize as NA/NaN.
    keep_default_na : bool, default True
    thousands : str, optional
        Thousands separator
    comment : str, optional
        Comment out remainder of line
    parse_dates : bool, default False
    keep_date_col : bool, default False
    date_parser : function, optional
    skiprows : list of integers
        Row numbers to skip
    skipfooter : int
        Number of line at bottom of file to skip
    converters : dict, optional
        Dict of functions for converting values in certain columns. Keys
        can either be integers or column labels, values are functions that
        take one input argument, the cell (not column) content, and return
        the transformed content.
    encoding : str, optional
        Encoding to use for UTF when reading/writing (ex. 'utf-8')
    squeeze : bool, default False
        returns Series if only one column.
    infer_datetime_format: bool, default False
        If True and `parse_dates` is True for a column, try to infer the
        datetime format based on the first datetime string. If the format
        can be inferred, there often will be a large parsing speed-up.
    float_precision : str, optional
        Specifies which converter the C engine should use for
        floating-point values. The options are None for the ordinary
        converter, 'high' for the high-precision converter, and
        'round_trip' for the round-trip converter.
    """
    # Force the python engine: this entry point handles in-memory rows.
    kwds["engine"] = "python"
    return TextFileReader(*args, **kwds)
def count_empty_vals(vals):
    """Count entries that are the empty string or None."""
    return sum(v == "" or v is None for v in vals)
class PythonParser(ParserBase):
    # Pure-Python parsing backend (engine="python"): reads rows via the
    # stdlib csv module / regex splitting instead of the C tokenizer.
    def __init__(self, f, **kwds):
        """
        Workhorse function for processing nested list into DataFrame

        Should be replaced by np.genfromtxt eventually?

        Parameters
        ----------
        f : file-like object, path, or list of pre-split rows
        **kwds : parsing options forwarded from the reader front-end
            (delimiter, quoting, skiprows, dtype, na_values, ...).
        """
        ParserBase.__init__(self, kwds)
        # Parsing state: `data` is the line source, `buf` a read-ahead
        # buffer, `pos` the position in the raw input, `line_pos` the
        # number of logical (post-skip) lines consumed.
        self.data = None
        self.buf = []
        self.pos = 0
        self.line_pos = 0
        self.encoding = kwds["encoding"]
        self.compression = kwds["compression"]
        self.memory_map = kwds["memory_map"]
        self.skiprows = kwds["skiprows"]
        # Normalize skiprows to a predicate over row numbers.
        if callable(self.skiprows):
            self.skipfunc = self.skiprows
        else:
            self.skipfunc = lambda x: x in self.skiprows
        self.skipfooter = _validate_skipfooter_arg(kwds["skipfooter"])
        self.delimiter = kwds["delimiter"]
        self.quotechar = kwds["quotechar"]
        # NOTE(review): this str() round-trip is a no-op on py3 —
        # presumably a Python 2 unicode leftover; confirm before removing.
        if isinstance(self.quotechar, str):
            self.quotechar = str(self.quotechar)
        self.escapechar = kwds["escapechar"]
        self.doublequote = kwds["doublequote"]
        self.skipinitialspace = kwds["skipinitialspace"]
        self.lineterminator = kwds["lineterminator"]
        self.quoting = kwds["quoting"]
        self.usecols, _ = _validate_usecols_arg(kwds["usecols"])
        self.skip_blank_lines = kwds["skip_blank_lines"]
        self.warn_bad_lines = kwds["warn_bad_lines"]
        self.error_bad_lines = kwds["error_bad_lines"]
        self.names_passed = kwds["names"] or None
        self.has_index_names = False
        if "has_index_names" in kwds:
            self.has_index_names = kwds["has_index_names"]
        self.verbose = kwds["verbose"]
        self.converters = kwds["converters"]
        self.dtype = kwds["dtype"]
        self.thousands = kwds["thousands"]
        self.decimal = kwds["decimal"]
        self.comment = kwds["comment"]
        self._comment_lines = []
        # Open/wrap the input (handles compression and memory mapping);
        # the returned handles are tracked so they can be closed later.
        f, handles = _get_handle(
            f,
            "r",
            encoding=self.encoding,
            compression=self.compression,
            memory_map=self.memory_map,
        )
        self.handles.extend(handles)
        # Set self.data to something that can read lines.
        if hasattr(f, "readline"):
            self._make_reader(f)
        else:
            # Already a list of pre-split rows.
            self.data = f
        # Get columns in two steps: infer from data, then
        # infer column indices from self.usecols if it is specified.
        self._col_indices = None
        (
            self.columns,
            self.num_original_columns,
            self.unnamed_cols,
        ) = self._infer_columns()
        # Now self.columns has the set of columns that we will process.
        # The original set is stored in self.original_columns.
        if len(self.columns) > 1:
            # we are processing a multi index column
            self.columns, self.index_names, self.col_names, _ = self._extract_multi_indexer_columns(  # noqa: E501
                self.columns, self.index_names, self.col_names
            )
            # Update list of original names to include all indices.
            self.num_original_columns = len(self.columns)
        else:
            # Single header row: unwrap the one-element list of rows.
            self.columns = self.columns[0]
        # get popped off for index
        self.orig_names = list(self.columns)
        # needs to be cleaned/refactored
        # multiple date column thing turning into a real spaghetti factory
        if not self._has_complex_date_col:
            (index_names, self.orig_names, self.columns) = self._get_index_name(
                self.columns
            )
            self._name_processed = True
            if self.index_names is None:
                self.index_names = index_names
        # Columns that must keep their thousands separators (date cols).
        if self.parse_dates:
            self._no_thousands_columns = self._set_no_thousands_columns()
        else:
            self._no_thousands_columns = None
        if len(self.decimal) != 1:
            raise ValueError("Only length-1 decimal markers supported")
        # Regex matching any character that disqualifies a field from
        # numeric cleanup (used by _search_replace_num_columns).
        if self.thousands is None:
            self.nonnum = re.compile(
                r"[^-^0-9^{decimal}]+".format(decimal=self.decimal)
            )
        else:
            self.nonnum = re.compile(
                r"[^-^0-9^{thousands}^{decimal}]+".format(
                    thousands=self.thousands, decimal=self.decimal
                )
            )
def _set_no_thousands_columns(self):
# Create a set of column ids that are not to be stripped of thousands
# operators.
noconvert_columns = set()
def _set(x):
if is_integer(x):
noconvert_columns.add(x)
else:
noconvert_columns.add(self.columns.index(x))
if isinstance(self.parse_dates, list):
for val in self.parse_dates:
if isinstance(val, list):
for k in val:
_set(k)
else:
_set(val)
elif isinstance(self.parse_dates, dict):
for val in self.parse_dates.values():
if isinstance(val, list):
for k in val:
_set(k)
else:
_set(val)
elif self.parse_dates:
if isinstance(self.index_col, list):
for k in self.index_col:
_set(k)
elif self.index_col is not None:
_set(self.index_col)
return noconvert_columns
    def _make_reader(self, f):
        """
        Install a row iterator over file-like ``f`` into ``self.data``.

        Single-character (or unspecified) separators go through the
        stdlib csv module, optionally sniffing the delimiter from the
        first non-skipped line; multi-character separators are treated
        as a regex and split manually.
        """
        sep = self.delimiter
        if sep is None or len(sep) == 1:
            if self.lineterminator:
                raise ValueError(
                    "Custom line terminators not supported in " "python parser (yet)"
                )
            # Dialect assembled from the user-supplied quoting options.
            class MyDialect(csv.Dialect):
                delimiter = self.delimiter
                quotechar = self.quotechar
                escapechar = self.escapechar
                doublequote = self.doublequote
                skipinitialspace = self.skipinitialspace
                quoting = self.quoting
                lineterminator = "\n"
            dia = MyDialect
            sniff_sep = True
            if sep is not None:
                sniff_sep = False
                dia.delimiter = sep
            # attempt to sniff the delimiter
            if sniff_sep:
                # Advance past skipped rows, then sniff from the first
                # usable (comment-stripped) line.
                line = f.readline()
                while self.skipfunc(self.pos):
                    self.pos += 1
                    line = f.readline()
                line = self._check_comments([line])[0]
                self.pos += 1
                self.line_pos += 1
                sniffed = csv.Sniffer().sniff(line)
                dia.delimiter = sniffed.delimiter
                # The sniffed line was consumed from `f`; re-parse it and
                # keep it in the buffer so it is not lost.
                if self.encoding is not None:
                    self.buf.extend(
                        list(
                            UnicodeReader(
                                StringIO(line), dialect=dia, encoding=self.encoding
                            )
                        )
                    )
                else:
                    self.buf.extend(list(csv.reader(StringIO(line), dialect=dia)))
            if self.encoding is not None:
                reader = UnicodeReader(
                    f, dialect=dia, encoding=self.encoding, strict=True
                )
            else:
                reader = csv.reader(f, dialect=dia, strict=True)
        else:
            # Multi-char separator: split each stripped line on the
            # regex; note quoting is effectively ignored on this path.
            def _read():
                line = f.readline()
                pat = re.compile(sep)
                yield pat.split(line.strip())
                for line in f:
                    yield pat.split(line.strip())
            reader = _read()
        self.data = reader
    def read(self, rows=None):
        """
        Parse up to ``rows`` rows (all remaining rows when None) and
        return ``(index, columns, col_dict)``.

        An exhausted source yields an empty frame with the right
        metadata on the first read, and re-raises StopIteration on
        subsequent reads (chunked-iteration protocol).
        """
        try:
            content = self._get_lines(rows)
        except StopIteration:
            if self._first_chunk:
                content = []
            else:
                raise
        # done with first read, next time raise StopIteration
        self._first_chunk = False
        columns = list(self.orig_names)
        if not len(content): # pragma: no cover
            # DataFrame with the right metadata, even though it's length 0
            names = self._maybe_dedup_names(self.orig_names)
            index, columns, col_dict = _get_empty_meta(
                names, self.index_col, self.index_names, self.dtype
            )
            columns = self._maybe_make_multi_index_columns(columns, self.col_names)
            return index, columns, col_dict
        # handle new style for names in index
        count_empty_content_vals = count_empty_vals(content[0])
        indexnamerow = None
        # A first row that is entirely blank padding names the index.
        if self.has_index_names and count_empty_content_vals == len(columns):
            indexnamerow = content[0]
            content = content[1:]
        # rows -> columns, then drop implicit index cols, dedup names,
        # convert dates, coerce dtypes, and finally build the index.
        alldata = self._rows_to_cols(content)
        data = self._exclude_implicit_index(alldata)
        columns = self._maybe_dedup_names(self.columns)
        columns, data = self._do_date_conversions(columns, data)
        data = self._convert_data(data)
        index, columns = self._make_index(data, alldata, columns, indexnamerow)
        return index, columns, data
def _exclude_implicit_index(self, alldata):
names = self._maybe_dedup_names(self.orig_names)
if self._implicit_index:
excl_indices = self.index_col
data = {}
offset = 0
for i, col in enumerate(names):
while i + offset in excl_indices:
offset += 1
data[col] = alldata[i + offset]
else:
data = {k: v for k, v in zip(names, alldata)}
return data
# legacy
def get_chunk(self, size=None):
if size is None:
size = self.chunksize
return self.read(rows=size)
    def _convert_data(self, data):
        """
        Apply converters, dtypes and NA values to the name->column dict
        ``data`` and return the converted ndarray columns.

        Integer keys in converters/dtype/na_values refer to column
        positions and are first translated to column names.
        """
        # apply converters
        def _clean_mapping(mapping):
            "converts col numbers to names"
            clean = {}
            for col, v in mapping.items():
                if isinstance(col, int) and col not in self.orig_names:
                    col = self.orig_names[col]
                clean[col] = v
            return clean
        clean_conv = _clean_mapping(self.converters)
        if not isinstance(self.dtype, dict):
            # handles single dtype applied to all columns
            clean_dtypes = self.dtype
        else:
            clean_dtypes = _clean_mapping(self.dtype)
        # Apply NA values.
        clean_na_values = {}
        clean_na_fvalues = {}
        if isinstance(self.na_values, dict):
            # Translate positional keys to names while keeping the
            # string/float NA sets paired per column.
            for col in self.na_values:
                na_value = self.na_values[col]
                na_fvalue = self.na_fvalues[col]
                if isinstance(col, int) and col not in self.orig_names:
                    col = self.orig_names[col]
                clean_na_values[col] = na_value
                clean_na_fvalues[col] = na_fvalue
        else:
            clean_na_values = self.na_values
            clean_na_fvalues = self.na_fvalues
        return self._convert_to_ndarrays(
            data,
            clean_na_values,
            clean_na_fvalues,
            self.verbose,
            clean_conv,
            clean_dtypes,
        )
    def _infer_columns(self):
        """
        Determine the column names from the header row(s), user-passed
        ``names``, or generated defaults.

        Returns
        -------
        columns : list of lists
            One list of names per header level (one element when the
            header is a single row).
        num_original_columns : int
        unnamed_cols : set
            Names synthesized for blank header cells ("Unnamed: ...").
        """
        names = self.names
        num_original_columns = 0
        clear_buffer = True
        unnamed_cols = set()
        if self.header is not None:
            header = self.header
            if isinstance(header, (list, tuple, np.ndarray)):
                have_mi_columns = len(header) > 1
                # we have a mi columns, so read an extra line
                if have_mi_columns:
                    header = list(header) + [header[-1] + 1]
            else:
                have_mi_columns = False
                header = [header]
            columns = []
            for level, hr in enumerate(header):
                try:
                    # Advance to the header row for this level.
                    line = self._buffered_line()
                    while self.line_pos <= hr:
                        line = self._next_line()
                except StopIteration:
                    if self.line_pos < hr:
                        raise ValueError(
                            "Passed header={hr} but only {pos} lines in "
                            "file".format(hr=hr, pos=(self.line_pos + 1))
                        )
                    # We have an empty file, so check
                    # if columns are provided. That will
                    # serve as the 'line' for parsing
                    if have_mi_columns and hr > 0:
                        if clear_buffer:
                            self._clear_buffer()
                        columns.append([None] * len(columns[-1]))
                        return columns, num_original_columns, unnamed_cols
                    if not self.names:
                        raise EmptyDataError("No columns to parse from file")
                    line = self.names[:]
                this_columns = []
                this_unnamed_cols = []
                # Fill blank header cells with generated "Unnamed" names.
                for i, c in enumerate(line):
                    if c == "":
                        if have_mi_columns:
                            col_name = "Unnamed: {i}_level_{level}".format(
                                i=i, level=level
                            )
                        else:
                            col_name = "Unnamed: {i}".format(i=i)
                        this_unnamed_cols.append(i)
                        this_columns.append(col_name)
                    else:
                        this_columns.append(c)
                if not have_mi_columns and self.mangle_dupe_cols:
                    # De-duplicate repeated names as "name.1", "name.2", ...
                    counts = defaultdict(int)
                    for i, col in enumerate(this_columns):
                        cur_count = counts[col]
                        while cur_count > 0:
                            counts[col] = cur_count + 1
                            col = "{column}.{count}".format(column=col, count=cur_count)
                            cur_count = counts[col]
                        this_columns[i] = col
                        counts[col] = cur_count + 1
                elif have_mi_columns:
                    # if we have grabbed an extra line, but its not in our
                    # format so save in the buffer, and create an blank extra
                    # line for the rest of the parsing code
                    if hr == header[-1]:
                        lc = len(this_columns)
                        ic = len(self.index_col) if self.index_col is not None else 0
                        unnamed_count = len(this_unnamed_cols)
                        if lc != unnamed_count and lc - ic > unnamed_count:
                            clear_buffer = False
                            this_columns = [None] * lc
                            self.buf = [self.buf[-1]]
                columns.append(this_columns)
                unnamed_cols.update({this_columns[i] for i in this_unnamed_cols})
                if len(columns) == 1:
                    num_original_columns = len(this_columns)
            if clear_buffer:
                self._clear_buffer()
            if names is not None:
                # User-supplied names override the header row(s).
                if (self.usecols is not None and len(names) != len(self.usecols)) or (
                    self.usecols is None and len(names) != len(columns[0])
                ):
                    raise ValueError(
                        "Number of passed names did not match "
                        "number of header fields in the file"
                    )
                if len(columns) > 1:
                    raise TypeError("Cannot pass names with multi-index " "columns")
                if self.usecols is not None:
                    # Set _use_cols. We don't store columns because they are
                    # overwritten.
                    self._handle_usecols(columns, names)
                else:
                    self._col_indices = None
                    num_original_columns = len(names)
                columns = [names]
            else:
                columns = self._handle_usecols(columns, columns[0])
        else:
            # No header row: derive names from the first data line,
            # from user-passed ``names``, or generate them.
            try:
                line = self._buffered_line()
            except StopIteration:
                if not names:
                    raise EmptyDataError("No columns to parse from file")
                line = names[:]
            ncols = len(line)
            num_original_columns = ncols
            if not names:
                if self.prefix:
                    # prefix + position, e.g. "X0", "X1", ...
                    columns = [
                        [
                            "{prefix}{idx}".format(prefix=self.prefix, idx=i)
                            for i in range(ncols)
                        ]
                    ]
                else:
                    columns = [list(range(ncols))]
                columns = self._handle_usecols(columns, columns[0])
            else:
                if self.usecols is None or len(names) >= num_original_columns:
                    columns = self._handle_usecols([names], names)
                    num_original_columns = len(names)
                else:
                    if not callable(self.usecols) and len(names) != len(self.usecols):
                        raise ValueError(
                            "Number of passed names did not match number of "
                            "header fields in the file"
                        )
                    # Ignore output but set used columns.
                    self._handle_usecols([names], names)
                    columns = [names]
                    num_original_columns = ncols
        return columns, num_original_columns, unnamed_cols
    def _handle_usecols(self, columns, usecols_key):
        """
        Sets self._col_indices

        usecols_key is used if there are string usecols.

        Filters every header-level list in ``columns`` down to the
        selected positions and returns the filtered lists. A callable
        ``usecols`` is evaluated against ``usecols_key``; string
        entries are resolved to positions via ``usecols_key``.
        """
        if self.usecols is not None:
            if callable(self.usecols):
                col_indices = _evaluate_usecols(self.usecols, usecols_key)
            elif any(isinstance(u, str) for u in self.usecols):
                if len(columns) > 1:
                    raise ValueError(
                        "If using multiple headers, usecols must " "be integers."
                    )
                col_indices = []
                for col in self.usecols:
                    if isinstance(col, str):
                        try:
                            col_indices.append(usecols_key.index(col))
                        except ValueError:
                            # Unknown name: raise with the full picture.
                            _validate_usecols_names(self.usecols, usecols_key)
                    else:
                        col_indices.append(col)
            else:
                # Already positional.
                col_indices = self.usecols
            columns = [
                [n for i, n in enumerate(column) if i in col_indices]
                for column in columns
            ]
            self._col_indices = col_indices
        return columns
def _buffered_line(self):
"""
Return a line from buffer, filling buffer if required.
"""
if len(self.buf) > 0:
return self.buf[0]
else:
return self._next_line()
    def _check_for_bom(self, first_row):
        """
        Checks whether the file begins with the BOM character.
        If it does, remove it. In addition, if there is quoting
        in the field subsequent to the BOM, remove it as well
        because it technically takes place at the beginning of
        the name, not the middle of it.
        """
        # first_row will be a list, so we need to check
        # that that list is not empty before proceeding.
        if not first_row:
            return first_row
        # The first element of this row is the one that could have the
        # BOM that we want to remove. Check that the first element is a
        # string before proceeding.
        if not isinstance(first_row[0], str):
            return first_row
        # Check that the string is not empty, as that would
        # obviously not have a BOM at the start of it.
        if not first_row[0]:
            return first_row
        # Since the string is non-empty, check that it does
        # in fact begin with a BOM.
        first_elt = first_row[0][0]
        if first_elt != _BOM:
            return first_row
        first_row_bom = first_row[0]
        if len(first_row_bom) > 1 and first_row_bom[1] == self.quotechar:
            start = 2
            quote = first_row_bom[1]
            # NOTE(review): assumes a matching closing quote exists;
            # .index() would raise ValueError otherwise — confirm upstream
            # guarantees quoted fields are balanced.
            end = first_row_bom[2:].index(quote) + 2
            # Extract the data between the quotation marks
            new_row = first_row_bom[start:end]
            # Extract any remaining data after the second
            # quotation mark.
            if len(first_row_bom) > end + 1:
                new_row += first_row_bom[end + 1 :]
            return [new_row] + first_row[1:]
        elif len(first_row_bom) > 1:
            # Unquoted field: simply drop the BOM character.
            return [first_row_bom[1:]]
        else:
            # First row is just the BOM, so we
            # return an empty string.
            return [""]
def _is_line_empty(self, line):
"""
Check if a line is empty or not.
Parameters
----------
line : str, array-like
The line of data to check.
Returns
-------
boolean : Whether or not the line is empty.
"""
return not line or all(not x for x in line)
    def _next_line(self):
        """
        Consume and return the next usable line, honoring skiprows,
        comments and blank-line skipping; appends it to ``self.buf``
        and advances ``self.pos``/``self.line_pos``.

        Raises StopIteration when the source is exhausted.
        """
        if isinstance(self.data, list):
            # List source: index directly into the pre-split rows.
            while self.skipfunc(self.pos):
                self.pos += 1
            while True:
                try:
                    line = self._check_comments([self.data[self.pos]])[0]
                    self.pos += 1
                    # either uncommented or blank to begin with
                    if not self.skip_blank_lines and (
                        self._is_line_empty(self.data[self.pos - 1]) or line
                    ):
                        break
                    elif self.skip_blank_lines:
                        ret = self._remove_empty_lines([line])
                        if ret:
                            line = ret[0]
                            break
                except IndexError:
                    raise StopIteration
        else:
            # Iterator source: pull rows one at a time.
            while self.skipfunc(self.pos):
                self.pos += 1
                next(self.data)
            while True:
                orig_line = self._next_iter_line(row_num=self.pos + 1)
                self.pos += 1
                if orig_line is not None:
                    line = self._check_comments([orig_line])[0]
                    if self.skip_blank_lines:
                        ret = self._remove_empty_lines([line])
                        if ret:
                            line = ret[0]
                            break
                    elif self._is_line_empty(orig_line) or line:
                        break
        # This was the first line of the file,
        # which could contain the BOM at the
        # beginning of it.
        if self.pos == 1:
            line = self._check_for_bom(line)
        self.line_pos += 1
        self.buf.append(line)
        return line
def _alert_malformed(self, msg, row_num):
"""
Alert a user about a malformed row.
If `self.error_bad_lines` is True, the alert will be `ParserError`.
If `self.warn_bad_lines` is True, the alert will be printed out.
Parameters
----------
msg : The error message to display.
row_num : The row number where the parsing error occurred.
Because this row number is displayed, we 1-index,
even though we 0-index internally.
"""
if self.error_bad_lines:
raise ParserError(msg)
elif self.warn_bad_lines:
base = "Skipping line {row_num}: ".format(row_num=row_num)
sys.stderr.write(base + msg + "\n")
    def _next_iter_line(self, row_num):
        """
        Wrapper around iterating through `self.data` (CSV source).

        When a CSV error is raised, we check for specific
        error messages that allow us to customize the
        error message displayed to the user.

        Parameters
        ----------
        row_num : The row number of the line being parsed.

        Returns
        -------
        The next row, or None when a csv.Error was alerted (rather
        than raised) via ``_alert_malformed``.
        """
        try:
            return next(self.data)
        except csv.Error as e:
            if self.warn_bad_lines or self.error_bad_lines:
                msg = str(e)
                if "NULL byte" in msg:
                    msg = (
                        "NULL byte detected. This byte "
                        "cannot be processed in Python's "
                        "native csv library at the moment, "
                        "so please pass in engine='c' instead"
                    )
                if self.skipfooter > 0:
                    reason = (
                        "Error could possibly be due to "
                        "parsing errors in the skipped footer rows "
                        "(the skipfooter keyword is only applied "
                        "after Python's csv library has parsed "
                        "all rows)."
                    )
                    msg += ". " + reason
                self._alert_malformed(msg, row_num)
            return None
def _check_comments(self, lines):
if self.comment is None:
return lines
ret = []
for l in lines:
rl = []
for x in l:
if not isinstance(x, str) or self.comment not in x:
rl.append(x)
else:
x = x[: x.find(self.comment)]
if len(x) > 0:
rl.append(x)
break
ret.append(rl)
return ret
def _remove_empty_lines(self, lines):
"""
Iterate through the lines and remove any that are
either empty or contain only one whitespace value
Parameters
----------
lines : array-like
The array of lines that we are to filter.
Returns
-------
filtered_lines : array-like
The same array of lines with the "empty" ones removed.
"""
ret = []
for l in lines:
# Remove empty lines and lines with only one whitespace value
if (
len(l) > 1
or len(l) == 1
and (not isinstance(l[0], str) or l[0].strip())
):
ret.append(l)
return ret
def _check_thousands(self, lines):
if self.thousands is None:
return lines
return self._search_replace_num_columns(
lines=lines, search=self.thousands, replace=""
)
def _search_replace_num_columns(self, lines, search, replace):
ret = []
for l in lines:
rl = []
for i, x in enumerate(l):
if (
not isinstance(x, str)
or search not in x
or (self._no_thousands_columns and i in self._no_thousands_columns)
or self.nonnum.search(x.strip())
):
rl.append(x)
else:
rl.append(x.replace(search, replace))
ret.append(rl)
return ret
def _check_decimal(self, lines):
if self.decimal == _parser_defaults["decimal"]:
return lines
return self._search_replace_num_columns(
lines=lines, search=self.decimal, replace="."
)
    def _clear_buffer(self):
        # Rebind (rather than clear in place) so external references to
        # the old buffer are left untouched.
        self.buf = []
    # True when index columns are present in the data but absent from
    # the header (set by _get_index_name below).
    _implicit_index = False
    def _get_index_name(self, columns):
        """
        Try several cases to get lines:

        0) There are headers on row 0 and row 1 and their
        total summed lengths equals the length of the next line.
        Treat row 0 as columns and row 1 as indices
        1) Look for implicit index: there are more columns
        on row 1 than row 0. If this is true, assume that row
        1 lists index columns and row 0 lists normal columns.
        2) Get index from the columns if it was listed.
        """
        orig_names = list(columns)
        columns = list(columns)
        # Peek at the next two lines (they stay in self.buf).
        try:
            line = self._next_line()
        except StopIteration:
            line = None
        try:
            next_line = self._next_line()
        except StopIteration:
            next_line = None
        # implicitly index_col=0 b/c 1 fewer column names
        implicit_first_cols = 0
        if line is not None:
            # leave it 0, #2442
            # Case 1
            if self.index_col is not False:
                implicit_first_cols = len(line) - self.num_original_columns
            # Case 0
            if next_line is not None:
                if len(next_line) == len(line) + self.num_original_columns:
                    # column and index names on diff rows
                    self.index_col = list(range(len(line)))
                    self.buf = self.buf[1:]
                    for c in reversed(line):
                        columns.insert(0, c)
                    # Update list of original names to include all indices.
                    orig_names = list(columns)
                    self.num_original_columns = len(columns)
                    return line, orig_names, columns
        if implicit_first_cols > 0:
            # Case 1
            self._implicit_index = True
            if self.index_col is None:
                self.index_col = list(range(implicit_first_cols))
            index_name = None
        else:
            # Case 2
            (index_name, columns_, self.index_col) = _clean_index_names(
                columns, self.index_col, self.unnamed_cols
            )
        return index_name, orig_names, columns
    def _rows_to_cols(self, content):
        """
        Transpose parsed rows into per-column lists, alerting on (or
        dropping) rows with more fields than expected and padding short
        rows with NaN; then apply ``usecols`` filtering.
        """
        col_len = self.num_original_columns
        if self._implicit_index:
            col_len += len(self.index_col)
        max_len = max(len(row) for row in content)
        # Check that there are no rows with too many
        # elements in their row (rows with too few
        # elements are padded with NaN).
        if max_len > col_len and self.index_col is not False and self.usecols is None:
            footers = self.skipfooter if self.skipfooter else 0
            bad_lines = []
            iter_content = enumerate(content)
            content_len = len(content)
            content = []
            for (i, l) in iter_content:
                actual_len = len(l)
                if actual_len > col_len:
                    if self.error_bad_lines or self.warn_bad_lines:
                        # Reconstruct the 0-based file row number.
                        row_num = self.pos - (content_len - i + footers)
                        bad_lines.append((row_num, actual_len))
                        if self.error_bad_lines:
                            break
                else:
                    content.append(l)
            for row_num, actual_len in bad_lines:
                msg = (
                    "Expected {col_len} fields in line {line}, saw "
                    "{length}".format(
                        col_len=col_len, line=(row_num + 1), length=actual_len
                    )
                )
                if (
                    self.delimiter
                    and len(self.delimiter) > 1
                    and self.quoting != csv.QUOTE_NONE
                ):
                    # see gh-13374
                    reason = (
                        "Error could possibly be due to quotes being "
                        "ignored when a multi-char delimiter is used."
                    )
                    msg += ". " + reason
                self._alert_malformed(msg, row_num + 1)
        # see gh-13320
        zipped_content = list(lib.to_object_array(content, min_width=col_len).T)
        if self.usecols:
            # Keep implicit index columns plus the selected positions.
            if self._implicit_index:
                zipped_content = [
                    a
                    for i, a in enumerate(zipped_content)
                    if (
                        i < len(self.index_col)
                        or i - len(self.index_col) in self._col_indices
                    )
                ]
            else:
                zipped_content = [
                    a for i, a in enumerate(zipped_content) if i in self._col_indices
                ]
        return zipped_content
    def _get_lines(self, rows=None):
        """
        Return up to ``rows`` parsed lines (all remaining when None),
        consuming the read-ahead buffer first, then applying skipfooter,
        comment stripping, blank-line removal and numeric cleanup.
        """
        lines = self.buf
        new_rows = None
        # already fetched some number
        if rows is not None:
            # we already have the lines in the buffer
            if len(self.buf) >= rows:
                new_rows, self.buf = self.buf[:rows], self.buf[rows:]
            # need some lines
            else:
                rows -= len(self.buf)
        if new_rows is None:
            if isinstance(self.data, list):
                if self.pos > len(self.data):
                    raise StopIteration
                if rows is None:
                    new_rows = self.data[self.pos :]
                    new_pos = len(self.data)
                else:
                    new_rows = self.data[self.pos : self.pos + rows]
                    new_pos = self.pos + rows
                # Check for stop rows. n.b.: self.skiprows is a set.
                if self.skiprows:
                    new_rows = [
                        row
                        for i, row in enumerate(new_rows)
                        if not self.skipfunc(i + self.pos)
                    ]
                lines.extend(new_rows)
                self.pos = new_pos
            else:
                new_rows = []
                try:
                    if rows is not None:
                        for _ in range(rows):
                            new_rows.append(next(self.data))
                        lines.extend(new_rows)
                    else:
                        # Drain the iterator completely.
                        rows = 0
                        while True:
                            new_row = self._next_iter_line(row_num=self.pos + rows + 1)
                            rows += 1
                            if new_row is not None:
                                new_rows.append(new_row)
                except StopIteration:
                    # Source exhausted: keep whatever was read; only
                    # re-raise when nothing at all is available.
                    if self.skiprows:
                        new_rows = [
                            row
                            for i, row in enumerate(new_rows)
                            if not self.skipfunc(i + self.pos)
                        ]
                    lines.extend(new_rows)
                    if len(lines) == 0:
                        raise
                self.pos += len(new_rows)
            self.buf = []
        else:
            lines = new_rows
        if self.skipfooter:
            lines = lines[: -self.skipfooter]
        lines = self._check_comments(lines)
        if self.skip_blank_lines:
            lines = self._remove_empty_lines(lines)
        lines = self._check_thousands(lines)
        return self._check_decimal(lines)
def _make_date_converter(
    date_parser=None, dayfirst=False, infer_datetime_format=False, cache_dates=True
):
    """
    Build a converter(*date_cols) that parses one or more raw date
    columns into datetimes, using ``date_parser`` when given or
    ``to_datetime`` otherwise, with progressively laxer fallbacks.
    """
    def converter(*date_cols):
        if date_parser is None:
            # Default path: concatenate the columns and let to_datetime
            # do the parsing; fall back to dateutil row-by-row parsing.
            strs = parsing._concat_date_cols(date_cols)
            try:
                return tools.to_datetime(
                    ensure_object(strs),
                    utc=None,
                    dayfirst=dayfirst,
                    errors="ignore",
                    infer_datetime_format=infer_datetime_format,
                    cache=cache_dates,
                ).to_numpy()
            except ValueError:
                return tools.to_datetime(
                    parsing.try_parse_dates(strs, dayfirst=dayfirst), cache=cache_dates
                )
        else:
            # User parser: try vectorized call, then per-row parsing,
            # then a generic element-wise fallback.
            try:
                result = tools.to_datetime(
                    date_parser(*date_cols), errors="ignore", cache=cache_dates
                )
                if isinstance(result, datetime.datetime):
                    # A scalar means the parser was not vectorized.
                    raise Exception("scalar parser")
                return result
            except Exception:
                try:
                    return tools.to_datetime(
                        parsing.try_parse_dates(
                            parsing._concat_date_cols(date_cols),
                            parser=date_parser,
                            dayfirst=dayfirst,
                        ),
                        errors="ignore",
                    )
                except Exception:
                    return generic_parser(date_parser, *date_cols)
    return converter
def _process_date_conversion(
    data_dict,
    converter,
    parse_spec,
    index_col,
    index_names,
    columns,
    keep_date_col=False,
):
    """
    Apply date parsing per ``parse_spec``:

    - list of scalars: convert those columns in place;
    - list of lists: combine each group into a new "_"-joined column;
    - dict: combine each value's columns under the given new name.

    Returns the updated ``data_dict`` and the new column ordering;
    source columns of combined dates are dropped unless
    ``keep_date_col``.
    """
    def _isindex(colspec):
        # Index columns are converted elsewhere; skip them here.
        return (isinstance(index_col, list) and colspec in index_col) or (
            isinstance(index_names, list) and colspec in index_names
        )
    new_cols = []
    new_data = {}
    orig_names = columns
    columns = list(columns)
    date_cols = set()
    if parse_spec is None or isinstance(parse_spec, bool):
        return data_dict, columns
    if isinstance(parse_spec, list):
        # list of column lists
        for colspec in parse_spec:
            if is_scalar(colspec):
                if isinstance(colspec, int) and colspec not in data_dict:
                    colspec = orig_names[colspec]
                if _isindex(colspec):
                    continue
                data_dict[colspec] = converter(data_dict[colspec])
            else:
                new_name, col, old_names = _try_convert_dates(
                    converter, colspec, data_dict, orig_names
                )
                if new_name in data_dict:
                    raise ValueError(
                        "New date column already in dict {name}".format(name=new_name)
                    )
                new_data[new_name] = col
                new_cols.append(new_name)
                date_cols.update(old_names)
    elif isinstance(parse_spec, dict):
        # dict of new name to column list
        for new_name, colspec in parse_spec.items():
            if new_name in data_dict:
                raise ValueError(
                    "Date column {name} already in dict".format(name=new_name)
                )
            _, col, old_names = _try_convert_dates(
                converter, colspec, data_dict, orig_names
            )
            new_data[new_name] = col
            new_cols.append(new_name)
            date_cols.update(old_names)
    data_dict.update(new_data)
    new_cols.extend(columns)
    if not keep_date_col:
        # Drop the source columns that were merged into new date cols.
        for c in list(date_cols):
            data_dict.pop(c)
            new_cols.remove(c)
    return data_dict, new_cols
def _try_convert_dates(parser, colspec, data_dict, columns):
colset = set(columns)
colnames = []
for c in colspec:
if c in colset:
colnames.append(c)
elif isinstance(c, int) and c not in columns:
colnames.append(columns[c])
else:
colnames.append(c)
new_name = "_".join(str(x) for x in colnames)
to_parse = [data_dict[c] for c in colnames if c in data_dict]
new_col = parser(*to_parse)
return new_name, new_col, colnames
def _clean_na_values(na_values, keep_default_na=True):
    """
    Normalize user-supplied NA values into a (string values, float
    values) pair, optionally unioned with the default NA sentinels.
    """
    if na_values is None:
        na_fvalues = set()
        na_values = _NA_VALUES if keep_default_na else set()
    elif isinstance(na_values, dict):
        # Per-column NA values: coerce each entry to a set (plus the
        # defaults when requested) without mutating the caller's dict.
        source = na_values.copy()
        na_values = {}
        for key, vals in source.items():
            if not is_list_like(vals):
                vals = [vals]
            if keep_default_na:
                vals = set(vals) | _NA_VALUES
            na_values[key] = vals
        na_fvalues = {key: _floatify_na_values(vals) for key, vals in na_values.items()}
    else:
        if not is_list_like(na_values):
            na_values = [na_values]
        na_values = _stringify_na_values(na_values)
        if keep_default_na:
            na_values = na_values | _NA_VALUES
        na_fvalues = _floatify_na_values(na_values)
    return na_values, na_fvalues
def _clean_index_names(columns, index_col, unnamed_cols):
    """
    Resolve string entries of ``index_col`` to positional indices,
    removing matched names from ``columns`` and collecting them as
    index names; placeholder names from ``unnamed_cols`` become None.
    """
    if not _is_index_col(index_col):
        return None, columns, index_col
    columns = list(columns)
    original = list(columns)
    index_names = []
    # Work on a copy so the caller's list is not mutated.
    index_col = list(index_col)
    for pos, spec in enumerate(index_col):
        if isinstance(spec, str):
            index_names.append(spec)
            for j, candidate in enumerate(original):
                if candidate == spec:
                    index_col[pos] = j
                    columns.remove(candidate)
                    break
        else:
            resolved = original[spec]
            columns.remove(resolved)
            index_names.append(resolved)
    # Only clean index names that were placeholders.
    index_names = [
        None if isinstance(name, str) and name in unnamed_cols else name
        for name in index_names
    ]
    return index_names, columns, index_col
def _get_empty_meta(columns, index_col, index_names, dtype=None):
    """
    Build the (index, columns, col_dict) triple for a zero-row result,
    honoring the requested dtypes and any index specification.
    """
    columns = list(columns)
    # Convert `dtype` to a defaultdict of some kind.
    # This will enable us to write `dtype[col_name]`
    # without worrying about KeyError issues later on.
    if not isinstance(dtype, dict):
        # if dtype == None, default will be np.object.
        default_dtype = dtype or np.object
        dtype = defaultdict(lambda: default_dtype)
    else:
        # Save a copy of the dictionary.
        _dtype = dtype.copy()
        dtype = defaultdict(lambda: np.object)
        # Convert column indexes to column names.
        for k, v in _dtype.items():
            col = columns[k] if is_integer(k) else k
            dtype[col] = v
    # Even though we have no data, the "index" of the empty DataFrame
    # could for example still be an empty MultiIndex. Thus, we need to
    # check whether we have any index columns specified, via either:
    #
    # 1) index_col (column indices)
    # 2) index_names (column names)
    #
    # Both must be non-null to ensure a successful construction. Otherwise,
    # we have to create a generic empty Index.
    if (index_col is None or index_col is False) or index_names is None:
        index = Index([])
    else:
        # Build an empty (Multi)Index and remove the consumed columns;
        # popping by ascending position with an offset keeps indices valid.
        data = [Series([], dtype=dtype[name]) for name in index_names]
        index = ensure_index_from_sequences(data, names=index_names)
        index_col.sort()
        for i, n in enumerate(index_col):
            columns.pop(n - i)
    col_dict = {col_name: Series([], dtype=dtype[col_name]) for col_name in columns}
    return index, columns, col_dict
def _floatify_na_values(na_values):
# create float versions of the na_values
result = set()
for v in na_values:
try:
v = float(v)
if not np.isnan(v):
result.add(v)
except (TypeError, ValueError, OverflowError):
pass
return result
def _stringify_na_values(na_values):
""" return a stringified and numeric for these values """
result = []
for x in na_values:
result.append(str(x))
result.append(x)
try:
v = float(x)
# we are like 999 here
if v == int(v):
v = int(v)
result.append("{value}.0".format(value=v))
result.append(str(v))
result.append(v)
except (TypeError, ValueError, OverflowError):
pass
try:
result.append(int(x))
except (TypeError, ValueError, OverflowError):
pass
return set(result)
def _get_na_values(col, na_values, na_fvalues, keep_default_na):
"""
Get the NaN values for a given column.
Parameters
----------
col : str
The name of the column.
na_values : array-like, dict
The object listing the NaN values as strings.
na_fvalues : array-like, dict
The object listing the NaN values as floats.
keep_default_na : bool
If `na_values` is a dict, and the column is not mapped in the
dictionary, whether to return the default NaN values or the empty set.
Returns
-------
nan_tuple : A length-two tuple composed of
1) na_values : the string NaN values for that column.
2) na_fvalues : the float NaN values for that column.
"""
if isinstance(na_values, dict):
if col in na_values:
return na_values[col], na_fvalues[col]
else:
if keep_default_na:
return _NA_VALUES, set()
return set(), set()
else:
return na_values, na_fvalues
def _get_col_names(colspec, columns):
colset = set(columns)
colnames = []
for c in colspec:
if c in colset:
colnames.append(c)
elif isinstance(c, int):
colnames.append(columns[c])
return colnames
class FixedWidthReader(BaseIterator):
    """
    A reader of fixed-width lines.
    """
    def __init__(self, f, colspecs, delimiter, comment, skiprows=None, infer_nrows=100):
        """
        Parameters
        ----------
        f : file-like object yielding lines
        colspecs : list of (start, end) tuples, or "infer"
        delimiter : str or None
            Extra padding characters to strip from each field.
        comment : str or None
        skiprows : set, optional
        infer_nrows : int, default 100
            Rows to sample when inferring column specs.
        """
        self.f = f
        self.buffer = None
        # Characters stripped from field edges; newlines always included.
        self.delimiter = "\r\n" + delimiter if delimiter else "\n\r\t "
        self.comment = comment
        if colspecs == "infer":
            self.colspecs = self.detect_colspecs(
                infer_nrows=infer_nrows, skiprows=skiprows
            )
        else:
            self.colspecs = colspecs
        if not isinstance(self.colspecs, (tuple, list)):
            raise TypeError(
                "column specifications must be a list or tuple, "
                "input was a %r" % type(colspecs).__name__
            )
        # Each spec must be a (start, end) pair; None means open-ended.
        for colspec in self.colspecs:
            if not (
                isinstance(colspec, (tuple, list))
                and len(colspec) == 2
                and isinstance(colspec[0], (int, np.integer, type(None)))
                and isinstance(colspec[1], (int, np.integer, type(None)))
            ):
                raise TypeError(
                    "Each column specification must be "
                    "2 element tuple or list of integers"
                )
    def get_rows(self, infer_nrows, skiprows=None):
        """
        Read rows from self.f, skipping as specified.

        We distinguish buffer_rows (the first <= infer_nrows
        lines) from the rows returned to detect_colspecs
        because it's simpler to leave the other locations
        with skiprows logic alone than to modify them to
        deal with the fact we skipped some rows here as
        well.

        Parameters
        ----------
        infer_nrows : int
            Number of rows to read from self.f, not counting
            rows that are skipped.
        skiprows: set, optional
            Indices of rows to skip.

        Returns
        -------
        detect_rows : list of str
            A list containing the rows to read.
        """
        if skiprows is None:
            skiprows = set()
        buffer_rows = []
        detect_rows = []
        for i, row in enumerate(self.f):
            if i not in skiprows:
                detect_rows.append(row)
            buffer_rows.append(row)
            if len(detect_rows) >= infer_nrows:
                break
        # Replay everything read here when iteration starts.
        self.buffer = iter(buffer_rows)
        return detect_rows
    def detect_colspecs(self, infer_nrows=100, skiprows=None):
        """
        Infer (start, end) column boundaries by marking non-delimiter
        character runs across a sample of rows and reading off the
        edges of the combined mask.
        """
        # Regex escape the delimiters
        delimiters = "".join(r"\{}".format(x) for x in self.delimiter)
        pattern = re.compile("([^{}]+)".format(delimiters))
        rows = self.get_rows(infer_nrows, skiprows)
        if not rows:
            raise EmptyDataError("No rows from which to infer column width")
        max_len = max(map(len, rows))
        mask = np.zeros(max_len + 1, dtype=int)
        if self.comment is not None:
            rows = [row.partition(self.comment)[0] for row in rows]
        for row in rows:
            for m in pattern.finditer(row):
                mask[m.start() : m.end()] = 1
        # Edges are positions where the mask flips between 0 and 1.
        shifted = np.roll(mask, 1)
        shifted[0] = 0
        edges = np.where((mask ^ shifted) == 1)[0]
        edge_pairs = list(zip(edges[::2], edges[1::2]))
        return edge_pairs
    def __next__(self):
        # Serve buffered rows (from colspec inference) before reading on.
        if self.buffer is not None:
            try:
                line = next(self.buffer)
            except StopIteration:
                self.buffer = None
                line = next(self.f)
        else:
            line = next(self.f)
        # Note: 'colspecs' is a sequence of half-open intervals.
        return [line[fromm:to].strip(self.delimiter) for (fromm, to) in self.colspecs]
class FixedWidthFieldParser(PythonParser):
    """
    Specialization of PythonParser that converts fixed-width fields into
    DataFrames by swapping in a FixedWidthReader as the line reader.

    See PythonParser for details.
    """
    def __init__(self, f, **kwds):
        # Pull out the fixed-width-specific options before handing the
        # remaining keywords to the base parser.
        colspecs = kwds.pop("colspecs")
        infer_nrows = kwds.pop("infer_nrows")
        self.colspecs = colspecs
        self.infer_nrows = infer_nrows
        PythonParser.__init__(self, f, **kwds)
    def _make_reader(self, f):
        # Replace the default reader with one that splits each line on
        # the configured (or inferred) column specifications.
        self.data = FixedWidthReader(
            f,
            colspecs=self.colspecs,
            delimiter=self.delimiter,
            comment=self.comment,
            skiprows=self.skiprows,
            infer_nrows=self.infer_nrows,
        )
| {
"content_hash": "2711c9922d96704c087b2ffa647f8307",
"timestamp": "",
"source": "github",
"line_count": 3702,
"max_line_length": 127,
"avg_line_length": 34.62155591572123,
"alnum_prop": 0.546637642487653,
"repo_name": "toobaz/pandas",
"id": "3e5b200c4643b133d13b348cc6111549905970bd",
"size": "128169",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pandas/io/parsers.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "541"
},
{
"name": "C",
"bytes": "394843"
},
{
"name": "C++",
"bytes": "17248"
},
{
"name": "HTML",
"bytes": "606963"
},
{
"name": "Makefile",
"bytes": "562"
},
{
"name": "Python",
"bytes": "15031623"
},
{
"name": "Shell",
"bytes": "27585"
},
{
"name": "Smarty",
"bytes": "2040"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1" />
<title>rocLv's home for ruby & rails - All about ruby and rails</title>
<meta name="description" content="ruby or rails news, or some site developing stuff" />
<meta name="HandheldFriendly" content="True" />
<meta name="MobileOptimized" content="320" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<link rel="stylesheet" type="text/css" href="/assets/css/screen.css" />
<link rel="stylesheet" type="text/css" href="//fonts.googleapis.com/css?family=Merriweather:300,700,700italic,300italic|Open+Sans:700,400" />
<!-- Customisation -->
<link rel="stylesheet" type="text/css" href="/assets/css/main.css" />
</head>
<body class="post-template">
<header class="main-header post-head no-cover">
<nav class="main-nav clearfix">
<a class="back-button icon-arrow-left" href="/">Home</a>
<a class="subscribe-button icon-feed" href="/feed.xml">Subscribe</a>
</nav>
</header>
<main class="content" role="main">
<article class="post">
<header class="post-header">
<h1 class="post-title">Ruby中定义类方法的八种方法</h1>
<section class="post-meta">
<time class="post-date" datetime="2015-09-02">02 Sep 2015</time>
</section>
</header>
<!-- <header class="post-header">
<a id="blog-logo" href="roclv.github.io/rss">
<span class="blog-title">rocLv's home for ruby & rails</span>
</a>
</header> -->
<!-- <span class="post-meta">
<time datetime="2015-09-02">02 Sep 2015</time>
</span> -->
<!-- <h1 class="post-title">Ruby中定义类方法的八种方法</h1> -->
<section class="post-content">
<!--more-->
<h3 id="ruby">Ruby中定义类的六种方法</h3>
<p>众所周知,Ruby中的所有均为对象。Class是继承Object,而Object是继承BasicObject。</p>
<p>在Ruby中可以为特定的对象定义专属于这个对象的方法,名为单例方法。如:</p>
<p>“by str = ‘singleton method’</p>
<p>class « str def hello puts “I’m a singleton method” end end</p>
<p>str.hello</p>
<blockquote>
<blockquote>
<p>I’m a singleton method</p>
<p>“于类Class继承于Object ,所以Class本身也是对象。这样,我们可以把为类添加类方法,当作为对象Class添加单例方法。</p>
</blockquote>
</blockquote>
<p>“by class C class « C def singleton_method puts “I’m one of Class C’s methods” end end end</p>
<p>C.singleton_method</p>
<blockquote>
<blockquote>
<p>I’m one of Class C’s methods</p>
<p>“于在类定义中,self为类本身,因此,如果在类中定义类方法,可以用self替代类名,因此衍生出第二种定义类的方法:</p>
<p>“by class C class « self</p>
</blockquote>
</blockquote>
<pre><code> def singleton_method
puts 'I am one of Class methods'
end</code></pre>
<p>end end</p>
<p>“</p>
<p>Martz在设计Ruby的时候,把所有程序员设想为合格的程序员,因此,他要程序员为自己所写的代码负责,而不是通过提供受限的编程语言,保证程序员所写的代码质量。所以,由于上面的那种写法不太符合程序员的习惯,因此提供了以下简便一点的方法:</p>
<p>“by class C def C.class_method puts ‘another way to define class methods’ end end</p>
<p>“样,使用<code>self</code>来替换类名得到第四种声明类方法的方法:</p>
<p>“by class C def self.class_method puts ‘use self to replace class name’ end end</p>
<p>“此为止,其实类声明的方法就可以结束了。但是本则孔乙己(90s、00s等没学过这篇文章的需要脑补一下了)的精神胜利法,我再提供另外两种。 因为以上声明类方法的方式中,有两种是可以放在类外部声明的,所以又有了另外两种声明类的方法(lol):</p>
<p>“by class C end</p>
<p>class « C def singleton_method puts ‘the fifth way to define class methods’ end end</p>
<p>def C.singleton_method puts ‘the sixth way to define class methods’ end</p>
<p>“合以上,终于拼凑够了“回”字的六种写法。</p>
<p>除了定义类方法,我们可以用以下方法,把任何类变成单例类:</p>
<p>“by</p>
<h1 id="objectobjectobjectobject">Object本身也是类,(类又继承Object,能不这么虐人吗?)所以打开类Object,为Object添加单例方法:</h1>
<p>class Object class « self self end end</p>
<p>“</p>
<p>大神why the lucky stiff依据这个,为元编程创建了Metaid库:</p>
<p>“by class Object #将类变为单例类 def metaclass; class « self; self; end; end #看上去是实例方法,其实是类方法 def meta_eval &blk; metaclass.instance_eval &blk; end</p>
<p># 定义类方法 def meta_def name, &blk meta_eval { define_method name, &blk } end</p>
<p>#定义类的实例方法 def class_def name, &blk class_eval { define_method name, &blk } end</p>
<p>end</p>
<p>“用上述库,定义类方法:</p>
<p>“by class C end</p>
<p>C.meta_def class_method_name { #方法 }</p>
<p>“</p>
<p>最后,还有一种:</p>
<p>“by class C end</p>
<p>C.define_singleton_method(:single_method_name) do puts ‘the eighth method’ end</p>
<p>“</p>
<p>如此说来,共有8种定义类的方法了。</p>
</section>
<footer class="post-footer">
<!-- If we want to display author's name and bio -->
<figure class="author-image">
<a class="img" href="/" style="background-image: url(/assets/images/profile.png)">
<span class="hidden">rocLv's Picture</span></a>
</figure>
<section class="author">
<!-- Author Name -->
<h4> rocLv </h4>
<!-- Author Bio -->
<p>
I'm a common rubyist, feel free to connect with me.
</p>
</section>
<!-- Share links section -->
<form action="https://shenghuo.alipay.com/send/payment/fill.htm" method="POST" target="_blank" accept-charset="GBK">
<input name="optEmail" type="hidden" value="roc_war@yahoo.com.cn" />
<input name="payAmount" type="hidden" value="5" />
<input id="title" name="title" type="hidden" value="付款说明" />
<input name="memo" type="hidden" value="备注" />
<input name="pay" type="image" value="打赏" src= '/assets/images/donate.png' />
</form>
<!-- JiaThis Button BEGIN -->
<div class="jiathis_style">
<a class="jiathis_button_qzone"></a>
<a class="jiathis_button_tsina"></a>
<a class="jiathis_button_tqq"></a>
<a class="jiathis_button_weixin"></a>
<a class="jiathis_button_renren"></a>
<a href="http://www.jiathis.com/share" class="jiathis jiathis_txt jtico jtico_jiathis" target="_blank"></a>
<a class="jiathis_counter_style"></a>
</div>
<script type="text/javascript" src="http://v3.jiathis.com/code_mini/jia.js" charset="utf-8"></script>
<!-- JiaThis Button END -->
<!-- Disqus comments -->
<section class="disqus">
<div id="disqus_thread"></div>
<script type="text/javascript">
var disqus_shortname = 'roclv';
var disqus_developer = 0; // developer mode is on
(function() {
var dsq = document.createElement('script'); dsq.type = 'text/javascript'; dsq.async = true;
dsq.src = 'http://' + disqus_shortname + '.disqus.com/embed.js';
(document.getElementsByTagName('head')[0] || document.getElementsByTagName('body')[0]).appendChild(dsq);
})();
</script>
<noscript>Please enable JavaScript to view the <a href="http://disqus.com/?ref_noscript">comments powered by Disqus.</a></noscript>
<a href="http://disqus.com" class="dsq-brlink">comments powered by <span class="logo-disqus">Disqus</span></a>
</section>
</footer>
</article>
</main>
<footer class="site-footer clearfix">
<section class="copyright">
<a href="/">rocLv's home for ruby & rails</a> ©
• All rights reserved.
</section>
<section class="poweredby">Made with Jekyll using
<a href="http://github.com/rosario/kasper">Kasper theme</a>
</section>
</footer>
<script type="text/javascript" src="/assets/js/jquery-1.11.1.min.js"></script>
<script type="text/javascript" src="/assets/js/jquery.fitvids.js"></script>
<script type="text/javascript" src="/assets/js/index.js"></script>
<!-- Google Analytics Tracking code -->
<script type="text/javascript">
var _gaq = _gaq || [];
_gaq.push(['_setAccount', 'UA-71237400-1']);
_gaq.push(['_trackPageview']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
})();
</script>
</body>
</html>
| {
"content_hash": "036db6ed8ce612bdfbb74724073815f2",
"timestamp": "",
"source": "github",
"line_count": 259,
"max_line_length": 147,
"avg_line_length": 32.38223938223938,
"alnum_prop": 0.6016454036008108,
"repo_name": "rocLv/roclv.github.io",
"id": "e5ae55b4f8ed268364356de4872c6008ec8f091f",
"size": "9671",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server/2015/09/02/定义单态方法的八种方式.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "77836"
},
{
"name": "HTML",
"bytes": "291251"
},
{
"name": "JavaScript",
"bytes": "11546"
},
{
"name": "Ruby",
"bytes": "15020"
}
],
"symlink_target": ""
} |
class NewDataCz < ActiveRecord::Migration[5.0]
  # Imports the CZ dataset CSV via CSVImporter and reports which datasets
  # were updated.
  def self.up
    directory = Rails.root.join('db/migrate/20220301163200_new_data_cz')
    data_path = directory.join('data.csv')
    commits_path = directory.join('commits.yml')
    datasets = []
    # By default, CSVImporter only allows updating existing datasets. If the
    # migration is adding a new dataset, add the `create_missing_datasets`
    # keyword argument. For example:
    #
    #   CSVImporter.run(data_path, commits_path, create_missing_datasets: true) do |row, runner|
    #     # ...
    #   end
    #
    CSVImporter.run(data_path, commits_path, create_missing_datasets: true) do |row, runner|
      print "Updating #{row['geo_id']}... "
      commits = runner.call
      if commits.any?
        # Only record a dataset when one of the commits actually carries it.
        dataset = find_dataset(commits)
        datasets.push(dataset) if dataset
        puts 'done!'
      else
        puts 'nothing to change!'
      end
    end
    sleep(1)
    puts
    puts "Updated #{datasets.length} datasets with the following IDs:"
    puts "  #{datasets.map(&:id).join(',')}"
  end

  def self.down
    raise ActiveRecord::IrreversibleMigration
  end

  # Returns the dataset attached to the first commit that has one, or nil.
  #
  # Defined as a class method: it is called from the class method `self.up`,
  # where an instance-level definition would raise NoMethodError.
  def self.find_dataset(commits)
    commits.each do |commit|
      return commit.dataset if commit&.dataset
    end
    nil
  end
end
| {
"content_hash": "1222e42544a32a6752e3c36a12bde7f6",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 96,
"avg_line_length": 28.72093023255814,
"alnum_prop": 0.6380566801619433,
"repo_name": "quintel/etlocal",
"id": "50becbee8ee73dd87867b3967f2fd764c2e350c6",
"size": "1235",
"binary": false,
"copies": "1",
"ref": "refs/heads/production",
"path": "db/migrate/20220301163200_new_data_cz.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "865"
},
{
"name": "Dockerfile",
"bytes": "812"
},
{
"name": "HTML",
"bytes": "6774"
},
{
"name": "JavaScript",
"bytes": "49698"
},
{
"name": "Ruby",
"bytes": "782898"
},
{
"name": "Sass",
"bytes": "20202"
},
{
"name": "Slim",
"bytes": "21187"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE scripts PUBLIC "-//UniTime//DTD University Course Timetabling/EN" "http://www.unitime.org/interface/Script.dtd">
<!--
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*
-->
<scripts created="Tue Mar 8 16:55 EDT 2022">
<script name="Reports: Building Utilization by Day of Week and Time of Day" permission="Events" engine="python" created="Tue Mar 08 15:22:10 EST 2022">
<description><![CDATA[<h3>Export building utilization data broken down by day of week and time of day.</h3>
]]></description>
<parameter name="includeSubjectArea" label="2) Break Data Down By Subject Area" type="boolean" default="false"/>
<parameter name="includeDept" label="3) Break Data Down By Department" type="boolean" default="false"/>
<parameter name="includeRoomType" label="1) Break Data Down By Room Type" type="boolean" default="true"/>
<body><![CDATA[import csv
from java.util import ArrayList
from org.unitime.timetable.util import RoomUtilizationHelper, RoomUsageAndOccupancyData
def outputData(writer, list):
for row in list:
writer.writerow(row)
def execute():
file = open(log.createOutput('utilization','csv').getAbsolutePath(), 'wb')
writer = csv.writer(file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
ruh = RoomUtilizationHelper()
headerRow = ArrayList()
ruaod = RoomUsageAndOccupancyData()
list = ruh.getUtilQueryResultsForQuery(ruaod.getBuildingUsageAndOccupancyTimeDayQuery(session, includeRoomType, includeSubjectArea, includeDept, headerRow), headerRow)
outputData(writer, list)
file.flush()
file.close()
execute()]]></body>
</script>
</scripts>
| {
"content_hash": "a4cc4af15ac56d965a891f17330fea61",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 171,
"avg_line_length": 46.38461538461539,
"alnum_prop": 0.7425373134328358,
"repo_name": "UniTime/unitime",
"id": "3597388cc7fe97d3017e724350a1337930e6b2c6",
"size": "2412",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Documentation/Scripts/Reports- Building Utilization by Day of Week and Time of Day.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "199417"
},
{
"name": "FreeMarker",
"bytes": "48795"
},
{
"name": "HTML",
"bytes": "48"
},
{
"name": "Java",
"bytes": "27876192"
},
{
"name": "JavaScript",
"bytes": "446284"
}
],
"symlink_target": ""
} |
package deploylog
import (
"testing"
"time"
"k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/watch"
apirequest "k8s.io/apiserver/pkg/endpoints/request"
clientgotesting "k8s.io/client-go/testing"
"k8s.io/kubernetes/pkg/api/legacyscheme"
kapi "k8s.io/kubernetes/pkg/apis/core"
"k8s.io/kubernetes/pkg/client/clientset_generated/internalclientset/fake"
appsapi "github.com/openshift/origin/pkg/apps/apis/apps"
appstest "github.com/openshift/origin/pkg/apps/apis/apps/test"
appsfake "github.com/openshift/origin/pkg/apps/generated/internalclientset/fake"
appsutil "github.com/openshift/origin/pkg/apps/util"
// install all APIs
_ "github.com/openshift/origin/pkg/api/install"
)
var testSelector = map[string]string{"test": "rest"}
// makeDeployment builds a fake replication controller for the given config
// version, placed in the default namespace and using testSelector.
func makeDeployment(version int64) kapi.ReplicationController {
	codec := legacyscheme.Codecs.LegacyCodec(appsapi.SchemeGroupVersion)
	rc, _ := appsutil.MakeDeployment(appstest.OkDeploymentConfig(version), codec)
	rc.Namespace = metav1.NamespaceDefault
	rc.Spec.Selector = testSelector
	return *rc
}
// makeDeploymentList builds a list holding one fake deployment per version
// from 1 through versions, in order.
func makeDeploymentList(versions int64) *kapi.ReplicationControllerList {
	items := make([]kapi.ReplicationController, 0, versions)
	for v := int64(1); v <= versions; v++ {
		items = append(items, makeDeployment(v))
	}
	return &kapi.ReplicationControllerList{Items: items}
}
// fakePodList provides two application pods matching testSelector; the first
// (oldest by CreationTimestamp) is the one a complete deployment's logs are
// read from in mockREST.
var (
	fakePodList = &kapi.PodList{
		Items: []kapi.Pod{
			{
				ObjectMeta: metav1.ObjectMeta{
					Name:              "config-5-application-pod-1",
					Namespace:         metav1.NamespaceDefault,
					CreationTimestamp: metav1.Date(2016, time.February, 1, 1, 0, 1, 0, time.UTC),
					Labels:            testSelector,
				},
				Spec: kapi.PodSpec{
					Containers: []kapi.Container{
						{
							Name: "config-5-container-1",
						},
					},
					NodeName: "some-host",
				},
			},
			{
				ObjectMeta: metav1.ObjectMeta{
					Name:              "config-5-application-pod-2",
					Namespace:         metav1.NamespaceDefault,
					CreationTimestamp: metav1.Date(2016, time.February, 1, 1, 0, 3, 0, time.UTC),
					Labels:            testSelector,
				},
				Spec: kapi.PodSpec{
					Containers: []kapi.Container{
						{
							Name: "config-5-container-2",
						},
					},
					NodeName: "some-host",
				},
			},
		},
	}
)
// mockREST mocks a DeploymentLog REST. It wires fake clients so that:
//   - the deployment config "config" is at the given version,
//   - the replication controller for the desired version carries the given
//     deployment status (delivered via a fake watch),
//   - pod lookups return the oldest application pod (complete deployments)
//     or a running deployer pod (any other status).
func mockREST(version, desired int64, status appsapi.DeploymentStatus) *REST {
	// Fake deploymentConfig
	config := appstest.OkDeploymentConfig(version)
	fakeDn := appsfake.NewSimpleClientset(config)
	fakeDn.PrependReactor("get", "deploymentconfigs", func(action clientgotesting.Action) (handled bool, ret runtime.Object, err error) {
		return true, config, nil
	})
	// Used for testing validation errors prior to getting replication controllers.
	if desired > version {
		return &REST{
			dcClient: fakeDn.Apps(),
			timeout:  defaultTimeout,
		}
	}
	// Fake deployments
	fakeDeployments := makeDeploymentList(version)
	fakeRn := fake.NewSimpleClientset(fakeDeployments)
	fakeRn.PrependReactor("get", "replicationcontrollers", func(action clientgotesting.Action) (handled bool, ret runtime.Object, err error) {
		return true, &fakeDeployments.Items[desired-1], nil
	})
	// Fake watcher for deployments
	fakeWatch := watch.NewFake()
	fakeRn.PrependWatchReactor("replicationcontrollers", clientgotesting.DefaultWatchReactor(fakeWatch, nil))
	obj := &fakeDeployments.Items[desired-1]
	obj.Annotations[appsapi.DeploymentStatusAnnotation] = string(status)
	// Deliver the annotated deployment asynchronously, as a real watch would.
	go fakeWatch.Add(obj)
	fakePn := fake.NewSimpleClientset()
	if status == appsapi.DeploymentStatusComplete {
		// If the deployment is complete, we will try to get the logs from the oldest
		// application pod...
		fakePn.PrependReactor("list", "pods", func(action clientgotesting.Action) (handled bool, ret runtime.Object, err error) {
			return true, fakePodList, nil
		})
		fakePn.PrependReactor("get", "pods", func(action clientgotesting.Action) (handled bool, ret runtime.Object, err error) {
			return true, &fakePodList.Items[0], nil
		})
	} else {
		// ...otherwise try to get the logs from the deployer pod.
		fakeDeployer := &kapi.Pod{
			ObjectMeta: metav1.ObjectMeta{
				Name:      appsutil.DeployerPodNameForDeployment(obj.Name),
				Namespace: metav1.NamespaceDefault,
			},
			Spec: kapi.PodSpec{
				Containers: []kapi.Container{
					{
						Name: appsutil.DeployerPodNameForDeployment(obj.Name) + "-container",
					},
				},
				NodeName: "some-host",
			},
			Status: kapi.PodStatus{
				Phase: kapi.PodRunning,
			},
		}
		fakePn.PrependReactor("get", "pods", func(action clientgotesting.Action) (handled bool, ret runtime.Object, err error) {
			return true, fakeDeployer, nil
		})
	}
	return &REST{
		dcClient:  fakeDn.Apps(),
		rcClient:  fakeRn.Core(),
		podClient: fakePn.Core(),
		timeout:   defaultTimeout,
	}
}
// TestRESTGet exercises REST.Get across deployment states, checking which
// pod's logs are requested (namespace/name) and which errors surface.
func TestRESTGet(t *testing.T) {
	ctx := apirequest.NewDefaultContext()
	tests := []struct {
		testName          string
		rest              *REST
		name              string
		opts              runtime.Object
		expectedNamespace string
		expectedName      string
		expectedErr       error
	}{
		{
			testName:          "running deployment",
			rest:              mockREST(1, 1, appsapi.DeploymentStatusRunning),
			name:              "config",
			opts:              &appsapi.DeploymentLogOptions{Follow: true, Version: intp(1)},
			expectedNamespace: "default",
			expectedName:      "config-1-deploy",
			expectedErr:       nil,
		},
		{
			testName:          "complete deployment",
			rest:              mockREST(5, 5, appsapi.DeploymentStatusComplete),
			name:              "config",
			opts:              &appsapi.DeploymentLogOptions{Follow: true, Version: intp(5)},
			expectedNamespace: "default",
			expectedName:      "config-5-application-pod-1",
			expectedErr:       nil,
		},
		{
			testName:          "previous failed deployment",
			rest:              mockREST(3, 2, appsapi.DeploymentStatusFailed),
			name:              "config",
			opts:              &appsapi.DeploymentLogOptions{Follow: false, Version: intp(2)},
			expectedNamespace: "default",
			expectedName:      "config-2-deploy",
			expectedErr:       nil,
		},
		{
			testName:          "previous deployment",
			rest:              mockREST(3, 2, appsapi.DeploymentStatusFailed),
			name:              "config",
			opts:              &appsapi.DeploymentLogOptions{Follow: false, Previous: true},
			expectedNamespace: "default",
			expectedName:      "config-2-deploy",
			expectedErr:       nil,
		},
		{
			testName:    "non-existent previous deployment",
			rest:        mockREST(1 /* won't be used */, 101, ""),
			name:        "config",
			opts:        &appsapi.DeploymentLogOptions{Follow: false, Previous: true},
			expectedErr: errors.NewBadRequest("no previous deployment exists for deploymentConfig \"config\""),
		},
	}
	for _, test := range tests {
		t.Run(test.testName, func(t *testing.T) {
			actualPodNamespace := ""
			actualPodName := ""
			// Capture the pod coordinates instead of fetching real logs.
			getPodLogsFn := func(podNamespace, podName string, logOpts *kapi.PodLogOptions) (runtime.Object, error) {
				actualPodNamespace = podNamespace
				actualPodName = podName
				return nil, nil
			}
			test.rest.getLogsFn = getPodLogsFn
			_, err := test.rest.Get(ctx, test.name, test.opts)
			if err != nil && test.expectedErr != nil && err.Error() != test.expectedErr.Error() {
				t.Fatalf("error mismatch: expected %v, got %v", test.expectedErr, err)
			}
			if err != nil && test.expectedErr == nil {
				t.Fatal(err)
			}
			if err == nil && test.expectedErr != nil {
				t.Fatalf("error mismatch: expected %v, got no error", test.expectedErr)
			}
			if e, a := test.expectedNamespace, actualPodNamespace; e != a {
				t.Errorf("expected %v, actual %v", e, a)
			}
			if e, a := test.expectedName, actualPodName; e != a {
				t.Errorf("expected %v, actual %v", e, a)
			}
		})
	}
}
// intp returns a pointer to a copy of num.
// TODO: These kind of functions seem to be used in lots of places
// We should move it in a common location
func intp(num int64) *int64 {
	v := num
	return &v
}
| {
"content_hash": "589d54f952ebcfa47617888ad8485b8e",
"timestamp": "",
"source": "github",
"line_count": 248,
"max_line_length": 140,
"avg_line_length": 32.435483870967744,
"alnum_prop": 0.6652163102933863,
"repo_name": "dobbymoodge/origin",
"id": "e16ba5b309a11ca748de078dcf7d1edc28f7258d",
"size": "8044",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pkg/apps/registry/deploylog/rest_test.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Awk",
"bytes": "1842"
},
{
"name": "DIGITAL Command Language",
"bytes": "117"
},
{
"name": "Go",
"bytes": "17911706"
},
{
"name": "Groovy",
"bytes": "5285"
},
{
"name": "HTML",
"bytes": "74732"
},
{
"name": "Makefile",
"bytes": "23319"
},
{
"name": "Perl",
"bytes": "365"
},
{
"name": "Python",
"bytes": "34676"
},
{
"name": "Ruby",
"bytes": "484"
},
{
"name": "Shell",
"bytes": "1632752"
},
{
"name": "Smarty",
"bytes": "1010"
}
],
"symlink_target": ""
} |
package com.siemens.oss.omniproperties.builders;
import java.io.File;
import java.io.IOException;
import com.siemens.oss.omniproperties.ObjectBuilder;
/**
 * Builder that resolves a path to an existing directory, raising an
 * {@link IllegalArgumentException} otherwise.
 *
 * @author Markus Michael Geipel
 *
 */
public class ExistingDirectory implements ObjectBuilder<File> {

	final private File file;

	public ExistingDirectory(final String path) {
		this.file = new File(path);
	}

	public ExistingDirectory(final File dir, final String path) {
		this.file = new File(dir, path);
	}

	@Override
	public File build() throws IOException {
		requireExistingDirectory();
		// Return a canonicalized copy so callers get an absolute,
		// symlink-resolved path.
		return new File(file.getCanonicalPath());
	}

	// Validates that the configured path exists and is a directory.
	private void requireExistingDirectory() {
		if (!file.exists()) {
			throw new IllegalArgumentException("Directory '" + file
					+ "' does not exist");
		}
		if (!file.isDirectory()) {
			throw new IllegalArgumentException("'" + file
					+ "' is not a directory");
		}
	}
}
| {
"content_hash": "5abda040b84f06c24220d5786fa7c7d9",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 63,
"avg_line_length": 19.73170731707317,
"alnum_prop": 0.6996291718170581,
"repo_name": "siemens/omniproperties",
"id": "932b52e4854009a7dc1c932967f2c78acff843ec",
"size": "1383",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/com/siemens/oss/omniproperties/builders/ExistingDirectory.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "GAP",
"bytes": "14981"
},
{
"name": "Java",
"bytes": "145876"
}
],
"symlink_target": ""
} |
import java.io.Console;
// June 2017, JMI
// Program to do ask user input for what they ate, then to print the given input.
public class MealTime {
public static void main(String[] args) {
/* First iteration of MealTime, without using a custom method
Console myConsole = System.console();
System.out.println("What did you eat for breakfast?");
String yourBreakfast = myConsole.readLine();
yourBreakfast = yourBreakfast.toLowerCase(); // make string lowercase
System.out.println("What did you eat for lunch?");
String yourLunch = myConsole.readLine().toLowerCase(); // chaining the methods, since yourLunch is an object that is allowed
System.out.println("What did you eat for dinner?");
String yourDinner = myConsole.readLine().toLowerCase();
System.out.println("You ate " + yourBreakfast + " for breakfast.");
System.out.println("You ate " + yourLunch + " for lunch.");
System.out.println("You ate " + yourDinner + " for dinner.");
*/
askWhatYouAte("breakfast");
askWhatYouAte("lunch");
askWhatYouAte("dinner");
} // end main
public static void askWhatYouAte(String whichMeal) { // method to print the question to stdout, read from stdin, then call the regurgitateTheMeal method to print the gathered info
Console myConsole = System.console();
System.out.println("What did you eat for " + whichMeal + " ?");
//String whichMeal = whichMeal; //This is redundant, since "String whichMeal" is defined in the argument
String mealName = myConsole.readLine().toLowerCase();
regurgitateTheMeal(whichMeal, mealName);
} // end askWhatYouAte method
public static void regurgitateTheMeal(String whichMeal, String mealName) { // method to print the input given by user for the meal
System.out.println("You ate " + mealName + " for " + whichMeal + ".");
} // end regurgitateTheMeal method
} // end class MealTime
| {
"content_hash": "d08843b72b0b811d207cd27d306d78c0",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 183,
"avg_line_length": 50.2,
"alnum_prop": 0.670816733067729,
"repo_name": "InertialObserver/JavaTidbits",
"id": "a88ff607e751ef96f6023e0c4cdaaa6596fea46f",
"size": "2008",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "MealTime.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "9286"
}
],
"symlink_target": ""
} |
<div class="yoast_breadcrumb">
<div id="breadcrumbs" vocab="http://schema.org/" typeof="BreadcrumbList">
<span property="itemListElement" typeof="ListItem">
<a href="<?php echo home_url(); ?>" property="item" typeof="WebPage" >
<span property="name">Home</span></a><meta property="position" content="1"> »
</span>
<span property="itemListElement" typeof="ListItem">
<a href="<?php echo home_url(); ?>/about/" property="item" typeof="WebPage" >
<span property="name">About</span></a><meta property="position" content="2"> »
</span>
<span property="itemListElement" typeof="ListItem">
<a href="<?php echo home_url(); ?>/about/meet-our-staff/" property="item" typeof="WebPage" >
<span property="name">Meet Our Staff</span></a><meta property="position" content="3"> »
</span>
<span property="itemListElement" typeof="ListItem" property="item" typeof="WebPage" >
<span property="name">
<span property="name"><?php the_title();?></span><meta property="position" content="4">
</span>
</div>
</div>
<?php while (have_posts()) : the_post(); ?>
<article <?php post_class(); ?>>
<?php if ( has_post_thumbnail( $post->ID ) ) { ?>
<img class="img-responsive alignleft" src="<?php the_post_thumbnail_url('medium'); ?>" />
<?php } ?>
<div class="entry-content">
<h5 class="blog-title text-left"><span><?php the_title();?></span></h5>
<h6><?php the_field('employee_title'); ?></h6>
<?php edit_post_link(); ?>
<hr>
<?php the_content(); ?>
</div>
</article>
<?php endwhile; ?>
| {
"content_hash": "7068c3c8c3e6919c1b9c2fa9dda20a44",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 95,
"avg_line_length": 43.857142857142854,
"alnum_prop": 0.6273615635179153,
"repo_name": "smmacdonald/msr",
"id": "eead564df3b23de3842981e5439e7026b8653e21",
"size": "1538",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "single-employee.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "93146"
},
{
"name": "JavaScript",
"bytes": "47668"
},
{
"name": "PHP",
"bytes": "96920"
}
],
"symlink_target": ""
} |
package kafka.server
import kafka.utils.ZkUtils._
import kafka.utils.{Utils, SystemTime, Logging}
import org.I0Itec.zkclient.exception.ZkNodeExistsException
import org.I0Itec.zkclient.IZkDataListener
import kafka.controller.ControllerContext
import kafka.controller.KafkaController
import kafka.common.KafkaException
/**
* This class handles zookeeper based leader election based on an ephemeral path. The election module does not handle
* session expiration, instead it assumes the caller will handle it by probably try to re-elect again. If the existing
* leader is dead, this class will handle automatic re-election and if it succeeds, it invokes the leader state change
* callback
*/
class ZookeeperLeaderElector(controllerContext: ControllerContext, electionPath: String, onBecomingLeader: () => Unit,
brokerId: Int)
extends LeaderElector with Logging {
// ID of the broker currently believed to be leader; -1 means "no known leader".
var leaderId = -1
// create the election path in ZK, if one does not exist
val index = electionPath.lastIndexOf("/")
if (index > 0)
makeSurePersistentPathExists(controllerContext.zkClient, electionPath.substring(0, index))
val leaderChangeListener = new LeaderChangeListener
// Subscribes the data-change listener on the election path, then immediately
// attempts an election. Both happen under the controller lock.
def startup {
controllerContext.controllerLock synchronized {
controllerContext.zkClient.subscribeDataChanges(electionPath, leaderChangeListener)
elect
}
}
// Attempts to become leader by creating an ephemeral znode at electionPath.
// Returns true iff this broker ends up as the leader (i.e. amILeader).
def elect: Boolean = {
val timestamp = SystemTime.milliseconds.toString
// Election payload: {"version":1,"brokerid":<id>,"timestamp":"<ms>"}
// — version/brokerid unquoted, timestamp quoted.
val electString =
Utils.mergeJsonFields(Utils.mapToJsonFields(Map("version" -> 1.toString, "brokerid" -> brokerId.toString), valueInQuotes = false)
++ Utils.mapToJsonFields(Map("timestamp" -> timestamp), valueInQuotes = true))
try {
// The extra callback lets the helper treat a conflicting node written by this
// same brokerId as our own (stale ephemeral node after session expiration).
createEphemeralPathExpectConflictHandleZKBug(controllerContext.zkClient, electionPath, electString, brokerId,
(controllerString : String, leaderId : Any) => KafkaController.parseControllerId(controllerString) == leaderId.asInstanceOf[Int],
controllerContext.zkSessionTimeout)
info(brokerId + " successfully elected as leader")
leaderId = brokerId
onBecomingLeader()
} catch {
case e: ZkNodeExistsException =>
// If someone else has written the path, then record who the leader is.
leaderId = readDataMaybeNull(controllerContext.zkClient, electionPath)._1 match {
case Some(controller) => KafkaController.parseControllerId(controller)
case None => {
// The node vanished between our create attempt and this read; the delete
// will fire handleDataDeleted, triggering another election round.
warn("A leader has been elected but just resigned, this will result in another round of election")
-1
}
}
if (leaderId != -1)
debug("Broker %d was elected as leader instead of broker %d".format(leaderId, brokerId))
case e2: Throwable =>
// Any other failure (including a failure inside onBecomingLeader) resets our claim.
error("Error while electing or becoming leader on broker %d".format(brokerId), e2)
leaderId = -1
}
amILeader
}
def close = {
leaderId = -1
}
def amILeader : Boolean = leaderId == brokerId
// Gives up leadership by deleting the ephemeral election znode.
def resign() = {
leaderId = -1
deletePath(controllerContext.zkClient, electionPath)
}
/**
* We do not have a session expiration listener in the ZkElection, but we assume the caller who uses this module will
* have its own session expiration listener and handler
*/
class LeaderChangeListener extends IZkDataListener with Logging {
/**
* Called when the leader information stored in zookeeper has changed. Record the new leader in memory
* @throws Exception On any error.
*/
@throws(classOf[Exception])
def handleDataChange(dataPath: String, data: Object) {
controllerContext.controllerLock synchronized {
leaderId = KafkaController.parseControllerId(data.toString)
info("New leader is %d".format(leaderId))
}
}
/**
* Called when the leader information stored in zookeeper has been deleted. Try to elect as the leader
* @throws Exception
* On any error.
*/
@throws(classOf[Exception])
def handleDataDeleted(dataPath: String) {
controllerContext.controllerLock synchronized {
debug("%s leader change listener fired for path %s to handle data deleted: trying to elect as a leader"
.format(brokerId, dataPath))
elect
}
}
}
}
| {
"content_hash": "2e1ac552b7b25542b5534b1c5951e944",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 137,
"avg_line_length": 38.74545454545454,
"alnum_prop": 0.6989676208352886,
"repo_name": "archieco/kafka",
"id": "33b73609b1178c56e692fb60e35aca04ad1af586",
"size": "5063",
"binary": false,
"copies": "3",
"ref": "refs/heads/0.8",
"path": "core/src/main/scala/kafka/server/ZookeeperLeaderElector.scala",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "112585"
},
{
"name": "Python",
"bytes": "252378"
},
{
"name": "Scala",
"bytes": "1406160"
},
{
"name": "Shell",
"bytes": "101799"
}
],
"symlink_target": ""
} |
package ch.softappeal.yass.transport.socket.test;
import ch.softappeal.yass.core.remote.ContractId;
import ch.softappeal.yass.core.remote.OneWay;
import ch.softappeal.yass.core.remote.Server;
import ch.softappeal.yass.core.remote.SimpleMethodMapper;
import ch.softappeal.yass.core.remote.session.SimpleSession;
import ch.softappeal.yass.serialize.JavaSerializer;
import ch.softappeal.yass.serialize.Serializer;
import ch.softappeal.yass.transport.TransportSetup;
import ch.softappeal.yass.transport.socket.AsyncSocketConnection;
import ch.softappeal.yass.transport.socket.SimpleSocketBinder;
import ch.softappeal.yass.transport.socket.SimpleSocketConnector;
import ch.softappeal.yass.transport.socket.SocketTransport;
import ch.softappeal.yass.transport.socket.SyncSocketConnection;
import ch.softappeal.yass.util.Exceptions;
import ch.softappeal.yass.util.NamedThreadFactory;
import ch.softappeal.yass.util.Nullable;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
/**
 * Manual socket test driving a one-way {@link Busy} service.
 * The acceptor serves the contract over a synchronous connection and sleeps
 * one second per call; the initiator connects with an asynchronous connection
 * and fires 10,000 one-way calls without awaiting replies.
 * Run {@link #main(String...)} directly; progress is printed to stdout.
 */
public final class AsyncSocketConnectionTest {
// Contract with a single fire-and-forget method (no reply expected).
public interface Busy {
@OneWay void busy();
}
// Contract id 0, mapped with SimpleMethodMapper.
private static final ContractId<Busy> BUSY_ID = ContractId.create(Busy.class, 0, SimpleMethodMapper.FACTORY);
// Fixed local endpoint shared by the acceptor and the initiator.
private static final SocketAddress ADDRESS = new InetSocketAddress("localhost", 28947);
private static final Serializer PACKET_SERIALIZER = JavaSerializer.INSTANCE;
public static void main(final String... args) throws InterruptedException {
final ExecutorService executor = Executors.newCachedThreadPool(new NamedThreadFactory("Executor", Exceptions.STD_ERR));
// Acceptor side: must be started before the initiator connects below.
// Each busy() invocation logs and sleeps 1s to simulate a slow consumer.
new SocketTransport(
executor,
SyncSocketConnection.FACTORY,
TransportSetup.ofPacketSerializer(
PACKET_SERIALIZER,
connection -> new SimpleSession(connection, executor) {
@Override protected Server server() {
return new Server(
BUSY_ID.service(() -> {
System.out.println("busy");
try {
TimeUnit.MILLISECONDS.sleep(1_000);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
})
);
}
@Override protected void opened() {
System.out.println("acceptor opened");
}
@Override protected void closed(final @Nullable Exception exception) {
System.out.println("acceptor closed: " + exception);
}
}
)
).start(executor, new SimpleSocketBinder(ADDRESS));
// Initiator side: once the session opens, sends 10,000 one-way busy() calls
// through an async connection (second factory arg 10 — presumably a write
// queue/batch size; confirm against AsyncSocketConnection.factory).
SocketTransport.connect(
executor,
AsyncSocketConnection.factory(executor, 10),
TransportSetup.ofPacketSerializer(
PACKET_SERIALIZER,
connection -> new SimpleSession(connection, executor) {
@Override protected void opened() {
System.out.println("initiator opened");
final Busy busy = proxy(BUSY_ID);
for (int i = 0; i < 10_000; i++) {
busy.busy();
}
System.out.println("initiator done");
}
@Override protected void closed(final @Nullable Exception exception) {
System.out.println("initiator closed: " + exception);
}
}
),
new SimpleSocketConnector(ADDRESS)
);
}
}
| {
"content_hash": "268da838c924dd7fe6d9eb3349b6bd87",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 127,
"avg_line_length": 44.35227272727273,
"alnum_prop": 0.5977453241096592,
"repo_name": "sushicutta/yass",
"id": "0355177b09f68ce5a3cd9266f0e0d731d587da81",
"size": "3903",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "java/test/ch/softappeal/yass/transport/socket/test/AsyncSocketConnectionTest.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "2912"
},
{
"name": "HTML",
"bytes": "1014"
},
{
"name": "Java",
"bytes": "489746"
},
{
"name": "Python",
"bytes": "118152"
},
{
"name": "TypeScript",
"bytes": "26834"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="zh-cn">
<head>
<title>不过满腹 - Authors</title>
<meta charset="utf-8" />
<!-- Mobile viewport optimized: j.mp/bplateviewport -->
<meta name="viewport" content="width=device-width,initial-scale=1, maximum-scale=1">
<link rel="stylesheet" type="text/css" href="/theme/gumby.css" />
<link rel="stylesheet" type="text/css" href="/theme/style.css" />
<link rel="stylesheet" type="text/css" href="/theme/pygment.css" />
<script src="/theme/js/libs/modernizr-2.6.2.min.js"></script>
</head>
<body id="index" class="home">
<div class="container">
<div class="row">
<header id="banner" class="body">
<h1><a href="/">不过满腹 <strong></strong></a></h1>
</header><!-- /#banner -->
<div id="navigation" class="navbar row">
<a href="#" gumby-trigger="#navigation > ul" class="toggle"><i class="icon-menu"></i></a>
<ul class="columns">
<li><a href="/">Home</a></li>
</ul>
</div>
<h1>Authors on 不过满腹</h1>
<ul> <li><a href="/author/alex-hou.html">Alex Hou</a> (4)</li>
</ul>
</div><!-- /.row -->
</div><!-- /.container -->
<div class="container.nopad bg">
<footer id="credits" class="row">
<div class="seven columns left-center">
<address id="about" class="vcard body">
Proudly powered by <a href="http://getpelican.com/">Pelican</a>,
which takes great advantage of <a href="http://python.org">Python</a>.
<br />
Based on the <a target="_blank" href="http://gumbyframework.com">Gumby Framework</a>
</address>
</div>
<div class="seven columns">
<div class="row">
<ul class="socbtns">
</ul>
</div>
</div>
</footer>
</div>
<script type="text/javascript">
    // Disqus comment-count loader: injects count.js asynchronously.
    var disqus_shortname = 'icantfindone';
    (function () {
        var s = document.createElement('script'); s.async = true;
        s.type = 'text/javascript';
        // Use https (was http) so the script is not blocked as mixed content
        // when the page itself is served over https; Disqus serves count.js over TLS.
        s.src = 'https://' + disqus_shortname + '.disqus.com/count.js';
        (document.getElementsByTagName('HEAD')[0] || document.getElementsByTagName('BODY')[0]).appendChild(s);
    }());
    </script>
<script src="/theme/js/libs/jquery-1.9.1.min.js"></script>
<script src="/theme/js/libs/gumby.min.js"></script>
<script src="/theme/js/plugins.js"></script>
</body>
</html> | {
"content_hash": "e0289082950401d51b52ab472a58a40c",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 110,
"avg_line_length": 27.010309278350515,
"alnum_prop": 0.516412213740458,
"repo_name": "Alex-Hou/Alex-Hou.github.io",
"id": "161d52e3d696dc2bd6898fb34f3375ff254060f8",
"size": "2644",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "authors.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "180025"
},
{
"name": "HTML",
"bytes": "176918"
},
{
"name": "JavaScript",
"bytes": "5083"
}
],
"symlink_target": ""
} |
package com.blocklaunch.blwarps.managers.storage.sql.warp;
import com.blocklaunch.blwarps.BLWarps;
import com.blocklaunch.blwarps.Warp;
import com.blocklaunch.blwarps.managers.storage.StorageManager;
import com.blocklaunch.blwarps.managers.storage.sql.SqlManager;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.List;
/**
 * SQL-backed {@code StorageManager} for {@code Warp}s. Every persistence
 * operation is delegated to a JDBI-generated {@code WarpDAO}.
 */
public class SqlWarpManager extends SqlManager<Warp> implements StorageManager<Warp> {

    WarpDAO warpDAO;
    ObjectMapper mapper;

    /**
     * Wires up the DAO and makes sure the backing warp table exists.
     *
     * @param plugin the owning plugin instance
     */
    public SqlWarpManager(BLWarps plugin) {
        super(plugin);
        mapper = new ObjectMapper();
        // On-demand DAOs manage their own connections, so nothing needs closing.
        warpDAO = dbi.onDemand(WarpDAO.class);
        warpDAO.createWarpTable();
    }

    /** Loads every persisted warp. */
    @Override
    public List<Warp> load() {
        return warpDAO.getAllWarps();
    }

    /** Persists a warp that has not been stored before. */
    @Override
    public void saveNew(Warp warp) {
        warpDAO.insertWarp(warp);
    }

    /** Removes a warp from storage. */
    @Override
    public void delete(Warp warp) {
        warpDAO.deleteWarp(warp);
    }

    /** Writes the current state of an already-stored warp. */
    @Override
    public void update(Warp warp) {
        warpDAO.updateWarp(warp);
    }
}
| {
"content_hash": "9ca5abb4fa768cfdf8dc4eb455ca80a6",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 86,
"avg_line_length": 25,
"alnum_prop": 0.6982608695652174,
"repo_name": "BlockLaunch/BLWarps",
"id": "e9abacb649d75725f9191496e4a6c35420c1c50b",
"size": "1150",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/com/blocklaunch/blwarps/managers/storage/sql/warp/SqlWarpManager.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "111805"
}
],
"symlink_target": ""
} |
import logging
import asyncio
import socket
import setproctitle
class ClientStats():
    """In-process aggregator of client-call statistics.

    Keyed by ``(service_name, host, method)``; each entry stores
    ``(interaction_count, running_average_response_time)``.
    """

    _client_dict = dict()

    @classmethod
    def update(cls, service_name, host, method, time_taken):
        """Fold one observed call time into the running average for the triple.

        :param service_name: name of the downstream service that was called
        :param host: host the call went to
        :param method: method that was invoked
        :param time_taken: response time of this single call
        """
        key = (service_name, host, method)
        # dict.get replaces the old O(n) `key in d.keys()` membership test
        # plus separate lookup with a single access.
        count, average = cls._client_dict.get(key, (0, 0))
        count += 1
        # Incremental mean: new_avg = (old_avg * (n - 1) + x) / n
        average = (average * (count - 1) + time_taken) / count
        cls._client_dict[key] = (count, average)

    @classmethod
    def periodic_aggregator(cls):
        """Flush accumulated stats to the 'stats' logger and re-schedule.

        Emits one log record per (service, host, method) triple, resets the
        accumulator, and schedules itself again in 300 seconds on the running
        event loop.
        """
        hostname = socket.gethostbyname(socket.gethostname())
        # Process title is expected to look like "<prefix>_<service_name>_<suffix>";
        # TODO confirm this format against the process-naming code.
        service_name = '_'.join(setproctitle.getproctitle().split('_')[1:-1])
        logs = [
            {
                "service_name": service_name,
                "hostname": hostname,
                "client_service": key[0],
                "client_host": key[1],
                "client_method": key[2],
                "average_response_time": int(value[1]),
                "interaction_count": value[0],
            }
            for key, value in cls._client_dict.items()
        ]
        # Reset so the next window only reports fresh interactions.
        cls._client_dict = dict()
        _logger = logging.getLogger('stats')
        for logd in logs:
            _logger.info(logd)
        asyncio.get_event_loop().call_later(300, cls.periodic_aggregator)
"content_hash": "91175a6d5b6fb28835900e2455026fd7",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 77,
"avg_line_length": 31.711111111111112,
"alnum_prop": 0.5536089698668535,
"repo_name": "amanwriter/vyked",
"id": "ec1a5641eb2e2768d8ebe18de65832ca15d3ccf3",
"size": "1427",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "vyked/utils/client_stats.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "132563"
}
],
"symlink_target": ""
} |
'use strict';
// Auto-generated CommonJS entry point for the Buefy Datepicker component.
Object.defineProperty(exports, '__esModule', { value: true });
// Shared chunks loaded for their side effects (styles, helpers, base components).
require('./chunk-14c82365.js');
require('./helpers.js');
require('./chunk-1bb51959.js');
require('./chunk-330693d5.js');
require('./chunk-7f8af05c.js');
// __chunk_5 exposes the plugin helpers (registerComponent / use).
var __chunk_5 = require('./chunk-13e039f5.js');
require('./chunk-99088816.js');
require('./chunk-21985800.js');
require('./chunk-ae7e641a.js');
require('./chunk-026b445c.js');
require('./chunk-cd6f631e.js');
require('./chunk-e7833c70.js');
// __chunk_16 contains the Datepicker component itself.
var __chunk_16 = require('./chunk-ce6abaaa.js');
// Vue plugin that registers the Datepicker component when installed.
var Plugin = {
install: function install(Vue) {
__chunk_5.registerComponent(Vue, __chunk_16.Datepicker);
}
};
// Register the plugin via Buefy's shared `use` helper.
__chunk_5.use(Plugin);
exports.BDatepicker = __chunk_16.Datepicker;
exports.default = Plugin;
| {
"content_hash": "192c0c37235ad23d07510834bc1e3742",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 62,
"avg_line_length": 27.22222222222222,
"alnum_prop": 0.6789115646258503,
"repo_name": "cdnjs/cdnjs",
"id": "d62e53aa88f1490b7445c04f700063867dba8c6a",
"size": "735",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ajax/libs/buefy/0.9.7/cjs/datepicker.js",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
<?php
namespace Google\AdsApi\Dfp\v201611;
/**
* This file was generated from WSDL. DO NOT EDIT.
*/
/**
 * Response wrapper for the DFP v201611 `performProposalAction` service call.
 * Carries the operation's UpdateResult in `$rval`.
 */
class performProposalActionResponse
{
/**
* The return value of the service call.
*
* @var \Google\AdsApi\Dfp\v201611\UpdateResult $rval
*/
protected $rval = null;
/**
* @param \Google\AdsApi\Dfp\v201611\UpdateResult $rval the update result, or null
*/
public function __construct($rval = null)
{
$this->rval = $rval;
}
/**
* @return \Google\AdsApi\Dfp\v201611\UpdateResult
*/
public function getRval()
{
return $this->rval;
}
/**
* Sets the update result; returns $this for fluent chaining.
*
* @param \Google\AdsApi\Dfp\v201611\UpdateResult $rval
* @return \Google\AdsApi\Dfp\v201611\performProposalActionResponse
*/
public function setRval($rval)
{
$this->rval = $rval;
return $this;
}
}
| {
"content_hash": "07c53ab8e9ae485076b1acd00e5e4b5c",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 71,
"avg_line_length": 18.46511627906977,
"alnum_prop": 0.593198992443325,
"repo_name": "jeraldfeller/jbenterprises",
"id": "6f0797b602cec2ab4e5588d176e1d125327f2f45",
"size": "794",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "google-adwords/vendor/googleads/googleads-php-lib/src/Google/AdsApi/Dfp/v201611/performProposalActionResponse.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "137"
},
{
"name": "CSS",
"bytes": "4465774"
},
{
"name": "CoffeeScript",
"bytes": "83631"
},
{
"name": "HTML",
"bytes": "2549782"
},
{
"name": "JavaScript",
"bytes": "17552996"
},
{
"name": "PHP",
"bytes": "3092947"
},
{
"name": "Shell",
"bytes": "444"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<title>User agent detail - Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko; Google Page Speed Insights) Chrome/27.0.1453 Safari/537.36</title>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.97.3/css/materialize.min.css">
<link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
<link href="../circle.css" rel="stylesheet">
</head>
<body>
<div class="container">
<div class="section">
<h1 class="header center orange-text">User agent detail</h1>
<div class="row center">
<h5 class="header light">
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko; Google Page Speed Insights) Chrome/27.0.1453 Safari/537.36
</h5>
</div>
</div>
<div class="section">
<table class="striped"><tr><th></th><th colspan="3">General</th><th colspan="5">Device</th><th colspan="3">Bot</th><th colspan="2"></th></tr><tr><th>Provider</th><th>Browser</th><th>Engine</th><th>OS</th><th>Brand</th><th>Model</th><th>Type</th><th>Is mobile</th><th>Is touch</th><th>Is bot</th><th>Name</th><th>Type</th><th>Parse time</th><th>Actions</th></tr><tr><th colspan="14" class="green lighten-3">Test suite</th></tr><tr><td>Browscap<br /><small>6014</small><br /><small>vendor/browscap/browscap/tests/fixtures/issues/issue-081.php</small></td><td> </td><td> </td><td> </td><td style="border-left: 1px solid #555"></td><td></td><td></td><td></td><td></td><td style="border-left: 1px solid #555">yes</td><td>Google PageSpeed Insights</td><td>Bot/Crawler</td><td>0</td><td>
<!-- Modal Trigger -->
<a class="modal-trigger btn waves-effect waves-light" href="#modal-b3ace57b-0c56-4fa0-aa1d-8ee4ba0e42c7">Detail</a>
<!-- Modal Structure -->
<div id="modal-b3ace57b-0c56-4fa0-aa1d-8ee4ba0e42c7" class="modal modal-fixed-footer">
<div class="modal-content">
<h4>Browscap result detail</h4>
<p><pre><code class="php">Array
(
[Comment] => Google PageSpeed Insights
[Browser] => Google PageSpeed Insights
[Browser_Type] => Bot/Crawler
[Browser_Bits] => 64
[Browser_Maker] => Google Inc
[Browser_Modus] => unknown
[Version] => 0.0
[MajorVer] => 0
[MinorVer] => 0
[Platform] => Linux
[Platform_Version] => unknown
[Platform_Description] => Linux
[Platform_Bits] => 64
[Platform_Maker] => Linux Foundation
[Alpha] =>
[Beta] =>
[Win16] =>
[Win32] =>
[Win64] =>
[Frames] => 1
[IFrames] => 1
[Tables] => 1
[Cookies] =>
[BackgroundSounds] =>
[JavaScript] => 1
[VBScript] =>
[JavaApplets] =>
[ActiveXControls] =>
[isMobileDevice] =>
[isTablet] =>
[isSyndicationReader] =>
[Crawler] => 1
[isFake] =>
[isAnonymized] =>
[isModified] =>
[CssVersion] => 0
[AolVersion] => 0
[Device_Name] => Linux Desktop
[Device_Maker] => Various
[Device_Type] => Desktop
[Device_Pointing_Method] => mouse
[Device_Code_Name] => Linux Desktop
[Device_Brand_Name] => unknown
[RenderingEngine_Name] => unknown
[RenderingEngine_Version] => unknown
[RenderingEngine_Maker] => unknown
)
</code></pre></p>
</div>
<div class="modal-footer">
<a href="#!" class="modal-action modal-close waves-effect waves-green btn-flat ">close</a>
</div>
</div>
</td></tr><tr><td>WhichBrowser<br /><small>v2.0.18</small><br /><small>vendor/whichbrowser/parser/tests/data/bots/generic.yaml</small></td><td> </td><td> </td><td> </td><td style="border-left: 1px solid #555"></td><td></td><td></td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td style="border-left: 1px solid #555">yes</td><td>Google Page Speed</td><td><i class="material-icons">close</i></td><td>0</td><td>
<!-- Modal Trigger -->
<a class="modal-trigger btn waves-effect waves-light" href="#modal-027cff01-4a76-491b-ace3-9289fcbc172f">Detail</a>
<!-- Modal Structure -->
<div id="modal-027cff01-4a76-491b-ace3-9289fcbc172f" class="modal modal-fixed-footer">
<div class="modal-content">
<h4>WhichBrowser result detail</h4>
<p><pre><code class="php">Array
(
[headers] => User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko; Google Page Speed Insights) Chrome/27.0.1453 Safari/537.36
[result] => Array
(
[browser] => Array
(
[name] => Google Page Speed
)
[device] => Array
(
[type] => bot
)
)
[readable] => Google Page Speed
)
</code></pre></p>
</div>
<div class="modal-footer">
<a href="#!" class="modal-action modal-close waves-effect waves-green btn-flat ">close</a>
</div>
</div>
</td></tr><tr><th colspan="14" class="green lighten-3">Providers</th></tr><tr><td>BrowscapFull<br /><small>6014</small><br /></td><td> </td><td> </td><td> </td><td style="border-left: 1px solid #555"></td><td></td><td></td><td></td><td></td><td style="border-left: 1px solid #555">yes</td><td>Google PageSpeed Insights</td><td>Bot/Crawler</td><td>0.008</td><td>
<!-- Modal Trigger -->
<a class="modal-trigger btn waves-effect waves-light" href="#modal-47a9cd06-e213-4882-bc34-db6aed664223">Detail</a>
<!-- Modal Structure -->
<div id="modal-47a9cd06-e213-4882-bc34-db6aed664223" class="modal modal-fixed-footer">
<div class="modal-content">
<h4>BrowscapFull result detail</h4>
<p><pre><code class="php">stdClass Object
(
[browser_name_regex] => /^mozilla\/5\.0 \(.*linux.*x86_64.*\) applewebkit\/.* \(khtml.* like gecko; google page speed insights\) chrome\/.* safari\/.*$/
[browser_name_pattern] => mozilla/5.0 (*linux*x86_64*) applewebkit/* (khtml* like gecko; google page speed insights) chrome/* safari/*
[parent] => Google PageSpeed Insights
[comment] => Google PageSpeed Insights
[browser] => Google PageSpeed Insights
[browser_type] => Bot/Crawler
[browser_bits] => 64
[browser_maker] => Google Inc
[browser_modus] => unknown
[version] => 0.0
[majorver] => 0
[minorver] => 0
[platform] => Linux
[platform_version] => unknown
[platform_description] => Linux
[platform_bits] => 64
[platform_maker] => Linux Foundation
[alpha] =>
[beta] =>
[win16] =>
[win32] =>
[win64] =>
[frames] => 1
[iframes] => 1
[tables] => 1
[cookies] =>
[backgroundsounds] =>
[javascript] => 1
[vbscript] =>
[javaapplets] =>
[activexcontrols] =>
[ismobiledevice] =>
[istablet] =>
[issyndicationreader] =>
[crawler] => 1
[isfake] =>
[isanonymized] =>
[ismodified] =>
[cssversion] => 0
[aolversion] => 0
[device_name] => Linux Desktop
[device_maker] => Various
[device_type] => Desktop
[device_pointing_method] => mouse
[device_code_name] => Linux Desktop
[device_brand_name] => unknown
[renderingengine_name] => unknown
[renderingengine_version] => unknown
[renderingengine_description] => unknown
[renderingengine_maker] => unknown
)
</code></pre></p>
</div>
<div class="modal-footer">
<a href="#!" class="modal-action modal-close waves-effect waves-green btn-flat ">close</a>
</div>
</div>
</td></tr><tr><td>BrowscapLite<br /><small>6014</small><br /></td>
<td colspan="12" class="center-align red lighten-1">
<strong>No result found</strong>
</td>
</tr><tr><td>BrowscapPhp<br /><small>6014</small><br /></td><td> </td><td><i class="material-icons">close</i></td><td> </td><td style="border-left: 1px solid #555"><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td></td><td></td><td></td><td style="border-left: 1px solid #555">yes</td><td>Google PageSpeed Insights</td><td><i class="material-icons">close</i></td><td>0.011</td><td>
<!-- Modal Trigger -->
<a class="modal-trigger btn waves-effect waves-light" href="#modal-ad0041a2-b0f4-43f6-a70d-cad1443caa68">Detail</a>
<!-- Modal Structure -->
<div id="modal-ad0041a2-b0f4-43f6-a70d-cad1443caa68" class="modal modal-fixed-footer">
<div class="modal-content">
<h4>BrowscapPhp result detail</h4>
<p><pre><code class="php">stdClass Object
(
[browser_name_regex] => /^mozilla\/5\.0 \(.*linux.*\) applewebkit\/.* \(khtml.* like gecko; google page speed insights\) chrome\/.* safari\/.*$/
[browser_name_pattern] => mozilla/5.0 (*linux*) applewebkit/* (khtml* like gecko; google page speed insights) chrome/* safari/*
[parent] => Google PageSpeed Insights
[comment] => Google PageSpeed Insights
[browser] => Google PageSpeed Insights
[browser_type] => unknown
[browser_bits] => 0
[browser_maker] => Google Inc
[browser_modus] => unknown
[version] => 0.0
[majorver] => 0
[minorver] => 0
[platform] => Linux
[platform_version] => unknown
[platform_description] => unknown
[platform_bits] => 0
[platform_maker] => unknown
[alpha] => false
[beta] => false
[win16] => false
[win32] => false
[win64] => false
[frames] => false
[iframes] => false
[tables] => false
[cookies] => false
[backgroundsounds] => false
[javascript] => false
[vbscript] => false
[javaapplets] => false
[activexcontrols] => false
[ismobiledevice] =>
[istablet] =>
[issyndicationreader] => false
[crawler] => 1
[isfake] => false
[isanonymized] => false
[ismodified] => false
[cssversion] => 0
[aolversion] => 0
[device_name] => unknown
[device_maker] => unknown
[device_type] => Desktop
[device_pointing_method] => mouse
[device_code_name] => unknown
[device_brand_name] => unknown
[renderingengine_name] => unknown
[renderingengine_version] => unknown
[renderingengine_description] => unknown
[renderingengine_maker] => unknown
)
</code></pre></p>
</div>
<div class="modal-footer">
<a href="#!" class="modal-action modal-close waves-effect waves-green btn-flat ">close</a>
</div>
</div>
</td></tr><tr><td>DonatjUAParser<br /><small>v0.5.1</small><br /></td><td>Chrome 27.0.1453</td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td style="border-left: 1px solid #555"><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td style="border-left: 1px solid #555"><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td>0</td><td>
<!-- Modal Trigger -->
<a class="modal-trigger btn waves-effect waves-light" href="#modal-15fbc1f0-2615-4d42-b5d9-a30dd647b050">Detail</a>
<!-- Modal Structure -->
<div id="modal-15fbc1f0-2615-4d42-b5d9-a30dd647b050" class="modal modal-fixed-footer">
<div class="modal-content">
<h4>DonatjUAParser result detail</h4>
<p><pre><code class="php">Array
(
[platform] => Linux
[browser] => Chrome
[version] => 27.0.1453
)
</code></pre></p>
</div>
<div class="modal-footer">
<a href="#!" class="modal-action modal-close waves-effect waves-green btn-flat ">close</a>
</div>
</div>
</td></tr><tr><td>JenssegersAgent<br /><small>v2.3.3</small><br /></td><td>Chrome 27.0.1453</td><td><i class="material-icons">close</i></td><td>Linux </td><td style="border-left: 1px solid #555"><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td></td><td><i class="material-icons">close</i></td><td style="border-left: 1px solid #555"></td><td></td><td><i class="material-icons">close</i></td><td>0.001</td><td>
<!-- Modal Trigger -->
<a class="modal-trigger btn waves-effect waves-light" href="#modal-b85a2b91-6a55-4436-a82c-1ea0d46e2e51">Detail</a>
<!-- Modal Structure -->
<div id="modal-b85a2b91-6a55-4436-a82c-1ea0d46e2e51" class="modal modal-fixed-footer">
<div class="modal-content">
<h4>JenssegersAgent result detail</h4>
<p><pre><code class="php">Array
(
[browserName] => Chrome
[browserVersion] => 27.0.1453
[osName] => Linux
[osVersion] =>
[deviceModel] => WebKit
[isMobile] =>
[isRobot] =>
[botName] =>
)
</code></pre></p>
</div>
<div class="modal-footer">
<a href="#!" class="modal-action modal-close waves-effect waves-green btn-flat ">close</a>
</div>
</div>
</td></tr><tr><td>NeutrinoApiCom<br /><small></small><br /></td><td>Chrome 27.0.1453</td><td><i class="material-icons">close</i></td><td>Linux </td><td style="border-left: 1px solid #555"></td><td></td><td>desktop-browser</td><td></td><td><i class="material-icons">close</i></td><td style="border-left: 1px solid #555"></td><td></td><td><i class="material-icons">close</i></td><td>0.20101</td><td>
<!-- Modal Trigger -->
<a class="modal-trigger btn waves-effect waves-light" href="#modal-8c2a7a4e-3fbf-4df2-8d61-5e730422f67b">Detail</a>
<!-- Modal Structure -->
<div id="modal-8c2a7a4e-3fbf-4df2-8d61-5e730422f67b" class="modal modal-fixed-footer">
<div class="modal-content">
<h4>NeutrinoApiCom result detail</h4>
<p><pre><code class="php">stdClass Object
(
[mobile_screen_height] => 0
[is_mobile] =>
[type] => desktop-browser
[mobile_brand] =>
[mobile_model] =>
[version] => 27.0.1453
[is_android] =>
[browser_name] => Chrome
[operating_system_family] => Linux
[operating_system_version] =>
[is_ios] =>
[producer] => Google Inc.
[operating_system] => Linux
[mobile_screen_width] => 0
[mobile_browser] =>
)
</code></pre></p>
</div>
<div class="modal-footer">
<a href="#!" class="modal-action modal-close waves-effect waves-green btn-flat ">close</a>
</div>
</div>
</td></tr><tr><td>PiwikDeviceDetector<br /><small>3.6.1</small><br /></td><td> </td><td> </td><td> </td><td style="border-left: 1px solid #555"></td><td></td><td></td><td></td><td></td><td style="border-left: 1px solid #555">yes</td><td>Google PageSpeed Insights</td><td>Site Monitor</td><td>0.002</td><td>
<!-- Modal Trigger -->
<a class="modal-trigger btn waves-effect waves-light" href="#modal-4a941d34-a8d3-4914-9724-346f60ad7046">Detail</a>
<!-- Modal Structure -->
<div id="modal-4a941d34-a8d3-4914-9724-346f60ad7046" class="modal modal-fixed-footer">
<div class="modal-content">
<h4>PiwikDeviceDetector result detail</h4>
<p><pre><code class="php">Array
(
[client] =>
[operatingSystem] =>
[device] => Array
(
[brand] =>
[brandName] =>
[model] =>
[device] =>
[deviceName] =>
)
[bot] => Array
(
[name] => Google PageSpeed Insights
[category] => Site Monitor
[url] => http://developers.google.com/speed/pagespeed/insights/
[producer] => Array
(
[name] => Google Inc.
[url] => http://www.google.com
)
)
[extra] => Array
(
[isBot] => 1
[isBrowser] =>
[isFeedReader] =>
[isMobileApp] =>
[isPIM] =>
[isLibrary] =>
[isMediaPlayer] =>
[isCamera] =>
[isCarBrowser] =>
[isConsole] =>
[isFeaturePhone] =>
[isPhablet] =>
[isPortableMediaPlayer] =>
[isSmartDisplay] =>
[isSmartphone] =>
[isTablet] =>
[isTV] =>
[isDesktop] =>
[isMobile] =>
[isTouchEnabled] =>
)
)
</code></pre></p>
</div>
<div class="modal-footer">
<a href="#!" class="modal-action modal-close waves-effect waves-green btn-flat ">close</a>
</div>
</div>
</td></tr><tr><td>SinergiBrowserDetector<br /><small>6.0.1</small><br /></td><td>Chrome 27.0.1453</td><td><i class="material-icons">close</i></td><td>Linux </td><td style="border-left: 1px solid #555"><i class="material-icons">close</i></td><td></td><td><i class="material-icons">close</i></td><td></td><td><i class="material-icons">close</i></td><td style="border-left: 1px solid #555"></td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td>0.001</td><td>
<!-- Modal Trigger -->
<a class="modal-trigger btn waves-effect waves-light" href="#modal-ec1cd248-02b0-457e-8a9d-35bb99af008c">Detail</a>
<!-- Modal Structure -->
<div id="modal-ec1cd248-02b0-457e-8a9d-35bb99af008c" class="modal modal-fixed-footer">
<div class="modal-content">
<h4>SinergiBrowserDetector result detail</h4>
<p><pre><code class="php">Array
(
[browser] => Sinergi\BrowserDetector\Browser Object
(
[userAgent:Sinergi\BrowserDetector\Browser:private] => Sinergi\BrowserDetector\UserAgent Object
(
[userAgentString:Sinergi\BrowserDetector\UserAgent:private] => Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko; Google Page Speed Insights) Chrome/27.0.1453 Safari/537.36
)
[name:Sinergi\BrowserDetector\Browser:private] => Chrome
[version:Sinergi\BrowserDetector\Browser:private] => 27.0.1453
[isRobot:Sinergi\BrowserDetector\Browser:private] =>
[isChromeFrame:Sinergi\BrowserDetector\Browser:private] =>
[isFacebookWebView:Sinergi\BrowserDetector\Browser:private] =>
[isCompatibilityMode:Sinergi\BrowserDetector\Browser:private] =>
)
[operatingSystem] => Sinergi\BrowserDetector\Os Object
(
[name:Sinergi\BrowserDetector\Os:private] => Linux
[version:Sinergi\BrowserDetector\Os:private] => unknown
[isMobile:Sinergi\BrowserDetector\Os:private] =>
[userAgent:Sinergi\BrowserDetector\Os:private] => Sinergi\BrowserDetector\UserAgent Object
(
[userAgentString:Sinergi\BrowserDetector\UserAgent:private] => Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko; Google Page Speed Insights) Chrome/27.0.1453 Safari/537.36
)
)
[device] => Sinergi\BrowserDetector\Device Object
(
[name:Sinergi\BrowserDetector\Device:private] => unknown
[userAgent:Sinergi\BrowserDetector\Device:private] => Sinergi\BrowserDetector\UserAgent Object
(
[userAgentString:Sinergi\BrowserDetector\UserAgent:private] => Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko; Google Page Speed Insights) Chrome/27.0.1453 Safari/537.36
)
)
)
</code></pre></p>
</div>
<div class="modal-footer">
<a href="#!" class="modal-action modal-close waves-effect waves-green btn-flat ">close</a>
</div>
</div>
</td></tr><tr><td>UAParser<br /><small>v3.4.5</small><br /></td><td>Chrome 27.0.1453</td><td><i class="material-icons">close</i></td><td>Linux </td><td style="border-left: 1px solid #555"></td><td></td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td style="border-left: 1px solid #555"></td><td></td><td><i class="material-icons">close</i></td><td>0.003</td><td>
<!-- Modal Trigger -->
<a class="modal-trigger btn waves-effect waves-light" href="#modal-3160e405-8a8f-46dd-8f47-5115f06462d2">Detail</a>
<!-- Modal Structure -->
<div id="modal-3160e405-8a8f-46dd-8f47-5115f06462d2" class="modal modal-fixed-footer">
<div class="modal-content">
<h4>UAParser result detail</h4>
<p><pre><code class="php">UAParser\Result\Client Object
(
[ua] => UAParser\Result\UserAgent Object
(
[major] => 27
[minor] => 0
[patch] => 1453
[family] => Chrome
)
[os] => UAParser\Result\OperatingSystem Object
(
[major] =>
[minor] =>
[patch] =>
[patchMinor] =>
[family] => Linux
)
[device] => UAParser\Result\Device Object
(
[brand] =>
[model] =>
[family] => Other
)
[originalUserAgent] => Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko; Google Page Speed Insights) Chrome/27.0.1453 Safari/537.36
)
</code></pre></p>
</div>
<div class="modal-footer">
<a href="#!" class="modal-action modal-close waves-effect waves-green btn-flat ">close</a>
</div>
</div>
</td></tr><tr><td>UserAgentApiCom<br /><small></small><br /></td><td>Chrome 27.0.1453</td><td>WebKit 537.36</td><td><i class="material-icons">close</i></td><td style="border-left: 1px solid #555"><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td>Desktop</td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td style="border-left: 1px solid #555"></td><td></td><td><i class="material-icons">close</i></td><td>0.15501</td><td>
<!-- Modal Trigger -->
<a class="modal-trigger btn waves-effect waves-light" href="#modal-afeb05fb-26b9-4509-b8ac-0c604a9e97d6">Detail</a>
<!-- Modal Structure -->
<div id="modal-afeb05fb-26b9-4509-b8ac-0c604a9e97d6" class="modal modal-fixed-footer">
<div class="modal-content">
<h4>UserAgentApiCom result detail</h4>
<p><pre><code class="php">stdClass Object
(
[platform_name] => Linux
[platform_version] => Linux
[platform_type] => Desktop
[browser_name] => Chrome
[browser_version] => 27.0.1453
[engine_name] => WebKit
[engine_version] => 537.36
)
</code></pre></p>
</div>
<div class="modal-footer">
<a href="#!" class="modal-action modal-close waves-effect waves-green btn-flat ">close</a>
</div>
</div>
</td></tr><tr><td>UserAgentStringCom<br /><small></small><br /></td><td>Chrome 27.0.1453</td><td><i class="material-icons">close</i></td><td>Linux </td><td style="border-left: 1px solid #555"><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td style="border-left: 1px solid #555"></td><td></td><td></td><td>0.10701</td><td>
<!-- Modal Trigger -->
<a class="modal-trigger btn waves-effect waves-light" href="#modal-08a9ddfb-838f-48d7-9ede-1d132306b2ee">Detail</a>
<!-- Modal Structure -->
<div id="modal-08a9ddfb-838f-48d7-9ede-1d132306b2ee" class="modal modal-fixed-footer">
<div class="modal-content">
<h4>UserAgentStringCom result detail</h4>
<p><pre><code class="php">stdClass Object
(
[agent_type] => Browser
[agent_name] => Chrome
[agent_version] => 27.0.1453
[os_type] => Linux
[os_name] => Linux
[os_versionName] =>
[os_versionNumber] =>
[os_producer] =>
[os_producerURL] =>
[linux_distibution] => Null
[agent_language] =>
[agent_languageTag] =>
)
</code></pre></p>
</div>
<div class="modal-footer">
<a href="#!" class="modal-action modal-close waves-effect waves-green btn-flat ">close</a>
</div>
</div>
</td></tr><tr><td>WhatIsMyBrowserCom<br /><small></small><br /></td><td>Chrome 27.0.1453</td><td>WebKit 537.36</td><td>Linux </td><td style="border-left: 1px solid #555"></td><td></td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td style="border-left: 1px solid #555"><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td>0.24201</td><td>
<!-- Modal Trigger -->
<a class="modal-trigger btn waves-effect waves-light" href="#modal-5fc1ff22-a74d-481b-9ad1-fcfde73ded9c">Detail</a>
<!-- Modal Structure -->
<div id="modal-5fc1ff22-a74d-481b-9ad1-fcfde73ded9c" class="modal modal-fixed-footer">
<div class="modal-content">
<h4>WhatIsMyBrowserCom result detail</h4>
<p><pre><code class="php">stdClass Object
(
[operating_system_name] => Linux
[simple_sub_description_string] =>
[simple_browser_string] => Chrome 27 on Linux
[browser_version] => 27
[extra_info] => Array
(
)
[operating_platform] =>
[extra_info_table] => Array
(
)
[layout_engine_name] => WebKit
[detected_addons] => Array
(
)
[operating_system_flavour_code] =>
[hardware_architecture] => x64
[operating_system_flavour] =>
[operating_system_frameworks] => Array
(
)
[browser_name_code] => chrome
[operating_system_version] =>
[simple_operating_platform_string] =>
[is_abusive] =>
[layout_engine_version] => 537.36
[browser_capabilities] => Array
(
)
[operating_platform_vendor_name] =>
[operating_system] => Linux
[operating_system_version_full] =>
[operating_platform_code] =>
[browser_name] => Chrome
[operating_system_name_code] => linux
[user_agent] => Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko; Google Page Speed Insights) Chrome/27.0.1453 Safari/537.36
[browser_version_full] => 27.0.1453
[browser] => Chrome 27
)
</code></pre></p>
</div>
<div class="modal-footer">
<a href="#!" class="modal-action modal-close waves-effect waves-green btn-flat ">close</a>
</div>
</div>
</td></tr><tr><td>WhichBrowser<br /><small>v2.0.18</small><br /></td><td> </td><td> </td><td> </td><td style="border-left: 1px solid #555"></td><td></td><td></td><td></td><td><i class="material-icons">close</i></td><td style="border-left: 1px solid #555">yes</td><td>Google Page Speed</td><td><i class="material-icons">close</i></td><td>0.004</td><td>
<!-- Modal Trigger -->
<a class="modal-trigger btn waves-effect waves-light" href="#modal-083a336f-5d73-4505-84f3-c5fc9bb78652">Detail</a>
<!-- Modal Structure -->
<div id="modal-083a336f-5d73-4505-84f3-c5fc9bb78652" class="modal modal-fixed-footer">
<div class="modal-content">
<h4>WhichBrowser result detail</h4>
<p><pre><code class="php">Array
(
[browser] => Array
(
[name] => Google Page Speed
)
[device] => Array
(
[type] => bot
)
)
</code></pre></p>
</div>
<div class="modal-footer">
<a href="#!" class="modal-action modal-close waves-effect waves-green btn-flat ">close</a>
</div>
</div>
</td></tr><tr><td>Woothee<br /><small>v1.2.0</small><br /></td><td>Chrome 27.0.1453</td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td style="border-left: 1px solid #555"><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td>pc</td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td style="border-left: 1px solid #555"></td><td></td><td><i class="material-icons">close</i></td><td>0</td><td>
<!-- Modal Trigger -->
<a class="modal-trigger btn waves-effect waves-light" href="#modal-f00e7198-0e22-49fe-bad0-dbb3a9cde9b9">Detail</a>
<!-- Modal Structure -->
<div id="modal-f00e7198-0e22-49fe-bad0-dbb3a9cde9b9" class="modal modal-fixed-footer">
<div class="modal-content">
<h4>Woothee result detail</h4>
<p><pre><code class="php">Array
(
[name] => Chrome
[vendor] => Google
[version] => 27.0.1453
[category] => pc
[os] => Linux
[os_version] => UNKNOWN
)
</code></pre></p>
</div>
<div class="modal-footer">
<a href="#!" class="modal-action modal-close waves-effect waves-green btn-flat ">close</a>
</div>
</div>
</td></tr><tr><td>Wurfl<br /><small>1.7.1.0</small><br /></td><td>Chrome 27.0.1453</td><td><i class="material-icons">close</i></td><td>Linux x86_64 </td><td style="border-left: 1px solid #555"></td><td></td><td>Desktop</td><td></td><td></td><td style="border-left: 1px solid #555"></td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td>0.018</td><td>
<!-- Modal Trigger -->
<a class="modal-trigger btn waves-effect waves-light" href="#modal-a2bedf8c-4a95-42a7-96c5-aaf233b2ac50">Detail</a>
<!-- Modal Structure -->
<div id="modal-a2bedf8c-4a95-42a7-96c5-aaf233b2ac50" class="modal modal-fixed-footer">
<div class="modal-content">
<h4>Wurfl result detail</h4>
<p><pre><code class="php">Array
(
[virtual] => Array
(
[is_android] => false
[is_ios] => false
[is_windows_phone] => false
[is_app] => false
[is_full_desktop] => true
[is_largescreen] => true
[is_mobile] => false
[is_robot] => false
[is_smartphone] => false
[is_touchscreen] => false
[is_wml_preferred] => false
[is_xhtmlmp_preferred] => false
[is_html_preferred] => true
[advertised_device_os] => Linux x86_64
[advertised_device_os_version] =>
[advertised_browser] => Chrome
[advertised_browser_version] => 27.0.1453
[complete_device_name] => Google Chrome
[device_name] => Google Chrome
[form_factor] => Desktop
[is_phone] => false
[is_app_webview] => false
)
[all] => Array
(
[brand_name] => Google
[model_name] => Chrome
[unique] => true
[ununiqueness_handler] =>
[is_wireless_device] => false
[device_claims_web_support] => true
[has_qwerty_keyboard] => true
[can_skip_aligned_link_row] => true
[uaprof] =>
[uaprof2] =>
[uaprof3] =>
[nokia_series] => 0
[nokia_edition] => 0
[device_os] => Desktop
[mobile_browser] =>
[mobile_browser_version] => 27.0
[device_os_version] => 0
[pointing_method] => mouse
[release_date] => 2012_november
[marketing_name] =>
[model_extra_info] =>
[nokia_feature_pack] => 0
[can_assign_phone_number] => false
[is_tablet] => false
[manufacturer_name] =>
[is_bot] => false
[is_google_glass] => false
[proportional_font] => false
[built_in_back_button_support] => false
[card_title_support] => false
[softkey_support] => false
[table_support] => false
[numbered_menus] => false
[menu_with_select_element_recommended] => false
[menu_with_list_of_links_recommended] => false
[icons_on_menu_items_support] => false
[break_list_of_links_with_br_element_recommended] => false
[access_key_support] => false
[wrap_mode_support] => false
[times_square_mode_support] => false
[deck_prefetch_support] => false
[elective_forms_recommended] => false
[wizards_recommended] => false
[image_as_link_support] => false
[insert_br_element_after_widget_recommended] => false
[wml_can_display_images_and_text_on_same_line] => false
[wml_displays_image_in_center] => false
[opwv_wml_extensions_support] => false
[wml_make_phone_call_string] => none
[chtml_display_accesskey] => false
[emoji] => false
[chtml_can_display_images_and_text_on_same_line] => false
[chtml_displays_image_in_center] => false
[imode_region] => none
[chtml_make_phone_call_string] => tel:
[chtml_table_support] => true
[xhtml_honors_bgcolor] => true
[xhtml_supports_forms_in_table] => true
[xhtml_support_wml2_namespace] => false
[xhtml_autoexpand_select] => false
[xhtml_select_as_dropdown] => true
[xhtml_select_as_radiobutton] => true
[xhtml_select_as_popup] => true
[xhtml_display_accesskey] => false
[xhtml_supports_invisible_text] => false
[xhtml_supports_inline_input] => false
[xhtml_supports_monospace_font] => false
[xhtml_supports_table_for_layout] => false
[xhtml_supports_css_cell_table_coloring] => false
[xhtml_format_as_css_property] => false
[xhtml_format_as_attribute] => false
[xhtml_nowrap_mode] => false
[xhtml_marquee_as_css_property] => false
[xhtml_readable_background_color1] => #FFFFFF
[xhtml_readable_background_color2] => #FFFFFF
[xhtml_allows_disabled_form_elements] => false
[xhtml_document_title_support] => true
[xhtml_preferred_charset] => utf8
[opwv_xhtml_extensions_support] => false
[xhtml_make_phone_call_string] => none
[xhtmlmp_preferred_mime_type] => text/html
[xhtml_table_support] => false
[xhtml_send_sms_string] => none
[xhtml_send_mms_string] => none
[xhtml_file_upload] => supported
[cookie_support] => true
[accept_third_party_cookie] => true
[xhtml_supports_iframe] => full
[xhtml_avoid_accesskeys] => true
[xhtml_can_embed_video] => play_and_stop
[ajax_support_javascript] => true
[ajax_manipulate_css] => true
[ajax_support_getelementbyid] => true
[ajax_support_inner_html] => true
[ajax_xhr_type] => standard
[ajax_manipulate_dom] => true
[ajax_support_events] => true
[ajax_support_event_listener] => true
[ajax_preferred_geoloc_api] => none
[xhtml_support_level] => 4
[preferred_markup] => html_web_4_0
[wml_1_1] => false
[wml_1_2] => false
[wml_1_3] => false
[html_wi_w3_xhtmlbasic] => true
[html_wi_oma_xhtmlmp_1_0] => false
[html_wi_imode_html_1] => false
[html_wi_imode_html_2] => false
[html_wi_imode_html_3] => false
[html_wi_imode_html_4] => false
[html_wi_imode_html_5] => false
[html_wi_imode_htmlx_1] => false
[html_wi_imode_htmlx_1_1] => false
[html_wi_imode_compact_generic] => false
[html_web_3_2] => true
[html_web_4_0] => true
[voicexml] => false
[multipart_support] => false
[total_cache_disable_support] => false
[time_to_live_support] => false
[resolution_width] => 800
[resolution_height] => 600
[columns] => 120
[max_image_width] => 800
[max_image_height] => 600
[rows] => 200
[physical_screen_width] => 400
[physical_screen_height] => 400
[dual_orientation] => false
[density_class] => 1.0
[wbmp] => false
[bmp] => true
[epoc_bmp] => false
[gif_animated] => true
[jpg] => true
[png] => true
[tiff] => false
[transparent_png_alpha] => false
[transparent_png_index] => false
[svgt_1_1] => true
[svgt_1_1_plus] => false
[greyscale] => false
[gif] => true
[colors] => 65536
[webp_lossy_support] => true
[webp_lossless_support] => true
[post_method_support] => true
[basic_authentication_support] => true
[empty_option_value_support] => true
[emptyok] => false
[nokia_voice_call] => false
[wta_voice_call] => false
[wta_phonebook] => false
[wta_misc] => false
[wta_pdc] => false
[https_support] => true
[phone_id_provided] => false
[max_data_rate] => 3200
[wifi] => true
[sdio] => false
[vpn] => false
[has_cellular_radio] => false
[max_deck_size] => 100000
[max_url_length_in_requests] => 128
[max_url_length_homepage] => 0
[max_url_length_bookmark] => 0
[max_url_length_cached_page] => 0
[max_no_of_connection_settings] => 0
[max_no_of_bookmarks] => 0
[max_length_of_username] => 0
[max_length_of_password] => 0
[max_object_size] => 0
[downloadfun_support] => false
[directdownload_support] => false
[inline_support] => false
[oma_support] => false
[ringtone] => false
[ringtone_3gpp] => false
[ringtone_midi_monophonic] => false
[ringtone_midi_polyphonic] => false
[ringtone_imelody] => false
[ringtone_digiplug] => false
[ringtone_compactmidi] => false
[ringtone_mmf] => false
[ringtone_rmf] => false
[ringtone_xmf] => false
[ringtone_amr] => false
[ringtone_awb] => false
[ringtone_aac] => false
[ringtone_wav] => false
[ringtone_mp3] => false
[ringtone_spmidi] => false
[ringtone_qcelp] => false
[ringtone_voices] => 1
[ringtone_df_size_limit] => 0
[ringtone_directdownload_size_limit] => 0
[ringtone_inline_size_limit] => 0
[ringtone_oma_size_limit] => 0
[wallpaper] => false
[wallpaper_max_width] => 0
[wallpaper_max_height] => 0
[wallpaper_preferred_width] => 0
[wallpaper_preferred_height] => 0
[wallpaper_resize] => none
[wallpaper_wbmp] => false
[wallpaper_bmp] => false
[wallpaper_gif] => false
[wallpaper_jpg] => false
[wallpaper_png] => false
[wallpaper_tiff] => false
[wallpaper_greyscale] => false
[wallpaper_colors] => 2
[wallpaper_df_size_limit] => 0
[wallpaper_directdownload_size_limit] => 0
[wallpaper_inline_size_limit] => 0
[wallpaper_oma_size_limit] => 0
[screensaver] => false
[screensaver_max_width] => 0
[screensaver_max_height] => 0
[screensaver_preferred_width] => 0
[screensaver_preferred_height] => 0
[screensaver_resize] => none
[screensaver_wbmp] => false
[screensaver_bmp] => false
[screensaver_gif] => false
[screensaver_jpg] => false
[screensaver_png] => false
[screensaver_greyscale] => false
[screensaver_colors] => 2
[screensaver_df_size_limit] => 0
[screensaver_directdownload_size_limit] => 0
[screensaver_inline_size_limit] => 0
[screensaver_oma_size_limit] => 0
[picture] => false
[picture_max_width] => 0
[picture_max_height] => 0
[picture_preferred_width] => 0
[picture_preferred_height] => 0
[picture_resize] => none
[picture_wbmp] => false
[picture_bmp] => false
[picture_gif] => false
[picture_jpg] => false
[picture_png] => false
[picture_greyscale] => false
[picture_colors] => 2
[picture_df_size_limit] => 0
[picture_directdownload_size_limit] => 0
[picture_inline_size_limit] => 0
[picture_oma_size_limit] => 0
[video] => false
[oma_v_1_0_forwardlock] => false
[oma_v_1_0_combined_delivery] => false
[oma_v_1_0_separate_delivery] => false
[streaming_video] => false
[streaming_3gpp] => false
[streaming_mp4] => false
[streaming_mov] => false
[streaming_video_size_limit] => 0
[streaming_real_media] => none
[streaming_flv] => false
[streaming_3g2] => false
[streaming_vcodec_h263_0] => -1
[streaming_vcodec_h263_3] => -1
[streaming_vcodec_mpeg4_sp] => -1
[streaming_vcodec_mpeg4_asp] => -1
[streaming_vcodec_h264_bp] => -1
[streaming_acodec_amr] => none
[streaming_acodec_aac] => none
[streaming_wmv] => none
[streaming_preferred_protocol] => rtsp
[streaming_preferred_http_protocol] => none
[wap_push_support] => false
[connectionless_service_indication] => false
[connectionless_service_load] => false
[connectionless_cache_operation] => false
[connectionoriented_unconfirmed_service_indication] => false
[connectionoriented_unconfirmed_service_load] => false
[connectionoriented_unconfirmed_cache_operation] => false
[connectionoriented_confirmed_service_indication] => false
[connectionoriented_confirmed_service_load] => false
[connectionoriented_confirmed_cache_operation] => false
[utf8_support] => true
[ascii_support] => false
[iso8859_support] => false
[expiration_date] => false
[j2me_cldc_1_0] => false
[j2me_cldc_1_1] => false
[j2me_midp_1_0] => false
[j2me_midp_2_0] => false
[doja_1_0] => false
[doja_1_5] => false
[doja_2_0] => false
[doja_2_1] => false
[doja_2_2] => false
[doja_3_0] => false
[doja_3_5] => false
[doja_4_0] => false
[j2me_jtwi] => false
[j2me_mmapi_1_0] => false
[j2me_mmapi_1_1] => false
[j2me_wmapi_1_0] => false
[j2me_wmapi_1_1] => false
[j2me_wmapi_2_0] => false
[j2me_btapi] => false
[j2me_3dapi] => false
[j2me_locapi] => false
[j2me_nokia_ui] => false
[j2me_motorola_lwt] => false
[j2me_siemens_color_game] => false
[j2me_siemens_extension] => false
[j2me_heap_size] => 0
[j2me_max_jar_size] => 0
[j2me_storage_size] => 0
[j2me_max_record_store_size] => 0
[j2me_screen_width] => 0
[j2me_screen_height] => 0
[j2me_canvas_width] => 0
[j2me_canvas_height] => 0
[j2me_bits_per_pixel] => 0
[j2me_audio_capture_enabled] => false
[j2me_video_capture_enabled] => false
[j2me_photo_capture_enabled] => false
[j2me_capture_image_formats] => none
[j2me_http] => false
[j2me_https] => false
[j2me_socket] => false
[j2me_udp] => false
[j2me_serial] => false
[j2me_gif] => false
[j2me_gif89a] => false
[j2me_jpg] => false
[j2me_png] => false
[j2me_bmp] => false
[j2me_bmp3] => false
[j2me_wbmp] => false
[j2me_midi] => false
[j2me_wav] => false
[j2me_amr] => false
[j2me_mp3] => false
[j2me_mp4] => false
[j2me_imelody] => false
[j2me_rmf] => false
[j2me_au] => false
[j2me_aac] => false
[j2me_realaudio] => false
[j2me_xmf] => false
[j2me_wma] => false
[j2me_3gpp] => false
[j2me_h263] => false
[j2me_svgt] => false
[j2me_mpeg4] => false
[j2me_realvideo] => false
[j2me_real8] => false
[j2me_realmedia] => false
[j2me_left_softkey_code] => 0
[j2me_right_softkey_code] => 0
[j2me_middle_softkey_code] => 0
[j2me_select_key_code] => 0
[j2me_return_key_code] => 0
[j2me_clear_key_code] => 0
[j2me_datefield_no_accepts_null_date] => false
[j2me_datefield_broken] => false
[receiver] => false
[sender] => false
[mms_max_size] => 0
[mms_max_height] => 0
[mms_max_width] => 0
[built_in_recorder] => false
[built_in_camera] => false
[mms_jpeg_baseline] => false
[mms_jpeg_progressive] => false
[mms_gif_static] => false
[mms_gif_animated] => false
[mms_png] => false
[mms_bmp] => false
[mms_wbmp] => false
[mms_amr] => false
[mms_wav] => false
[mms_midi_monophonic] => false
[mms_midi_polyphonic] => false
[mms_midi_polyphonic_voices] => 0
[mms_spmidi] => false
[mms_mmf] => false
[mms_mp3] => false
[mms_evrc] => false
[mms_qcelp] => false
[mms_ota_bitmap] => false
[mms_nokia_wallpaper] => false
[mms_nokia_operatorlogo] => false
[mms_nokia_3dscreensaver] => false
[mms_nokia_ringingtone] => false
[mms_rmf] => false
[mms_xmf] => false
[mms_symbian_install] => false
[mms_jar] => false
[mms_jad] => false
[mms_vcard] => false
[mms_vcalendar] => false
[mms_wml] => false
[mms_wbxml] => false
[mms_wmlc] => false
[mms_video] => false
[mms_mp4] => false
[mms_3gpp] => false
[mms_3gpp2] => false
[mms_max_frame_rate] => 0
[nokiaring] => false
[picturemessage] => false
[operatorlogo] => false
[largeoperatorlogo] => false
[callericon] => false
[nokiavcard] => false
[nokiavcal] => false
[sckl_ringtone] => false
[sckl_operatorlogo] => false
[sckl_groupgraphic] => false
[sckl_vcard] => false
[sckl_vcalendar] => false
[text_imelody] => false
[ems] => false
[ems_variablesizedpictures] => false
[ems_imelody] => false
[ems_odi] => false
[ems_upi] => false
[ems_version] => 0
[siemens_ota] => false
[siemens_logo_width] => 101
[siemens_logo_height] => 29
[siemens_screensaver_width] => 101
[siemens_screensaver_height] => 50
[gprtf] => false
[sagem_v1] => false
[sagem_v2] => false
[panasonic] => false
[sms_enabled] => false
[wav] => false
[mmf] => false
[smf] => false
[mld] => false
[midi_monophonic] => false
[midi_polyphonic] => false
[sp_midi] => false
[rmf] => false
[xmf] => false
[compactmidi] => false
[digiplug] => false
[nokia_ringtone] => false
[imelody] => false
[au] => false
[amr] => false
[awb] => false
[aac] => false
[mp3] => false
[voices] => 1
[qcelp] => false
[evrc] => false
[flash_lite_version] =>
[fl_wallpaper] => false
[fl_screensaver] => false
[fl_standalone] => false
[fl_browser] => false
[fl_sub_lcd] => false
[full_flash_support] => true
[css_supports_width_as_percentage] => true
[css_border_image] => none
[css_rounded_corners] => none
[css_gradient] => css3
[css_spriting] => true
[css_gradient_linear] => none
[is_transcoder] => false
[transcoder_ua_header] => user-agent
[rss_support] => false
[pdf_support] => true
[progressive_download] => true
[playback_vcodec_h263_0] => -1
[playback_vcodec_h263_3] => -1
[playback_vcodec_mpeg4_sp] => -1
[playback_vcodec_mpeg4_asp] => -1
[playback_vcodec_h264_bp] => -1
[playback_real_media] => none
[playback_3gpp] => false
[playback_3g2] => false
[playback_mp4] => false
[playback_mov] => false
[playback_acodec_amr] => none
[playback_acodec_aac] => none
[playback_df_size_limit] => 0
[playback_directdownload_size_limit] => 0
[playback_inline_size_limit] => 0
[playback_oma_size_limit] => 0
[playback_acodec_qcelp] => false
[playback_wmv] => none
[hinted_progressive_download] => false
[html_preferred_dtd] => html4
[viewport_supported] => false
[viewport_width] => width_equals_max_image_width
[viewport_userscalable] =>
[viewport_initial_scale] =>
[viewport_maximum_scale] =>
[viewport_minimum_scale] =>
[mobileoptimized] => false
[handheldfriendly] => false
[canvas_support] => none
[image_inlining] => true
[is_smarttv] => false
[is_console] => false
[nfc_support] => false
[ux_full_desktop] => true
[jqm_grade] => A
[is_sencha_touch_ok] => true
)
)
</code></pre></p>
</div>
<div class="modal-footer">
<a href="#!" class="modal-action modal-close waves-effect waves-green btn-flat ">close</a>
</div>
</div>
</td></tr><tr><td>Zsxsoft<br /><small>1.3</small><br /></td><td>Google Chrome 27.0.1453</td><td><i class="material-icons">close</i></td><td>GNU/Linux </td><td style="border-left: 1px solid #555"></td><td></td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td style="border-left: 1px solid #555"><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td>0</td><td>
<!-- Modal Trigger -->
<a class="modal-trigger btn waves-effect waves-light" href="#modal-5d43e024-b46c-44f6-8914-529b05569bc2">Detail</a>
<!-- Modal Structure -->
<div id="modal-5d43e024-b46c-44f6-8914-529b05569bc2" class="modal modal-fixed-footer">
<div class="modal-content">
<h4>Zsxsoft result detail</h4>
<p><pre><code class="php">Array
(
[browser] => Array
(
[link] => http://google.com/chrome/
[title] => Google Chrome 27.0.1453
[name] => Google Chrome
[version] => 27.0.1453
[code] => chrome
[image] => img/16/browser/chrome.png
)
[os] => Array
(
[link] => http://www.linux.org/
[name] => GNU/Linux
[version] =>
[code] => linux
[x64] => 1
[title] => GNU/Linux x64
[type] => os
[dir] => os
[image] => img/16/os/linux.png
)
[device] => Array
(
[link] =>
[title] =>
[model] =>
[brand] =>
[code] => null
[dir] => device
[type] => device
[image] => img/16/device/null.png
)
[platform] => Array
(
[link] => http://www.linux.org/
[name] => GNU/Linux
[version] =>
[code] => linux
[x64] => 1
[title] => GNU/Linux x64
[type] => os
[dir] => os
[image] => img/16/os/linux.png
)
)
</code></pre></p>
</div>
<div class="modal-footer">
<a href="#!" class="modal-action modal-close waves-effect waves-green btn-flat ">close</a>
</div>
</div>
</td></tr></table>
</div>
<div class="section">
<h1 class="header center orange-text">About this comparison</h1>
<div class="row center">
<h5 class="header light">
The primary goal of this project is simple<br />
I wanted to know which user agent parser is the most accurate in each part - device detection, bot detection and so on...<br />
<br />
The secondary goal is to provide a source for all user agent parsers to improve their detection based on this results.<br />
<br />
You can also improve this further, by suggesting ideas at <a href="https://github.com/ThaDafinser/UserAgentParserComparison">ThaDafinser/UserAgentParserComparison</a><br />
<br />
The comparison is based on the abstraction by <a href="https://github.com/ThaDafinser/UserAgentParser">ThaDafinser/UserAgentParser</a>
</h5>
</div>
</div>
<div class="card">
<div class="card-content">
Comparison created <i>2016-05-10 08:05:24</i> | by
<a href="https://github.com/ThaDafinser">ThaDafinser</a>
</div>
</div>
</div>
<script src="https://code.jquery.com/jquery-2.1.4.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.97.3/js/materialize.min.js"></script>
<script src="http://cdnjs.cloudflare.com/ajax/libs/list.js/1.2.0/list.min.js"></script>
<script>
$(document).ready(function(){
// the "href" attribute of .modal-trigger must specify the modal ID that wants to be triggered
$('.modal-trigger').leanModal();
});
</script>
</body>
</html> | {
"content_hash": "c70810d429a13c9f957542de4a1444b2",
"timestamp": "",
"source": "github",
"line_count": 1364,
"max_line_length": 784,
"avg_line_length": 40.51099706744868,
"alnum_prop": 0.5425556943011746,
"repo_name": "ThaDafinser/UserAgentParserComparison",
"id": "1bab5fe69339277f6b184bc1e4b929e61b43f6dd",
"size": "55258",
"binary": false,
"copies": "1",
"ref": "refs/heads/gh-pages",
"path": "v5/user-agent-detail/b0/b7/b0b7d112-9b2e-4629-81dd-731582b2e9ad.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2060859160"
}
],
"symlink_target": ""
} |
// Configuration and mutable state shared by the whole content script.
var TIME_PERIOD_CHECK_HOURS = 1;
// Remote list of reported fedora profiles.
var JSON_URL = 'https://jhvisser.com/hidefedora/reports/profiles.json';
// Profile ids fetched from JSON_URL.
var fedoras = [];
// Removal strategy: 'hide' or 'replace-fedora-cat' (overridden from sync storage).
var removalMethod = 'hide';
// Whether to attach the "HF" report button to untagged comments.
var showReportButton = true;
// Profile ids the user has banned locally on this machine.
var banned = [];
// Words that flag a comment for removal when found in its text.
var bannedWords = [];
// Load user preferences from synced storage; keep the defaults above when
// a key has never been saved.
chrome.storage.sync.get(["removalMethod", "showReportButton"], function(prefs) {
  if (_.has(prefs, "removalMethod")) {
    removalMethod = prefs.removalMethod;
  }
  if (_.has(prefs, "showReportButton")) {
    showReportButton = prefs.showReportButton;
  }
});
// Restore cached lists from local storage; absent keys keep their defaults.
chrome.storage.local.get(["fedoras", "banned", "bannedWords"], function(stored) {
  if (_.has(stored, "fedoras")) {
    fedoras = stored.fedoras;
  }
  if (_.has(stored, "banned")) {
    banned = stored.banned;
  }
  if (_.has(stored, "bannedWords")) {
    bannedWords = stored.bannedWords;
  }
});
// Return a uniformly distributed integer in the inclusive range [min, max].
var randomInt = function(min, max) {
  var span = max - min + 1;
  return min + Math.floor(Math.random() * span);
};
// Add a profile id to the local ban list and persist it, skipping duplicates.
// The list is re-read from storage before pushing to avoid clobbering bans
// written by other tabs.
var localBan = function(profileId) {
  if (_.contains(banned, profileId)) {
    return;
  }
  chrome.storage.local.get("banned", function(items) {
    if (_.has(items, "banned")) {
      banned = items.banned;
    }
    banned.push(profileId);
    chrome.storage.local.set({ banned: banned });
  });
};
// When running inside an iframe, return the HTML-escaped referrer URL of the
// embedding page; otherwise return null.
var getParentUrl = function() {
  var embedded = parent !== window;
  return embedded ? _.escape(document.referrer) : null;
};
// POST a fedora report (profile, offending comment, current video URL) to the
// backend. Fire-and-forget: no success/error handling.
var submitReport = function(profileId, comment) {
  var payload = {
    submit: 1,
    profileUrl: profileId,
    comment: comment,
    youtubeUrl: window.location.href
  };
  $.ajax({
    url: 'https://jhvisser.com/hidefedora/reports',
    type: 'POST',
    data: payload
  });
};
// Click handler for the "HF" report button: after confirmation, ban the
// profile locally, report it to the backend, disable the button, and re-run
// removal shortly so the comment disappears.
var onReportClick = function(e) {
  if (!confirm('Are you sure you want to report and ban this fedora profile?')) {
    return;
  }
  var btn = $(this);
  var profileId = btn.data("profileId");
  var comment = btn.data("comment");
  localBan(profileId);
  submitReport(profileId, comment);
  btn.prop('disabled', true).html('Reported').addClass('hide-fedora-reported');
  setTimeout(function() {
    execute();
  }, 1000);
};
// Scan every element matching outerSelector (one rendered comment each).
// Flagged comments are hidden or replaced according to removalMethod; the
// rest get an "HF" report button appended to their action toolbar.
//
// BUG FIX: in the original, the `var` list was accidentally terminated with
// `;` after `profileId`, making `comment` and `thisEl` implicit globals
// (would throw in strict mode and leaked state across invocations). They
// are now properly declared as locals.
var process = function(outerSelector) {
  $(outerSelector).each(function(index, element) {
    var el = $(element);
    var thisEl = $(this);
    var profileId = el.find('a').attr('href');
    var comment = el.find('.comment-renderer-text-content:first').text();
    // Flag the comment if the author is a known/banned fedora or the text
    // contains any banned word (case-insensitive substring match).
    var flagged = _.contains(fedoras, profileId) ||
      _.contains(banned, profileId) ||
      _.some(bannedWords, function(word) {
        return comment.toLowerCase().indexOf(_.unescape(word.toLowerCase())) > -1;
      });
    if (flagged) {
      switch (removalMethod) {
        // Hide: remove the whole thread when possible, else just this comment.
        case "hide":
          var commentThreadRenderer = thisEl.parent('.comment-thread-renderer');
          if (commentThreadRenderer.length === 0) {
            thisEl.remove();
          } else {
            thisEl.parent('.comment-thread-renderer').remove();
          }
          break;
        // Replace: swap author, text, and avatar for a random cat picture.
        case "replace-fedora-cat":
          if (!thisEl.hasClass("hide-fedora-found")) {
            thisEl.addClass("hide-fedora-found");
            thisEl.parent().find('.comment-replies-renderer').remove();
            var fileUrl = chrome.extension.getURL('resources/pics/fedora-cats/' + randomInt(1, 22) + '.jpg');
            // Title
            el.find(".comment-author-text")
              .html("Replaced with a cat")
              .removeAttr('data-ytid')
              .attr("href", fileUrl);
            // Text
            el.find(".comment-renderer-text-content").html("Meow meow");
            // Img
            el.find(".comment-author-thumbnail")
              .find("img")
              .attr("src", "")
              .attr("src", fileUrl)
              .closest(".yt-uix-sessionlink")
              .attr("href", fileUrl)
              .removeAttr('data-ytid');
            // Controls
            el.find(".comment-renderer-footer").remove();
            // Replies
            el.find('.comment-replies-renderer:first').remove();
          }
          break;
      }
    }
    else if (showReportButton && !thisEl.hasClass("hide-fedora-tagged")) {
      // Tag once so we never attach duplicate buttons/handlers.
      thisEl.addClass("hide-fedora-tagged");
      thisEl
        .find('.comment-renderer-footer .comment-action-buttons-toolbar')
        .append('<button type="button" class="hide-fedora-report-btn">HF</button>');
      thisEl.find('.hide-fedora-report-btn')
        .data('profileId', profileId)
        .data('comment', comment)
        .click(onReportClick);
    }
  });
};
// Run removal/tagging over every comment renderer currently in the DOM.
var execute = function() {
  process(".comment-renderer");
};
// Download the latest fedora profile list, update the in-memory copy, and
// cache it in local storage together with the fetch timestamp.
var fetchJSON = function(dateString) {
  $.getJSON(JSON_URL, function(res) {
    fedoras = res.fedoras;
    var cache = {
      fedoras: res.fedoras,
      lastJSONUpdate: dateString
    };
    chrome.storage.local.set(cache);
  });
};
// On startup, refresh the fedora list if the cached copy is missing or older
// than TIME_PERIOD_CHECK_HOURS.
chrome.storage.local.get("lastJSONUpdate", function(items) {
  if (!_.has(items, "lastJSONUpdate")) {
    fetchJSON(new Date().toISOString());
    return;
  }
  var now = moment();
  var stale = moment(items.lastJSONUpdate)
    .add(TIME_PERIOD_CHECK_HOURS, 'hours')
    .isBefore(now);
  if (stale) {
    fetchJSON(now.toISOString());
  }
});
// Attach a MutationObserver to the comment section so processing re-runs on
// every DOM change, and also poll a few times up front (24 ticks x 250ms)
// before the observer has anything to report.
var trigger = function() {
  var target = document.querySelector('#watch-discussion');
  if (target === null) {
    return;
  }
  var Observer = window.MutationObserver || window.WebKitMutationObserver;
  var observer = new Observer(function() {
    execute();
  });
  observer.observe(target, { childList: true, subtree: true });
  var ticks = 0;
  var interval = setInterval(function() {
    execute();
    ticks += 1;
    if (ticks === 24) {
      clearInterval(interval);
    }
  }, 250);
};
// Re-arm processing on every page show (also fires on back/forward-cache
// restores, which bypass normal load events).
window.addEventListener('pageshow', function() {
  trigger();
});
// YouTube's SPA navigation finishes with a CSS transition on the #progress
// bar; use that as the signal that a new page has loaded and re-trigger.
document.addEventListener('transitionend', function(e) {
  if (e.target.id === 'progress') {
    trigger();
  }
});
| {
"content_hash": "afc0eeb2952b407b6388632f8fd2a445",
"timestamp": "",
"source": "github",
"line_count": 222,
"max_line_length": 102,
"avg_line_length": 24.756756756756758,
"alnum_prop": 0.6377365356622998,
"repo_name": "hadalin/chrome-hidefedora",
"id": "436579f005d3cf57f676f3141261c80f8d4364af",
"size": "5496",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hidefedora/src/content.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1099"
},
{
"name": "HTML",
"bytes": "4743"
},
{
"name": "JavaScript",
"bytes": "10666"
}
],
"symlink_target": ""
} |
import * as React from 'react';
import PropTypes from 'prop-types';
import clsx from 'clsx';
import { styled, Theme, useThemeProps } from '@mui/material/styles';
import {
unstable_composeClasses as composeClasses,
generateUtilityClass,
generateUtilityClasses,
} from '@mui/base';
import MonthPicker from '../MonthPicker/MonthPicker';
import { useCalendarState } from './useCalendarState';
import { useDefaultDates, useUtils } from '../internal/pickers/hooks/useUtils';
import FadeTransitionGroup from './PickersFadeTransitionGroup';
import PickersCalendar, { ExportedCalendarProps } from './PickersCalendar';
import { PickerOnChangeFn, useViews } from '../internal/pickers/hooks/useViews';
import PickersCalendarHeader, { ExportedCalendarHeaderProps } from './PickersCalendarHeader';
import YearPicker, { ExportedYearPickerProps } from '../YearPicker/YearPicker';
import { findClosestEnabledDate } from '../internal/pickers/date-utils';
import { CalendarPickerView } from './shared';
import PickerView from '../internal/pickers/Picker/PickerView';
export interface CalendarPickerClasses {
/** Styles applied to the root element. */
root: string;
/** Styles applied to the transition group element. */
viewTransitionContainer: string;
}
export type CalendarPickerClassKey = keyof CalendarPickerClasses;
export interface CalendarPickerProps<TDate>
extends ExportedCalendarProps<TDate>,
ExportedYearPickerProps<TDate>,
ExportedCalendarHeaderProps<TDate> {
className?: string;
date: TDate | null;
/**
* Default calendar month displayed when `value={null}`.
*/
defaultCalendarMonth?: TDate;
/**
* If `true`, the picker and text field are disabled.
* @default false
*/
disabled?: boolean;
/**
* @default false
*/
disableFuture?: boolean;
/**
* @default false
*/
disablePast?: boolean;
/**
* Max selectable date. @DateIOType
*/
maxDate?: TDate;
/**
* Min selectable date. @DateIOType
*/
minDate?: TDate;
/**
* Callback fired on view change.
*/
onViewChange?: (view: CalendarPickerView) => void;
/**
* Callback fired on date change
*/
onChange: PickerOnChangeFn<TDate>;
/**
* Callback firing on month change. @DateIOType
*/
onMonthChange?: (date: TDate) => void;
/**
* Initially open view.
* @default 'day'
*/
openTo?: CalendarPickerView;
/**
* Make picker read only.
* @default false
*/
readOnly?: boolean;
/**
* Disable heavy animations.
* @default typeof navigator !== 'undefined' && /(android)/i.test(navigator.userAgent)
*/
reduceAnimations?: boolean;
/**
* Component displaying when passed `loading` true.
* @default () => <span data-mui-test="loading-progress">...</span>
*/
renderLoading?: () => React.ReactNode;
/**
* Disable specific date. @DateIOType
*/
shouldDisableDate?: (day: TDate) => boolean;
/**
* Controlled open view.
*/
view?: CalendarPickerView;
/**
* Views for calendar picker.
* @default ['year', 'day']
*/
views?: readonly CalendarPickerView[];
}
export type ExportedCalendarPickerProps<TDate> = Omit<
CalendarPickerProps<TDate>,
| 'date'
| 'view'
| 'views'
| 'openTo'
| 'onChange'
| 'changeView'
| 'slideDirection'
| 'currentMonth'
| 'className'
>;
interface CalendarPickerPropsWithClasses<TDate> extends CalendarPickerProps<TDate> {
classes?: Partial<CalendarPickerClasses>;
}
// Builds the utility class name for a given slot under this component's
// MUI name (e.g. "MuiCalendarPicker-root").
export function getCalendarPickerUtilityClass(slot: string) {
  const componentName = 'MuiCalendarPicker';
  return generateUtilityClass(componentName, slot);
}
// Pre-generated class-name map ({ root, viewTransitionContainer }) exposed so
// consumers can target these slots from styled()/sx overrides.
export const calendarPickerClasses: CalendarPickerClasses = generateUtilityClasses(
  'MuiCalendarPicker',
  ['root', 'viewTransitionContainer'],
);
// Resolves the final class names for each slot, merging user-supplied
// `classes` overrides with the generated MuiCalendarPicker-* defaults.
const useUtilityClasses = (
  ownerState: CalendarPickerProps<any> & { classes?: Partial<CalendarPickerClasses> },
) => {
  return composeClasses(
    {
      root: ['root'],
      viewTransitionContainer: ['viewTransitionContainer'],
    },
    getCalendarPickerUtilityClass,
    ownerState.classes,
  );
};
// Styled wrapper around PickerView forming the picker's outer column layout;
// themable through the MuiCalendarPicker "Root" slot.
const CalendarPickerRoot = styled(PickerView, {
  name: 'MuiCalendarPicker',
  slot: 'Root',
  overridesResolver: (props, styles) => styles.root,
})<{ ownerState: CalendarPickerProps<any> }>({
  display: 'flex',
  flexDirection: 'column',
});
// Scrollable container that cross-fades between the year/month/day views;
// themable through the MuiCalendarPicker "ViewTransitionContainer" slot.
const CalendarPickerViewTransitionContainer = styled(FadeTransitionGroup, {
  name: 'MuiCalendarPicker',
  slot: 'ViewTransitionContainer',
  overridesResolver: (props, styles) => styles.viewTransitionContainer,
})<{ ownerState: CalendarPickerProps<any> }>({
  overflowY: 'auto',
});
// Heavy animations are disabled by default on Android user agents; the
// `typeof navigator` guard keeps this expression safe during SSR.
export const defaultReduceAnimations =
  typeof navigator !== 'undefined' && /(android)/i.test(navigator.userAgent);
/**
 * Internal calendar view used by the date pickers: renders the calendar
 * header plus whichever sub-picker (year / month / day) matches the
 * currently open view. Generic over the date library's date type `TDate`.
 */
const CalendarPicker = React.forwardRef(function CalendarPicker<TDate extends any>(
  inProps: CalendarPickerPropsWithClasses<TDate>,
  ref: React.Ref<HTMLDivElement>,
) {
  // Resolve defaultProps declared on the theme for MuiCalendarPicker.
  const props = useThemeProps<Theme, CalendarPickerProps<TDate>, 'MuiCalendarPicker'>({
    props: inProps,
    name: 'MuiCalendarPicker',
  });
  const {
    autoFocus,
    onViewChange,
    date,
    disableFuture = false,
    disablePast = false,
    defaultCalendarMonth,
    loading = false,
    maxDate: maxDateProp,
    minDate: minDateProp,
    onChange,
    onMonthChange,
    reduceAnimations = defaultReduceAnimations,
    renderLoading = () => <span data-mui-test="loading-progress">...</span>,
    shouldDisableDate,
    shouldDisableYear,
    view,
    views = ['year', 'day'],
    openTo = 'day',
    className,
    ...other
  } = props;
  const utils = useUtils<TDate>();
  const defaultDates = useDefaultDates<TDate>();
  // Fall back to the date-adapter-wide bounds when no explicit min/max given.
  const minDate = minDateProp ?? defaultDates.minDate;
  const maxDate = maxDateProp ?? defaultDates.maxDate;
  // Controlled/uncontrolled handling of which view (year/month/day) is open.
  const { openView, setOpenView } = useViews({
    view,
    views,
    openTo,
    onChange,
    onViewChange,
  });
  // Month navigation, slide-animation state, and per-day disabling logic.
  const {
    calendarState,
    changeFocusedDay,
    changeMonth,
    isDateDisabled,
    handleChangeMonth,
    onMonthSwitchingAnimationEnd,
  } = useCalendarState({
    date,
    defaultCalendarMonth,
    reduceAnimations,
    onMonthChange,
    minDate,
    maxDate,
    shouldDisableDate,
    disablePast,
    disableFuture,
  });
  React.useEffect(() => {
    // If the incoming date is disabled, snap to the closest enabled date.
    if (date && isDateDisabled(date)) {
      const closestEnabledDate = findClosestEnabledDate<TDate>({
        utils,
        date,
        minDate,
        maxDate,
        disablePast,
        disableFuture,
        shouldDisableDate: isDateDisabled,
      });
      onChange(closestEnabledDate, 'partial');
    }
    // This call is too expensive to run it on each prop change.
    // So just ensure that we are not rendering disabled as selected on mount.
  }, []); // eslint-disable-line
  // Keep the displayed month in sync with an externally-changed date.
  React.useEffect(() => {
    if (date) {
      changeMonth(date);
    }
  }, [date]); // eslint-disable-line
  const ownerState = props;
  const classes = useUtilityClasses(ownerState);
  // Props forwarded to MonthPicker when the "month" view is open.
  const monthPickerProps = {
    className,
    date,
    disabled: other.disabled,
    disablePast,
    disableFuture,
    onChange,
    minDate,
    maxDate,
    onMonthChange,
    readOnly: other.readOnly,
  };
  return (
    <CalendarPickerRoot ref={ref} className={clsx(classes.root, className)} ownerState={ownerState}>
      <PickersCalendarHeader
        {...other}
        views={views}
        openView={openView}
        currentMonth={calendarState.currentMonth}
        onViewChange={setOpenView}
        onMonthChange={(newMonth, direction) => handleChangeMonth({ newMonth, direction })}
        minDate={minDate}
        maxDate={maxDate}
        disablePast={disablePast}
        disableFuture={disableFuture}
        reduceAnimations={reduceAnimations}
      />
      <CalendarPickerViewTransitionContainer
        reduceAnimations={reduceAnimations}
        className={classes.viewTransitionContainer}
        transKey={openView}
        ownerState={ownerState}
      >
        <div>
          {openView === 'year' && (
            <YearPicker
              {...other}
              autoFocus={autoFocus}
              date={date}
              onChange={onChange}
              minDate={minDate}
              maxDate={maxDate}
              disableFuture={disableFuture}
              disablePast={disablePast}
              isDateDisabled={isDateDisabled}
              shouldDisableYear={shouldDisableYear}
              onFocusedDayChange={changeFocusedDay}
            />
          )}
          {openView === 'month' && <MonthPicker {...monthPickerProps} />}
          {openView === 'day' && (
            <PickersCalendar
              {...other}
              {...calendarState}
              autoFocus={autoFocus}
              onMonthSwitchingAnimationEnd={onMonthSwitchingAnimationEnd}
              onFocusedDayChange={changeFocusedDay}
              reduceAnimations={reduceAnimations}
              date={date}
              onChange={onChange}
              isDateDisabled={isDateDisabled}
              loading={loading}
              renderLoading={renderLoading}
            />
          )}
        </div>
      </CalendarPickerViewTransitionContainer>
    </CalendarPickerRoot>
  );
});
CalendarPicker.propTypes /* remove-proptypes */ = {
// ----------------------------- Warning --------------------------------
// | These PropTypes are generated from the TypeScript type definitions |
// | To update them edit TypeScript types and run "yarn proptypes" |
// ----------------------------------------------------------------------
/**
* @ignore
*/
autoFocus: PropTypes.bool,
/**
* @ignore
*/
classes: PropTypes.object,
/**
* @ignore
*/
className: PropTypes.string,
/**
* @ignore
*/
date: PropTypes.any,
/**
* Default calendar month displayed when `value={null}`.
*/
defaultCalendarMonth: PropTypes.any,
/**
* If `true`, the picker and text field are disabled.
* @default false
*/
disabled: PropTypes.bool,
/**
* @default false
*/
disableFuture: PropTypes.bool,
/**
* @default false
*/
disablePast: PropTypes.bool,
/**
* If `true` renders `LoadingComponent` in calendar instead of calendar view.
* Can be used to preload information and show it in calendar.
* @default false
*/
loading: PropTypes.bool,
/**
* Max selectable date. @DateIOType
*/
maxDate: PropTypes.any,
/**
* Min selectable date. @DateIOType
*/
minDate: PropTypes.any,
/**
* Callback fired on date change
*/
onChange: PropTypes.func.isRequired,
/**
* Callback firing on month change. @DateIOType
*/
onMonthChange: PropTypes.func,
/**
* Callback fired on view change.
*/
onViewChange: PropTypes.func,
/**
* Initially open view.
* @default 'day'
*/
openTo: PropTypes.oneOf(['day', 'month', 'year']),
/**
* Make picker read only.
* @default false
*/
readOnly: PropTypes.bool,
/**
* Disable heavy animations.
* @default typeof navigator !== 'undefined' && /(android)/i.test(navigator.userAgent)
*/
reduceAnimations: PropTypes.bool,
/**
* Component displaying when passed `loading` true.
* @default () => <span data-mui-test="loading-progress">...</span>
*/
renderLoading: PropTypes.func,
/**
* Disable specific date. @DateIOType
*/
shouldDisableDate: PropTypes.func,
/**
* Disable specific years dynamically.
* Works like `shouldDisableDate` but for year selection view @DateIOType.
*/
shouldDisableYear: PropTypes.func,
/**
* Controlled open view.
*/
view: PropTypes.oneOf(['day', 'month', 'year']),
/**
* Views for calendar picker.
* @default ['year', 'day']
*/
views: PropTypes.arrayOf(PropTypes.oneOf(['day', 'month', 'year']).isRequired),
} as any;
/**
*
* Demos:
*
* - [Date Picker](https://mui.com/components/date-picker/)
*
* API:
*
* - [CalendarPicker API](https://mui.com/api/calendar-picker/)
*/
export default CalendarPicker as <TDate>(
props: CalendarPickerPropsWithClasses<TDate> & React.RefAttributes<HTMLDivElement>,
) => JSX.Element;
| {
"content_hash": "69bb23170bf3d65e91dc2bb742acc9c5",
"timestamp": "",
"source": "github",
"line_count": 449,
"max_line_length": 100,
"avg_line_length": 26.7260579064588,
"alnum_prop": 0.6445,
"repo_name": "oliviertassinari/material-ui",
"id": "1bf48739fd1d4bcc913e24a4337dcb4073e831d5",
"size": "12000",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/mui-lab/src/CalendarPicker/CalendarPicker.tsx",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2126"
},
{
"name": "JavaScript",
"bytes": "3779365"
},
{
"name": "TypeScript",
"bytes": "2514535"
}
],
"symlink_target": ""
} |
layout: news_item
title: 'Frida 1.2.1 Released'
date: 2014-04-21 16:00:00 +0100
author: oleavr
version: 1.2.1
categories: [release]
---
Had some fun tracing Apple's crypto APIs, which led to the discovery of
a few bugs. So here's 1.2.1 bringing some critical ARM-related bugfixes:
- ARM32: Fix crashes caused by register clobber issue in V8 on ARM32 due to
an ABI difference regarding `r9` in Apple's ABI compared to AAPCS.
- ARM32: Fix ARM32/Thumb relocator branch rewriting for immediate same-mode
branches.
- ARM64: Improve ARM64 relocator to support rewriting `b` and `bl`.
| {
"content_hash": "37fe04073ea8aa7d1c1f5cf1069634b0",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 77,
"avg_line_length": 37.1875,
"alnum_prop": 0.7411764705882353,
"repo_name": "frida/frida-website",
"id": "1e7c26ab924ca6544b96c9fd96a806be73847fcf",
"size": "599",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "_i18n/en/_posts/2014-04-21-frida-1-2-1-released.markdown",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "22723"
},
{
"name": "HTML",
"bytes": "18097"
},
{
"name": "Python",
"bytes": "1487"
},
{
"name": "Ruby",
"bytes": "255"
}
],
"symlink_target": ""
} |
{-# LANGUAGE OverloadedStrings, GeneralizedNewtypeDeriving, RankNTypes,
DeriveDataTypeable #-}
-- | Internal functions intended only to be used by this package. API stability
-- not guaranteed!
module Qualys.Internal
(
QualysT (..)
, QID (..)
, liftReader
-- * Logging
, QLogLevel (..)
, Logger
-- * Configuration
, QualysConf (..)
, QualysPlatform (..)
, qualysUSPlatform1
, qualysUSPlatform2
, qualysEUPlatform
, qualysPrivateCloudPlatform
, retryStrat
-- * Session data
, QualysSess (..)
, qualysHeaders
, qualTimeout
, qualUser
, qualPass
, qualPlatform
, qualLogger
, qualRetries
-- * Convenience functions for XML processing
, parseBool
, parseUInt
, parseBound
, parseSev
, parseDate
, requireTagNoAttr
, requireWith
, optionalWith
, parseDiscard
)where
import Control.Applicative
import Control.Monad.Reader
import Control.Monad.Catch
import Control.Retry
import qualified Data.ByteString as B
import Data.Conduit (ConduitM)
import Data.Monoid ((<>))
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Read as T
import Data.Time.Clock (UTCTime)
import Data.Time.Format (parseTime)
import Data.Typeable
import Data.XML.Types
import Network.HTTP.Client
import Network.HTTP.Types.Header (HeaderName)
import System.Locale (defaultTimeLocale)
import Text.XML.Stream.Parse
-- | Qualys transformer, which carries around a session and returns an @a@.
newtype QualysT m a = QualysT { unQualysT :: ReaderT QualysSess m a}
deriving (Functor, Applicative, Monad)
instance MonadIO m => MonadIO (QualysT m) where
liftIO = QualysT . liftIO
instance MonadThrow m => MonadThrow (QualysT m) where
throwM = lift . throwM
instance MonadCatch m => MonadCatch (QualysT m) where
catch (QualysT x) handler = QualysT $ catch x (unQualysT . handler)
instance MonadMask m => MonadMask (QualysT m) where
mask x = QualysT $ mask $ \y -> unQualysT $ x (QualysT . y . unQualysT)
uninterruptibleMask x = QualysT $ uninterruptibleMask $ \y -> unQualysT $ x (QualysT . y . unQualysT)
instance MonadTrans QualysT where
lift = QualysT . lift
newtype QID = QID { unQID :: Int } deriving (Show, Eq, Ord, Typeable)
-- | Lifts an action that works on @ReaderT@ to one that works on
-- @QualysT@.
liftReader :: Monad m => ReaderT QualysSess m a -> QualysT m a
liftReader = QualysT
type Logger m = QLogLevel -> Text -> m ()
data QLogLevel = QLogDebug
| QLogWarn
| QLogError
deriving (Eq, Ord, Show)
-- | Qualys Configuration
data QualysConf = QualysConf
{ qcPlatform :: QualysPlatform -- ^ Qualys Platform to use
, qcUsername :: B.ByteString -- ^ Qualys Username
, qcPassword :: B.ByteString -- ^ Qualys Password
, qcTimeOut :: Int -- ^ Timeout (in seconds)
, qcRetries :: Int -- ^ Number of retries
, qcLogger :: forall m.(MonadIO m) => Logger m -- ^ Function to log
} -- deriving Show
-- | Qualys platform to send requests to.
newtype QualysPlatform = QualysPlatform { unQualysPlatform :: String }
deriving Show
-- | Qualys US Platform 1
qualysUSPlatform1 :: QualysPlatform
qualysUSPlatform1 = QualysPlatform "qualysapi.qualys.com"
-- | Qualys US Platform 2
qualysUSPlatform2 :: QualysPlatform
qualysUSPlatform2 = QualysPlatform "qualysapi.qg2.apps.qualys.com"
-- | Qualys EU Platform
qualysEUPlatform :: QualysPlatform
qualysEUPlatform = QualysPlatform "qualysapi.qualys.eu"
-- | Qualys Private Cloud Platform. Given a string, this function will
-- create a @QualysPlatform@ with "qualysapi." prepended.
qualysPrivateCloudPlatform :: String -> QualysPlatform
qualysPrivateCloudPlatform = QualysPlatform . (<>) "qualysapi."
-- | Required Qualys headers
qualysHeaders :: [(HeaderName, B.ByteString)]
qualysHeaders = [ ("User-Agent", "Qualys/Haskell" )
, ("X-Requested-With", "Irrelevant" )
, ("Content-Type", "text/xml")
]
-- | Qualys session. Contains configuation and @Manager@.
data QualysSess = QualysSess
{ qConf :: QualysConf -- ^ Qualys Configuration
, qManager :: Manager -- ^ HTTP Manager
}
-- | Reach into a @QualysSess@ and grab the timeout
qualTimeout :: QualysSess -> Int
qualTimeout = qcTimeOut . qConf
-- | Get the username from a @QualysSess@.
qualUser :: QualysSess -> B.ByteString
qualUser = qcUsername . qConf
-- | Get the password from a @QualysSess@.
qualPass :: QualysSess -> B.ByteString
qualPass = qcPassword . qConf
-- | Grab the platform from a @QualysSess@.
qualPlatform:: QualysSess -> String
qualPlatform = unQualysPlatform . qcPlatform . qConf
-- | Grab the logger from a @QualysSess@.
qualLogger :: (MonadIO m) => QualysSess -> Logger m
qualLogger = qcLogger . qConf
-- | Get the number of retries from a @QualysSess@.
qualRetries :: QualysSess -> Int
qualRetries = qcRetries . qConf
-- | Retry Strategy - exponential backoff starting at 5 seconds
-- (5,000,000 microseconds), capped at the session's configured retry count.
retryStrat :: Monad m => QualysSess -> RetryPolicyM m
retryStrat sess = exponentialBackoff 5000000 <> limitRetries (qualRetries sess)
-- | Parse a text into a Bool. "0"/"false" map to False, "1"/"true"
-- map to True; any other input is rejected with Nothing.
parseBool :: Text -> Maybe Bool
parseBool t
    | t == "0" || t == "false" = Just False
    | t == "1" || t == "true"  = Just True
    | otherwise                = Nothing
-- | Parse text into an unsigned integral. The entire input must be
-- consumed; a sign, trailing characters, or a non-digit cause Nothing.
parseUInt :: Integral a => Text -> Maybe a
parseUInt t = either (const Nothing) fullMatch (T.decimal t)
  where
    fullMatch (n, rest)
        | T.null rest = Just n
        | otherwise   = Nothing
-- | Parse text to an integral bound (inclusively) by a min and max.
parseBound :: Integral a => a -> a -> Text -> Maybe a
parseBound lo hi t = do
    n <- parseUInt t
    if n >= lo && n <= hi
        then Just n
        else Nothing
-- | Parse a text value into a Qualys severity (range 0-5), enforcing the
-- correct range via 'parseBound'.
parseSev :: Integral a => Text -> Maybe a
parseSev = parseBound 0 5
-- | Parse a text value (ISO-8601 UTC, e.g. "2015-01-01T12:00:00Z") into
-- UTCTime. NOTE(review): 'parseTime' with "System.Locale" is the legacy
-- time API, deprecated in favour of 'parseTimeM' in newer time releases.
parseDate :: Text -> Maybe UTCTime
parseDate = parseTime defaultTimeLocale "%FT%T%QZ" . T.unpack
-- | Require a tag with no attributes set; throws (via 'force') with a
-- descriptive message if the tag is absent.
requireTagNoAttr :: (MonadThrow m) => Name -> ConduitM Event o m a ->
                    ConduitM Event o m a
requireTagNoAttr x = force err . tagNoAttr x
  where
    err = "Tag '" <> show x <> "' required!"
-- | Required value with a parsing function: runs the inner parser, applies
-- @p@, and fails the monad (showing the offending value) when either the
-- value is missing or @p@ rejects it.
requireWith :: Show a => (a -> Maybe b) -> ConduitM Event o m (Maybe a) ->
               ConduitM Event o m b
requireWith p i = do
    x <- i
    case join (fmap p x) of
        Nothing -> fail $ "Bad value '" <> show x <> "'"
        Just y -> return y
-- | Optional value with a parsing function: runs the inner parser and
-- applies @f@, collapsing an absent value or failed parse to Nothing.
optionalWith :: MonadThrow m => (a -> Maybe b) ->
                ConduitM Event o m (Maybe a) -> ConduitM Event o m (Maybe b)
optionalWith f v = do
    x <- v
    return $ f =<< x
-- | Convenience function for ignoring an element and its text content.
parseDiscard :: (MonadThrow m) => Name -> ConduitM Event o m ()
parseDiscard x = void $ tagNoAttr x contentMaybe
| {
"content_hash": "07259063a4950e9342680ba8d0b78572",
"timestamp": "",
"source": "github",
"line_count": 222,
"max_line_length": 105,
"avg_line_length": 32.734234234234236,
"alnum_prop": 0.6503371404981423,
"repo_name": "ahodgen/qualys",
"id": "af952c5d9c96708607fb86ebc93002d210107c35",
"size": "7267",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Qualys/Internal.hs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Haskell",
"bytes": "110325"
}
],
"symlink_target": ""
} |
'use strict';
/**
* Determines whether one string may be found within another string, returning true or false as appropriate.
*
* @category String
* @param {String} subjectString The string to process.
* @param {String} searchString A string to be searched for within the original string.
* @param {Number} [position=0] The position in the original string at which to begin searching for searchString; defaults to 0.
* @throws {TypeError} If `subjectString` is not string.
* @return {Boolean} True if searchString is found within subjectString, else false.
* @example
*
* contains(str, 'To be');
* // -> true
*
* contains(str, 'question');
* // -> true
*
* contains(str, 'nonexistent');
* // -> false
*
* contains(str, 'To be', 1);
* // -> false
*
* contains(str, 'TO BE');
* // -> false
*/
// Implements the ES2015 `String.prototype.includes` contract, delegating to
// the native method when available and falling back to an indexOf-based
// check otherwise. See the JSDoc above for the full parameter contract.
function contains(subjectString, searchString, position) {
  if (typeof subjectString !== 'string') {
    throw new TypeError('Expected a string for first argument');
  }

  // Prefer the native ES2015 implementation when the runtime provides it.
  if (String.prototype.includes) {
    return subjectString.includes(searchString, position);
  }

  // Fallback path for older engines.
  if (searchString == null) {
    return false;
  }

  var subject = subjectString.toString();
  var needle = searchString.toString();
  var startAt = parseInt(position, 10) || 0;

  if (startAt + needle.length > subject.length) {
    return false;
  }
  return subject.indexOf(needle, startAt) !== -1;
}
module.exports = contains; | {
"content_hash": "b27dec24d89ad86cbe5f98fed9268a50",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 128,
"avg_line_length": 27.428571428571427,
"alnum_prop": 0.6888020833333334,
"repo_name": "georapbox/jsEssentials",
"id": "b6d9d74f87d403860bbd35d7df6bae445f5941eb",
"size": "1536",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/strings/contains/contains.js",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "280961"
}
],
"symlink_target": ""
} |
<p>Users report having to wait prolonged times to get verified because of the volume of new registrations.</p>
<blockquote>As of April 2017, Gemini is experiencing extremely high application volume so please allow additional time for the compliance team to complete its review.</blockquote>
<p><a href="https://gemini24.zendesk.com/hc/en-us/articles/206522496-How-long-does-it-take-for-my-identity-to-be-verified-">Gemini support page</a></p>
| {
"content_hash": "30da24428cb7b74e703c274cec513a4f",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 179,
"avg_line_length": 92.2,
"alnum_prop": 0.7874186550976139,
"repo_name": "AT-Publishing/editors",
"id": "3f3f9ab080d9666f30cb581fbffe368e82979dc5",
"size": "461",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_news_docroot/feed/newsblade/to-the-btc-rise-discussion.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "40601"
},
{
"name": "JavaScript",
"bytes": "1019642"
},
{
"name": "Ruby",
"bytes": "1675"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Coq bench</title>
<link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" />
<link href="../../../../../bootstrap.min.css" rel="stylesheet">
<link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet">
<script src="../../../../../moment.min.js"></script>
<!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body>
<div class="container">
<div class="navbar navbar-default" role="navigation">
<div class="container-fluid">
<div class="navbar-header">
<button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#navbar" aria-expanded="false" aria-controls="navbar">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a>
</div>
<div id="navbar" class="collapse navbar-collapse">
<ul class="nav navbar-nav">
<li><a href="../../..">Unstable</a></li>
<li><a href=".">8.4.5 / contrib:descente-infinie dev</a></li>
<li class="active"><a href="">2015-02-02 21:24:19</a></li>
</ul>
<ul class="nav navbar-nav navbar-right">
<li><a href="../../../../../about.html">About</a></li>
</ul>
</div>
</div>
</div>
<div class="article">
<div class="row">
<div class="col-md-12">
<a href=".">« Up</a>
<h1>
contrib:descente-infinie
<small>
dev
<span class="label label-info">Not compatible with this Coq</span>
</small>
</h1>
<p><em><script>document.write(moment("2015-02-02 21:24:19 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2015-02-02 21:24:19 UTC)</em><p>
<h2>Lint</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>ruby lint.rb unstable ../unstable/packages/coq:contrib:descente-infinie/coq:contrib:descente-infinie.dev</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Dry install</h2>
<p>Dry install with the current Coq version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam install -y --dry-run coq:contrib:descente-infinie.dev coq.8.4.5</code></dd>
<dt>Return code</dt>
<dd>768</dd>
<dt>Duration</dt>
<dd>0 s</dd>
<dt>Output</dt>
<dd><pre>[NOTE] Package coq is already installed (current version is 8.4.5).
The following dependencies couldn't be met:
- coq:contrib:descente-infinie -> coq >= 8.5beta1
Your request can't be satisfied:
- Conflicting version constraints for coq
No solution found, exiting
</pre></dd>
</dl>
<p>Dry install without Coq, to test if the problem was incompatibility with the current Coq version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam remove -y coq; opam install -y --dry-run coq:contrib:descente-infinie.dev</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>4 s</dd>
</dl>
<h2>Install dependencies</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Install</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Installation size</h2>
<p>No files were installed.</p>
<h2>Uninstall</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
<dt>Missing removes</dt>
<dd>
none
</dd>
<dt>Wrong removes</dt>
<dd>
none
</dd>
</dl>
</div>
</div>
</div>
<hr/>
<div class="footer">
<p class="text-center">
<small>Sources are on <a href="https://github.com/coq-bench">GitHub</a>. © Guillaume Claret.</small>
</p>
</div>
</div>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
<script src="../../../../../bootstrap.min.js"></script>
</body>
</html> | {
"content_hash": "b4f056c1d567dc73aac89b00ef3537d6",
"timestamp": "",
"source": "github",
"line_count": 141,
"max_line_length": 157,
"avg_line_length": 40.319148936170215,
"alnum_prop": 0.4905892700087951,
"repo_name": "coq-bench/coq-bench.github.io-old",
"id": "5d7b0f969ae623932d796937ecbd5bcc662b45fd",
"size": "5687",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "clean/Linux-x86_64-4.02.1-1.2.0/unstable/8.4.5/contrib:descente-infinie/dev/2015-02-02_21-24-19.html",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
<?php
namespace lo\core\actions\crud;
use kartik\grid\EditableColumnAction;
use Yii;
use yii\web\Response;
/**
 * Class KEditable
 * Action that updates a model attribute submitted inline through the
 * Kartik EditableColumn widget, returning the result as a JSON response.
 * @package lo\core\actions\crud
 * @author Lukyanov Andrey <loveorigami@mail.ru>
 */
class KEditable extends EditableColumnAction
{
    /**
     * Validates/applies the submitted editable value (parent behavior) and
     * wraps the outcome in a JSON-formatted Yii Response object.
     * @inheritdoc
     */
    public function run()
    {
        $out = $this->validateEditable();
        return Yii::createObject(['class' => Response::class, 'format' => Response::FORMAT_JSON, 'data' => $out]);
    }
}
"content_hash": "ce22627b67feda3e18dd6f90cab6cbcf",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 114,
"avg_line_length": 22.4,
"alnum_prop": 0.6678571428571428,
"repo_name": "loveorigami/lo-core",
"id": "47d940bcb4a1bf3c66b5f87b763c8bd622625489",
"size": "604",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/actions/crud/KEditable.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "35512"
},
{
"name": "JavaScript",
"bytes": "7204"
},
{
"name": "PHP",
"bytes": "929142"
},
{
"name": "Smarty",
"bytes": "200"
}
],
"symlink_target": ""
} |
package com.amazon.ask.servlet.verifiers;
/**
 * Verifiers are run against incoming requests to verify authenticity and integrity of the request before processing
 * it.
 */
public interface SkillServletVerifier {
    /**
     * Verifies the authenticity and integrity of an incoming request.
     *
     * @param alexaHttpRequest wrapper formed from the servlet request, carrying
     *                         the request envelope in both serialized and
     *                         deserialized form
     * @throws SecurityException if verification fails; callers should reject
     *                           the request rather than process it.
     */
    void verify(AlexaHttpRequest alexaHttpRequest) throws SecurityException;
}
| {
"content_hash": "532dde289c503e22356d3638301f6166",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 116,
"avg_line_length": 29.904761904761905,
"alnum_prop": 0.6735668789808917,
"repo_name": "amzn/alexa-skills-kit-java",
"id": "42c1b03b53f1f0849f06307e9be5e20b62d6c0b3",
"size": "1206",
"binary": false,
"copies": "1",
"ref": "refs/heads/2.0.x",
"path": "ask-sdk-servlet-support/src/com/amazon/ask/servlet/verifiers/SkillServletVerifier.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "730603"
}
],
"symlink_target": ""
} |
<!DOCTYPE html><html lang="en" xmlns:db="urn:newtelligence-com:dasblog:runtime:data" xmlns:msxsl="urn:schemas-microsoft-com:xslt" xmlns:ploeh="urn:dk:ploeh">
<head>
<META http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>Content moved</title>
<meta name="author" content="Mark Seemann">
<meta http-equiv="refresh" content="0;/2009/03/05/ConstrainedNon-Determinism"><script language="javascript">
window.location.href = "/2009/03/05/ConstrainedNon-Determinism"
</script><link rel="canonical" href="/2009/03/05/ConstrainedNon-Determinism">
</head>
<body>
This page has moved to <a href="/2009/03/05/ConstrainedNon-Determinism">/2009/03/05/ConstrainedNon-Determinism</a>.
</body>
</html> | {
"content_hash": "464c177c8e1344e07dca70b5e6ec6cec",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 157,
"avg_line_length": 57.15384615384615,
"alnum_prop": 0.7133243606998654,
"repo_name": "ploeh/ploeh.github.com",
"id": "2d3d6238e053b73782a45e6c0ddf695238f94599",
"size": "743",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "2009/03/05/ConstrainedNonDeterminism.aspx/index.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5373"
},
{
"name": "HTML",
"bytes": "12023574"
},
{
"name": "Ruby",
"bytes": "11690"
}
],
"symlink_target": ""
} |
namespace Vidly.Migrations
{
using System;
using System.Data.Entity.Migrations;
    // EF Code First migration: mirrors the [Required] annotation added to
    // MembershipType.Name in the schema.
    public partial class AddRequiredAnnotationToMembershipTypeName : DbMigration
    {
        // Makes MembershipTypes.Name non-nullable while keeping its
        // 255-character limit.
        public override void Up()
        {
            AlterColumn("dbo.MembershipTypes", "Name", c => c.String(nullable: false, maxLength: 255));
        }

        // Reverts Name to a nullable string with the same length limit.
        public override void Down()
        {
            AlterColumn("dbo.MembershipTypes", "Name", c => c.String(maxLength: 255));
        }
    }
| {
"content_hash": "d356a151d88bc9f327a0228fc53610d7",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 103,
"avg_line_length": 27.666666666666668,
"alnum_prop": 0.5963855421686747,
"repo_name": "imranaskem/Vidly",
"id": "8eb86759b49116dbac0a17cfefbab2cb4bb90375",
"size": "498",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Vidly/Migrations/201701101407412_AddRequiredAnnotationToMembershipTypeName.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "96"
},
{
"name": "C#",
"bytes": "132804"
},
{
"name": "CSS",
"bytes": "166408"
},
{
"name": "HTML",
"bytes": "5269"
},
{
"name": "JavaScript",
"bytes": "1642751"
}
],
"symlink_target": ""
} |
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "696417cc9ba23ddd8808c565bd341103",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 10.23076923076923,
"alnum_prop": 0.6917293233082706,
"repo_name": "mdoering/backbone",
"id": "d0a69d605eab64cc62aad3b85bcdd196d8204827",
"size": "196",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Asteraceae/Brickellia palmeri/ Syn. Brickellia palmeri typica/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
namespace RegPointApi.ClassicAsp
{
    /// <summary>
    /// Flattened projection of a Response&lt;AddData&lt;Answer&gt;&gt; suitable for
    /// Classic ASP consumers. On success only <see cref="Data"/> is populated;
    /// on failure only <see cref="ErrorMessage"/> is.
    /// </summary>
    public class AddAnswerResponse
    {
        public bool WasSuccessful { get; set; }
        public string ErrorMessage { get; set; }
        public string OriginalContent { get; set; }
        public string RequestUrl { get; set; }
        public AddAnswerData Data { get; set; }

        /// <summary>Parameterless constructor kept for serializers / COM interop.</summary>
        public AddAnswerResponse() { }

        /// <summary>Copies the generic API response into this flattened shape.</summary>
        public AddAnswerResponse(Response<AddData<Answer>> values)
        {
            WasSuccessful = values.WasSuccessful;
            OriginalContent = values.OriginalContent;
            RequestUrl = values.RequestUrl;
            if (WasSuccessful)
            {
                Data = new AddAnswerData()
                {
                    id = values.Data.id,
                    status = values.Data.status,
                    data_submitted = values.Data.data_submitted,
                    data_saved = values.Data.data_saved,
                };
            }
            else
            {
                // NOTE(review): GetMessage appears to be a project extension that
                // flattens the exception into a display string -- confirm in core.
                ErrorMessage = values.Exception.GetMessage();
            }
        }
    }

    /// <summary>Payload of a successful add-answer call (lower-case names mirror the wire format).</summary>
    public class AddAnswerData
    {
        public string status { get; set; }
        public int id { get; set; }
        public Answer data_submitted { get; set; }
        public Answer data_saved { get; set; }
    }

    /// <summary>
    /// Flattened projection of a Response&lt;EditData&lt;Answer&gt;&gt;; same
    /// success/failure population rules as <see cref="AddAnswerResponse"/>.
    /// </summary>
    public class EditAnswerResponse
    {
        public bool WasSuccessful { get; set; }
        public string ErrorMessage { get; set; }
        public string OriginalContent { get; set; }
        public string RequestUrl { get; set; }
        public EditAnswerData Data { get; set; }

        /// <summary>Parameterless constructor kept for serializers / COM interop.</summary>
        public EditAnswerResponse() { }

        /// <summary>Copies the generic API response into this flattened shape.</summary>
        public EditAnswerResponse(Response<EditData<Answer>> values)
        {
            WasSuccessful = values.WasSuccessful;
            OriginalContent = values.OriginalContent;
            RequestUrl = values.RequestUrl;
            if (WasSuccessful)
            {
                Data = new EditAnswerData()
                {
                    message = values.Data.message,
                    status = values.Data.status,
                    data = values.Data.data,
                };
            }
            else
            {
                ErrorMessage = values.Exception.GetMessage();
            }
        }
    }

    /// <summary>Payload of a successful edit-answer call (lower-case names mirror the wire format).</summary>
    public class EditAnswerData
    {
        public string status { get; set; }
        public string message { get; set; }
        public Answer data { get; set; }
    }

    /// <summary>
    /// Flattened projection of a Response&lt;ViewAnswersData&gt;. Unlike the add/edit
    /// responses, the payload type is reused directly rather than copied field-by-field.
    /// </summary>
    public class ViewAnswersResponse
    {
        public bool WasSuccessful { get; set; }
        public string ErrorMessage { get; set; }
        public string OriginalContent { get; set; }
        public string RequestUrl { get; set; }
        public ViewAnswersData Data { get; set; }

        /// <summary>Parameterless constructor kept for serializers / COM interop.</summary>
        public ViewAnswersResponse() { }

        /// <summary>Copies the generic API response into this flattened shape.</summary>
        public ViewAnswersResponse(Response<ViewAnswersData> values)
        {
            WasSuccessful = values.WasSuccessful;
            OriginalContent = values.OriginalContent;
            RequestUrl = values.RequestUrl;
            if (WasSuccessful)
            {
                Data = values.Data;
            }
            else
            {
                ErrorMessage = values.Exception.GetMessage();
            }
        }
    }
}
| {
"content_hash": "4fa853e5b2f3a5aa5ef2b11f16bc4468",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 68,
"avg_line_length": 30.18867924528302,
"alnum_prop": 0.5259375,
"repo_name": "NickLarsen/RegPointApi.NET",
"id": "c10e7e2d1c903c8dbcc68a10829e278f957fc2be",
"size": "3202",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "RegPointApi.ClassicAsp/AnswerResponses.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "114462"
}
],
"symlink_target": ""
} |
# Force UTF-8 as the default encoding on Python 2. reload() and
# sys.setdefaultencoding() do not exist on Python 3, so any failure here is
# deliberately ignored (narrowed from a bare except, which would also have
# swallowed SystemExit/KeyboardInterrupt).
try:
    import sys
    reload(sys).setdefaultencoding("UTF-8")
except Exception:
    pass

try:
    from setuptools import setup, find_packages
except ImportError:
    print('Please install or upgrade setuptools or pip to continue')
    sys.exit(1)

import codecs


def read(filename):
    """Return the contents of *filename* decoded as UTF-8."""
    return codecs.open(filename, encoding='utf-8').read()


# The long description shown on PyPI is assembled from the repository docs.
long_description = '\n\n'.join([read('README.rst'),
                                read('AUTHORS'),
                                read('CHANGES')])

__doc__ = long_description

setup(name='eapii',
      description='Easy Python instrument interfacing',
      version='0.1.0dev',
      long_description=long_description,
      author='Matthieu Dartiailh',
      author_email='m.dartiailh@gmail.com',
      # Fixed copy-paste bug: the project lives in the eapii repository,
      # not pyvisa.
      url='https://github.com/MatthieuDartiailh/eapii',
      requires=['future', 'pint'],
      install_requires=['future', 'pint'],
      keywords='instrument interfacing measurement',
      license='Modified BSD',
      classifiers=[
          'Development Status :: 3 - Alpha',
          'Intended Audience :: Developers',
          'Intended Audience :: Science/Research',
          'License :: OSI Approved :: BSD License',
          'Operating System :: Microsoft :: Windows',
          'Operating System :: POSIX :: Linux',
          'Operating System :: MacOS :: MacOS X',
          'Programming Language :: Python',
          'Topic :: Scientific/Engineering :: Interface Engine/Protocol Translator',
          'Topic :: Software Development :: Libraries :: Python Modules',
          'Programming Language :: Python :: 2.7',
          'Programming Language :: Python :: 3.3',
          'Programming Language :: Python :: 3.4',
      ],
      packages=find_packages(),
      platforms="Linux, Windows, Mac",
      # NOTE(review): use_2to3 was removed in setuptools >= 58; this no-op
      # False value is kept for compatibility with the pinned toolchain.
      use_2to3=False,
      zip_safe=False)
| {
"content_hash": "a827bcd7be30cd80a8f08ce1c808891e",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 84,
"avg_line_length": 31.719298245614034,
"alnum_prop": 0.599004424778761,
"repo_name": "MatthieuDartiailh/eapii",
"id": "3320a228633cee5e966ebd00b7afa3a69e5866b4",
"size": "2217",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "176627"
}
],
"symlink_target": ""
} |
package help
import (
"bufio"
"fmt"
"io"
"os"
"strings"
"text/template"
"unicode"
"unicode/utf8"
"cmd/go/internal/base"
"cmd/go/internal/modload"
)
// Help implements the 'help' command.
//
// The special invocation 'go help documentation' writes the generated
// doc.go source (license header, per-command documentation, package
// clause) to w instead of printing help for a topic.
func Help(w io.Writer, args []string) {
	// 'go help documentation' generates doc.go.
	if len(args) == 1 && args[0] == "documentation" {
		fmt.Fprintln(w, "// Copyright 2011 The Go Authors. All rights reserved.")
		fmt.Fprintln(w, "// Use of this source code is governed by a BSD-style")
		fmt.Fprintln(w, "// license that can be found in the LICENSE file.")
		fmt.Fprintln(w)
		fmt.Fprintln(w, "// Code generated by mkalldocs.sh; DO NOT EDIT.")
		fmt.Fprintln(w, "// Edit the documentation in other files and rerun mkalldocs.sh to generate this one.")
		fmt.Fprintln(w)
		buf := new(strings.Builder)
		PrintUsage(buf, base.Go)
		usage := &base.Command{Long: buf.String()}
		cmds := []*base.Command{usage}
		for _, cmd := range base.Go.Commands {
			// Avoid duplication of the "get" documentation.
			if cmd.UsageLine == "module-get" && modload.Enabled() {
				continue
			} else if cmd.UsageLine == "gopath-get" && !modload.Enabled() {
				continue
			}
			cmds = append(cmds, cmd)
			cmds = append(cmds, cmd.Commands...)
		}
		// commentWriter turns the rendered documentation into a // comment block.
		tmpl(&commentWriter{W: w}, documentationTemplate, cmds)
		fmt.Fprintln(w, "package main")
		return
	}
	// Walk the command tree, consuming one argument per level. An unknown
	// topic reports the longest prefix of args that would have succeeded.
	cmd := base.Go
Args:
	for i, arg := range args {
		for _, sub := range cmd.Commands {
			if sub.Name() == arg {
				cmd = sub
				continue Args
			}
		}
		// helpSuccess is the help command using as many args as possible that would succeed.
		helpSuccess := "go help"
		if i > 0 {
			helpSuccess += " " + strings.Join(args[:i], " ")
		}
		fmt.Fprintf(os.Stderr, "go help %s: unknown help topic. Run '%s'.\n", strings.Join(args, " "), helpSuccess)
		base.SetExitStatus(2) // failed at 'go help cmd'
		base.Exit()
	}
	// Commands with subcommands print a usage listing; leaf commands print
	// their own long help text.
	if len(cmd.Commands) > 0 {
		PrintUsage(os.Stdout, cmd)
	} else {
		tmpl(os.Stdout, helpTemplate, cmd)
	}
	// not exit 2: succeeded at 'go help cmd'.
	return
}
// usageTemplate renders a command's long text, its usage line, the list of
// its runnable subcommands, and (for the root "go" command) additional
// non-runnable help topics.
var usageTemplate = `{{.Long | trim}}
Usage:
{{.UsageLine}} <command> [arguments]
The commands are:
{{range .Commands}}{{if or (.Runnable) .Commands}}
{{.Name | printf "%-11s"}} {{.Short}}{{end}}{{end}}
Use "go help{{with .LongName}} {{.}}{{end}} <command>" for more information about a command.
{{if eq (.UsageLine) "go"}}
Additional help topics:
{{range .Commands}}{{if and (not .Runnable) (not .Commands)}}
{{.Name | printf "%-15s"}} {{.Short}}{{end}}{{end}}
Use "go help{{with .LongName}} {{.}}{{end}} <topic>" for more information about that topic.
{{end}}
`

// helpTemplate renders the long help text for a single command.
var helpTemplate = `{{if .Runnable}}usage: {{.UsageLine}}
{{end}}{{.Long | trim}}
`

// documentationTemplate renders the documentation for every command in
// sequence; Help uses it with a commentWriter to generate doc.go.
var documentationTemplate = `{{range .}}{{if .Short}}{{.Short | capitalize}}
{{end}}{{if .Commands}}` + usageTemplate + `{{else}}{{if .Runnable}}Usage:
{{.UsageLine}}
{{end}}{{.Long | trim}}
{{end}}{{end}}`
// commentWriter writes a Go comment to the underlying io.Writer,
// using line comment form (//).
type commentWriter struct {
	W            io.Writer
	wroteSlashes bool // Wrote "//" at the beginning of the current line.
}

// Write emits p to the underlying writer, prefixing every output line with
// "//" (plus a trailing space unless the line is empty). Bytes are written
// one at a time so the prefix can be inserted exactly at line starts.
func (c *commentWriter) Write(p []byte) (int, error) {
	var n int
	for i, b := range p {
		if !c.wroteSlashes {
			s := "//"
			if b != '\n' {
				// Non-empty line: add a space after the slashes.
				s = "// "
			}
			if _, err := io.WriteString(c.W, s); err != nil {
				return n, err
			}
			c.wroteSlashes = true
		}
		n0, err := c.W.Write(p[i : i+1])
		n += n0
		if err != nil {
			return n, err
		}
		if b == '\n' {
			// The next byte starts a fresh line and needs a new prefix.
			c.wroteSlashes = false
		}
	}
	return len(p), nil
}
// An errWriter wraps a writer, recording whether a write error occurred.
type errWriter struct {
w io.Writer
err error
}
func (w *errWriter) Write(b []byte) (int, error) {
n, err := w.w.Write(b)
if err != nil {
w.err = err
}
return n, err
}
// tmpl executes the given template text on data, writing the result to w.
// The functions "trim" and "capitalize" are available inside the template.
// Write (I/O) errors are fatal, except broken pipes which exit quietly with
// status 1; template execution errors indicate a programmer mistake and panic.
func tmpl(w io.Writer, text string, data any) {
	t := template.New("top")
	t.Funcs(template.FuncMap{"trim": strings.TrimSpace, "capitalize": capitalize})
	template.Must(t.Parse(text))
	// errWriter separates I/O failures from template execution failures.
	ew := &errWriter{w: w}
	err := t.Execute(ew, data)
	if ew.err != nil {
		// I/O error writing. Ignore write on closed pipe.
		if strings.Contains(ew.err.Error(), "pipe") {
			base.SetExitStatus(1)
			base.Exit()
		}
		base.Fatalf("writing output: %v", ew.err)
	}
	if err != nil {
		panic(err)
	}
}
// capitalize returns s with its first rune converted to title case.
// The empty string is returned unchanged.
func capitalize(s string) string {
	if len(s) == 0 {
		return s
	}
	first, size := utf8.DecodeRuneInString(s)
	return string(unicode.ToTitle(first)) + s[size:]
}
// PrintUsage renders usageTemplate for cmd to w, buffering the output so
// the template executes against a single flushed write.
func PrintUsage(w io.Writer, cmd *base.Command) {
	bw := bufio.NewWriter(w)
	tmpl(bw, usageTemplate, cmd)
	bw.Flush()
}
| {
"content_hash": "3a26421ebdf1641226a51c76dd0bef7c",
"timestamp": "",
"source": "github",
"line_count": 190,
"max_line_length": 109,
"avg_line_length": 24.278947368421054,
"alnum_prop": 0.6247561239973987,
"repo_name": "golang/go",
"id": "804c910dacc2ee23f3ecfd5af5c06ae08001f936",
"size": "4827",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/cmd/go/internal/help/help.go",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "2705689"
},
{
"name": "Awk",
"bytes": "450"
},
{
"name": "Batchfile",
"bytes": "8497"
},
{
"name": "C",
"bytes": "127970"
},
{
"name": "C++",
"bytes": "917"
},
{
"name": "Dockerfile",
"bytes": "2789"
},
{
"name": "Fortran",
"bytes": "100"
},
{
"name": "Go",
"bytes": "41103717"
},
{
"name": "HTML",
"bytes": "2621340"
},
{
"name": "JavaScript",
"bytes": "20492"
},
{
"name": "Makefile",
"bytes": "748"
},
{
"name": "Perl",
"bytes": "31365"
},
{
"name": "Python",
"bytes": "15738"
},
{
"name": "Shell",
"bytes": "62900"
}
],
"symlink_target": ""
} |
#include <Windows.h>
#include <winevt.h>
#include <boost/algorithm/string.hpp>
#include <boost/property_tree/ptree.hpp>
#include <boost/property_tree/xml_parser.hpp>
#include <osquery/core/tables.h>
#include <osquery/logger/logger.h>
#include <osquery/core/windows/wmi.h>
#include <osquery/events/windows/windowseventlogparser.h>
#include <osquery/sql/dynamic_table_row.h>
#include <osquery/tables/system/windows/windows_eventlog.h>
#include <osquery/utils/conversions/join.h>
#include <osquery/utils/conversions/split.h>
#include <osquery/utils/conversions/windows/strings.h>
namespace pt = boost::property_tree;

namespace osquery {
namespace tables {

// XML fragments wrapped around per-channel <Select> elements to build a
// complete Windows Event Log structured query document.
const std::string kEventLogXmlPrefix = "<QueryList><Query Id=\"0\">";
const std::string kEventLogXmlSuffix = "</Query></QueryList>";
// Parses one rendered Windows Event Log XML document into a table row.
// Returns a failed Status (logged at VLOG(1)) when either the XML or the
// resulting property tree cannot be parsed. The time_range / timestamp /
// xpath columns are echoed back from the query constraints so that
// predicate-driven queries round-trip their inputs.
Status parseWelXml(QueryContext& context, std::wstring& xml_event, Row& row) {
  pt::ptree propTree;
  WELEvent windows_event;

  auto xml_status = parseWindowsEventLogXML(propTree, xml_event);
  if (!xml_status.ok()) {
    VLOG(1) << "Error parsing event log XML: " << xml_status.toString();
    return xml_status;
  }

  auto pt_status = parseWindowsEventLogPTree(windows_event, propTree);
  if (!pt_status.ok()) {
    VLOG(1) << "Error parsing event log PTree: " << pt_status.toString();
    return pt_status;
  }

  // Copy the parsed event fields into the row.
  row["time"] = INTEGER(windows_event.osquery_time);
  row["datetime"] = SQL_TEXT(windows_event.datetime);
  row["channel"] = SQL_TEXT(windows_event.source);
  row["provider_name"] = SQL_TEXT(windows_event.provider_name);
  row["provider_guid"] = SQL_TEXT(windows_event.provider_guid);
  row["eventid"] = INTEGER(windows_event.event_id);
  row["task"] = INTEGER(windows_event.task_id);
  row["level"] = INTEGER(windows_event.level);
  row["pid"] = INTEGER(windows_event.pid);
  row["tid"] = INTEGER(windows_event.tid);
  row["keywords"] = SQL_TEXT(windows_event.keywords);
  row["data"] = SQL_TEXT(windows_event.data);

  // Echo back the hidden/predicate columns supplied in the query.
  if (context.hasConstraint("time_range", EQUALS)) {
    auto time_range = context.constraints["time_range"].getAll(EQUALS);
    row["time_range"] = SQL_TEXT(*time_range.begin());
  } else {
    row["time_range"] = SQL_TEXT("");
  }

  if (context.hasConstraint("timestamp", EQUALS)) {
    auto timestamp = context.constraints["timestamp"].getAll(EQUALS);
    row["timestamp"] = SQL_TEXT(*timestamp.begin());
  }

  if (context.hasConstraint("xpath", EQUALS)) {
    auto xpaths = context.constraints["xpath"].getAll(EQUALS);
    row["xpath"] = SQL_TEXT(*xpaths.begin());
  }

  return Status::success();
}
// Drains all events from an EvtQuery result handle, renders each one as
// XML (two-pass EvtRender: size probe, then render), parses it, and yields
// one row per successfully parsed event. Batches that trip the
// undocumented RPC_S_INVALID_BOUND failure are retried with half the
// batch size after re-seeking to the last consumed position.
void renderQueryResults(QueryContext& context,
                        EVT_HANDLE queryResults,
                        RowYield& yield) {
  uint32_t numEventsBlock = 1024;
  uint32_t position = 0;
  std::vector<EVT_HANDLE> events(numEventsBlock);

  // The batch size should be more than 32. It is not documented
  // but `EvtNext` should not fail (RPC_S_INVALID_BOUND error)
  // with low batch size.
  while (numEventsBlock > 32) {
    unsigned long numEvents = 0;

    // Retrieve the events one block at a time
    auto ret = EvtNext(
        queryResults, numEventsBlock, events.data(), INFINITE, 0, &numEvents);
    while (ret != FALSE) {
      for (unsigned long i = 0; i < numEvents; i++) {
        unsigned long renderedBuffSize = 0;
        unsigned long renderedBuffUsed = 0;
        unsigned long propCount = 0;
        position += 1;
        // First EvtRender call with a null buffer only discovers the
        // required size; ERROR_INSUFFICIENT_BUFFER is the expected result.
        if (!EvtRender(nullptr,
                       events[i],
                       EvtRenderEventXml,
                       renderedBuffSize,
                       nullptr,
                       &renderedBuffUsed,
                       &propCount)) {
          if (GetLastError() != ERROR_INSUFFICIENT_BUFFER) {
            LOG(WARNING) << "Failed to get the size of rendered event "
                         << GetLastError();
            EvtClose(events[i]);
            continue;
          }
        }

        // Second call renders the event XML into a right-sized buffer.
        std::vector<wchar_t> renderedContent(renderedBuffUsed);
        renderedBuffSize = renderedBuffUsed;
        if (!EvtRender(nullptr,
                       events[i],
                       EvtRenderEventXml,
                       renderedBuffSize,
                       renderedContent.data(),
                       &renderedBuffUsed,
                       &propCount)) {
          LOG(WARNING) << "Failed to render windows event with "
                       << GetLastError();
          EvtClose(events[i]);
          continue;
        }
        EvtClose(events[i]);

        Row row;
        std::wstringstream xml_event;
        xml_event << renderedContent.data();
        auto status = parseWelXml(context, xml_event.str(), row);
        if (status.ok()) {
          yield(TableRowHolder(new DynamicTableRow(std::move(row))));
        }
      }
      ret = EvtNext(
          queryResults, numEventsBlock, events.data(), INFINITE, 0, &numEvents);
    }

    // While reading a batch of large event log reports `EvtNext` may
    // fail with error code 1734 (RPC_S_INVALID_BOUND) and loose the
    // chunk of events. This is an unusual behavior and not documented.
    // The fix reduces the batch size to half and retries `EvtNext`
    if (RPC_S_INVALID_BOUND == GetLastError()) {
      numEventsBlock = numEventsBlock / 2;
      // Resize the events vector to the current batch size
      events.resize(numEventsBlock);

      // `EvtNext` may update the event position in query handler on
      // failure with RPC_S_INVALID_BOUND error. `EvtSeek` reset the
      // position before calling EvtNext with lower batch size.
      if (!EvtSeek(
              queryResults, position, nullptr, 0, EvtSeekRelativeToFirst)) {
        VLOG(1) << "EvtSeek failed with error " << GetLastError();
      }
      continue;
    }
    break;
  }

  if (ERROR_NO_MORE_ITEMS != GetLastError()) {
    // No need to close the handler after error; The query
    // EvtClose will also close all the event handler
    VLOG(1) << "EvtNext failed with error " << GetLastError();
  }
}
// Builds an event-log XPath filter string from the query constraints
// (eventid, pid, time_range, timestamp). With no usable constraints the
// filter is "*" (match everything); otherwise the clauses are AND-ed
// inside "*[System[...]]". time_range accepts "start" or "start;end"
// (';'-separated datetimes); timestamp is a relative bound applied with
// the XPath timediff() helper. time_range takes precedence over timestamp.
void genXfilterFromConstraints(QueryContext& context, std::string& xfilter) {
  std::vector<std::string> xfilterList;

  // Multiple eventid values become an OR-chain of EventID clauses.
  auto eids = context.constraints["eventid"].getAll(EQUALS);
  if (!eids.empty()) {
    xfilterList.emplace_back(
        "(EventID=" + osquery::join(eids, ") or (EventID=") + ")");
  }

  // Multiple pid values become an OR-chain of Execution/@ProcessID clauses.
  auto pids = context.constraints["pid"].getAll(EQUALS);
  if (!pids.empty()) {
    xfilterList.emplace_back(
        "(Execution[@ProcessID=" +
        osquery::join(pids, "]) or (Execution[@ProcessID=") + "])");
  }

  auto times = context.constraints["time_range"].getAll(EQUALS);
  auto timestamps = context.constraints["timestamp"].getAll(EQUALS);

  if (!times.empty()) {
    auto datetime = *times.begin();
    auto time_vec = osquery::split(datetime, ";");
    if (time_vec.size() == 1) {
      // Only a start bound was given.
      auto _start = time_vec.front();
      xfilterList.emplace_back("TimeCreated[@SystemTime>='" + _start + "']");
    } else if (time_vec.size() == 2) {
      // Both start and end bounds were given.
      auto _start = time_vec.front();
      auto _end = time_vec.at(1);
      xfilterList.emplace_back("TimeCreated[@SystemTime>='" + _start +
                               "' and @SystemTime<='" + _end + "']");
    }
  } else if (!timestamps.empty()) {
    auto time_diff = *timestamps.begin();
    xfilterList.emplace_back(
        "TimeCreated[timediff(@SystemTime) <= " + time_diff + "]");
  }

  xfilter = xfilterList.empty()
                ? "*"
                : "*[System[" + osquery::join(xfilterList, " and ") + "]]";
}
// Returns true when servicing the query via its xpath constraint is valid.
// xpath is mutually exclusive: at most one value may be supplied, and it
// cannot be combined with channel, time_range, or timestamp constraints.
bool shouldHandleXpath(QueryContext& context) {
  const auto xpath_values = context.constraints["xpath"].getAll(EQUALS);
  if (xpath_values.size() > 1) {
    return false;
  }
  const bool has_conflicting_constraint =
      context.hasConstraint("channel", EQUALS) ||
      context.hasConstraint("time_range", EQUALS) ||
      context.hasConstraint("timestamp", EQUALS);
  return !has_conflicting_constraint;
}
// Table generator entry point. Builds a set of (channel, query-XML) pairs
// either from an explicit xpath constraint or from channel constraints
// combined with genXfilterFromConstraints, then runs each query newest-first
// via EvtQuery and streams the rendered events through renderQueryResults.
void genWindowsEventLog(RowYield& yield, QueryContext& context) {
  std::set<std::pair<std::string, std::string>> xpath_set;

  auto hasXpath = context.hasConstraint("xpath", EQUALS);
  if (hasXpath && !shouldHandleXpath(context)) {
    LOG(WARNING) << "Xpaths are mutually exclusive and cannot be "
                    "used with constraints (channel, time_range, timestamp)";
    return;
  }

  // Check if the `xpath` constraint is available and query
  // the events with xpath
  if (hasXpath) {
    auto xpaths = context.constraints["xpath"].getAll(EQUALS);
    auto xpath = *xpaths.begin();
    try {
      // The channel is extracted from the QueryList document itself.
      pt::ptree propTree;
      std::stringstream ss;
      ss << xpath;
      pt::read_xml(ss, propTree);
      auto channel = propTree.get("QueryList.Query.Select.<xmlattr>.Path", "");
      if (!channel.empty()) {
        xpath_set.insert(std::make_pair(channel, xpath));
      } else {
        LOG(WARNING) << "Invalid xpath format: " << xpath;
      }
    } catch (std::exception& e) {
      LOG(WARNING) << "Failed to parse the xpath xml string " << e.what();
      return;
    }
  } else if (context.hasConstraint("channel", EQUALS)) {
    // Build one QueryList document per requested channel, sharing the
    // same System-filter derived from the remaining constraints.
    auto channels = context.constraints["channel"].getAll(EQUALS);
    std::string xfilter("");
    genXfilterFromConstraints(context, xfilter);
    std::string welSearchQuery = kEventLogXmlPrefix;

    for (const auto& channel : channels) {
      welSearchQuery += "<Select Path=\"" + channel + "\">";
      welSearchQuery += xfilter;
      welSearchQuery += "</Select>" + kEventLogXmlSuffix;
      xpath_set.insert(std::make_pair(channel, welSearchQuery));
    }
  } else {
    LOG(WARNING) << "Query constraints are invalid: the event "
                    "channel or xpath must be specified";
    return;
  }

  for (const auto& path : xpath_set) {
    // Newest events first (EvtQueryReverseDirection).
    auto queryResults =
        EvtQuery(nullptr,
                 stringToWstring(path.first).c_str(),
                 stringToWstring(path.second).c_str(),
                 EvtQueryChannelPath | EvtQueryReverseDirection);

    if (queryResults == nullptr) {
      LOG(WARNING) << "Failed to search event log for query with "
                   << GetLastError();
      return;
    }
    renderQueryResults(context, queryResults, yield);
    EvtClose(queryResults);
  }
}
}; // namespace tables
}; // namespace osquery
| {
"content_hash": "d3bfcba761e95c9181429e7e8c3a28ae",
"timestamp": "",
"source": "github",
"line_count": 297,
"max_line_length": 80,
"avg_line_length": 34.51178451178451,
"alnum_prop": 0.616780487804878,
"repo_name": "hackgnar/osquery",
"id": "0461c25acac514e1bdaedf081dca8d2223a07261",
"size": "10486",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "osquery/tables/system/windows/windows_eventlog.cpp",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "38093"
},
{
"name": "C++",
"bytes": "2088263"
},
{
"name": "CMake",
"bytes": "72851"
},
{
"name": "Makefile",
"bytes": "6220"
},
{
"name": "Objective-C++",
"bytes": "62318"
},
{
"name": "Shell",
"bytes": "2038"
},
{
"name": "Thrift",
"bytes": "2969"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<META http-equiv="Content-Type" content="text/html; charset=UTF-8">
<TITLE>
Uses of Class org.apache.cxf.configuration.security.SecureRandomParameters (Apache CXF API 2.7.5 API)
</TITLE>
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.apache.cxf.configuration.security.SecureRandomParameters (Apache CXF API 2.7.5 API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../org/apache/cxf/configuration/security/SecureRandomParameters.html" title="class in org.apache.cxf.configuration.security"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
Apache CXF API</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../index.html?org/apache/cxf/configuration/security//class-useSecureRandomParameters.html" target="_top"><B>FRAMES</B></A>
<A HREF="SecureRandomParameters.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Class<br>org.apache.cxf.configuration.security.SecureRandomParameters</B></H2>
</CENTER>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Packages that use <A HREF="../../../../../../org/apache/cxf/configuration/security/SecureRandomParameters.html" title="class in org.apache.cxf.configuration.security">SecureRandomParameters</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.cxf.configuration.jsse"><B>org.apache.cxf.configuration.jsse</B></A></TD>
<TD> </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.cxf.configuration.security"><B>org.apache.cxf.configuration.security</B></A></TD>
<TD> </TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.cxf.configuration.jsse"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Uses of <A HREF="../../../../../../org/apache/cxf/configuration/security/SecureRandomParameters.html" title="class in org.apache.cxf.configuration.security">SecureRandomParameters</A> in <A HREF="../../../../../../org/apache/cxf/configuration/jsse/package-summary.html">org.apache.cxf.configuration.jsse</A></FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">Methods in <A HREF="../../../../../../org/apache/cxf/configuration/jsse/package-summary.html">org.apache.cxf.configuration.jsse</A> with parameters of type <A HREF="../../../../../../org/apache/cxf/configuration/security/SecureRandomParameters.html" title="class in org.apache.cxf.configuration.security">SecureRandomParameters</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>static <A HREF="http://docs.oracle.com/javase/6/docs/api/java/security/SecureRandom.html?is-external=true" title="class or interface in java.security">SecureRandom</A></CODE></FONT></TD>
<TD><CODE><B>TLSParameterJaxBUtils.</B><B><A HREF="../../../../../../org/apache/cxf/configuration/jsse/TLSParameterJaxBUtils.html#getSecureRandom(org.apache.cxf.configuration.security.SecureRandomParameters)">getSecureRandom</A></B>(<A HREF="../../../../../../org/apache/cxf/configuration/security/SecureRandomParameters.html" title="class in org.apache.cxf.configuration.security">SecureRandomParameters</A> secureRandomParams)</CODE>
<BR>
This method converts the JAXB generated type into a SecureRandom.</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.cxf.configuration.security"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Uses of <A HREF="../../../../../../org/apache/cxf/configuration/security/SecureRandomParameters.html" title="class in org.apache.cxf.configuration.security">SecureRandomParameters</A> in <A HREF="../../../../../../org/apache/cxf/configuration/security/package-summary.html">org.apache.cxf.configuration.security</A></FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">Fields in <A HREF="../../../../../../org/apache/cxf/configuration/security/package-summary.html">org.apache.cxf.configuration.security</A> declared as <A HREF="../../../../../../org/apache/cxf/configuration/security/SecureRandomParameters.html" title="class in org.apache.cxf.configuration.security">SecureRandomParameters</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>protected <A HREF="../../../../../../org/apache/cxf/configuration/security/SecureRandomParameters.html" title="class in org.apache.cxf.configuration.security">SecureRandomParameters</A></CODE></FONT></TD>
<TD><CODE><B>TLSClientParametersType.</B><B><A HREF="../../../../../../org/apache/cxf/configuration/security/TLSClientParametersType.html#secureRandomParameters">secureRandomParameters</A></B></CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>protected <A HREF="../../../../../../org/apache/cxf/configuration/security/SecureRandomParameters.html" title="class in org.apache.cxf.configuration.security">SecureRandomParameters</A></CODE></FONT></TD>
<TD><CODE><B>TLSServerParametersType.</B><B><A HREF="../../../../../../org/apache/cxf/configuration/security/TLSServerParametersType.html#secureRandomParameters">secureRandomParameters</A></B></CODE>
<BR>
</TD>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">Methods in <A HREF="../../../../../../org/apache/cxf/configuration/security/package-summary.html">org.apache.cxf.configuration.security</A> that return <A HREF="../../../../../../org/apache/cxf/configuration/security/SecureRandomParameters.html" title="class in org.apache.cxf.configuration.security">SecureRandomParameters</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> <A HREF="../../../../../../org/apache/cxf/configuration/security/SecureRandomParameters.html" title="class in org.apache.cxf.configuration.security">SecureRandomParameters</A></CODE></FONT></TD>
<TD><CODE><B>ObjectFactory.</B><B><A HREF="../../../../../../org/apache/cxf/configuration/security/ObjectFactory.html#createSecureRandomParameters()">createSecureRandomParameters</A></B>()</CODE>
<BR>
Create an instance of <A HREF="../../../../../../org/apache/cxf/configuration/security/SecureRandomParameters.html" title="class in org.apache.cxf.configuration.security"><CODE>SecureRandomParameters</CODE></A></TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> <A HREF="../../../../../../org/apache/cxf/configuration/security/SecureRandomParameters.html" title="class in org.apache.cxf.configuration.security">SecureRandomParameters</A></CODE></FONT></TD>
<TD><CODE><B>TLSClientParametersType.</B><B><A HREF="../../../../../../org/apache/cxf/configuration/security/TLSClientParametersType.html#getSecureRandomParameters()">getSecureRandomParameters</A></B>()</CODE>
<BR>
Gets the value of the secureRandomParameters property.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> <A HREF="../../../../../../org/apache/cxf/configuration/security/SecureRandomParameters.html" title="class in org.apache.cxf.configuration.security">SecureRandomParameters</A></CODE></FONT></TD>
<TD><CODE><B>TLSServerParametersType.</B><B><A HREF="../../../../../../org/apache/cxf/configuration/security/TLSServerParametersType.html#getSecureRandomParameters()">getSecureRandomParameters</A></B>()</CODE>
<BR>
Gets the value of the secureRandomParameters property.</TD>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">Methods in <A HREF="../../../../../../org/apache/cxf/configuration/security/package-summary.html">org.apache.cxf.configuration.security</A> with parameters of type <A HREF="../../../../../../org/apache/cxf/configuration/security/SecureRandomParameters.html" title="class in org.apache.cxf.configuration.security">SecureRandomParameters</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> void</CODE></FONT></TD>
<TD><CODE><B>TLSClientParametersType.</B><B><A HREF="../../../../../../org/apache/cxf/configuration/security/TLSClientParametersType.html#setSecureRandomParameters(org.apache.cxf.configuration.security.SecureRandomParameters)">setSecureRandomParameters</A></B>(<A HREF="../../../../../../org/apache/cxf/configuration/security/SecureRandomParameters.html" title="class in org.apache.cxf.configuration.security">SecureRandomParameters</A> value)</CODE>
<BR>
Sets the value of the secureRandomParameters property.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> void</CODE></FONT></TD>
<TD><CODE><B>TLSServerParametersType.</B><B><A HREF="../../../../../../org/apache/cxf/configuration/security/TLSServerParametersType.html#setSecureRandomParameters(org.apache.cxf.configuration.security.SecureRandomParameters)">setSecureRandomParameters</A></B>(<A HREF="../../../../../../org/apache/cxf/configuration/security/SecureRandomParameters.html" title="class in org.apache.cxf.configuration.security">SecureRandomParameters</A> value)</CODE>
<BR>
Sets the value of the secureRandomParameters property.</TD>
</TR>
</TABLE>
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../org/apache/cxf/configuration/security/SecureRandomParameters.html" title="class in org.apache.cxf.configuration.security"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
Apache CXF API</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../index.html?org/apache/cxf/configuration/security//class-useSecureRandomParameters.html" target="_top"><B>FRAMES</B></A>
<A HREF="SecureRandomParameters.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
Apache CXF
</BODY>
</HTML>
| {
"content_hash": "b77720852621c6ad04e73e623e31ebec",
"timestamp": "",
"source": "github",
"line_count": 272,
"max_line_length": 455,
"avg_line_length": 58.05882352941177,
"alnum_prop": 0.6755952380952381,
"repo_name": "mulesoft-consulting/sumtotal-connector",
"id": "b377f614d708e37920311470f786db6b248c26aa",
"size": "15792",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/apache-cxf-2.7.5/docs/api/org/apache/cxf/configuration/security/class-use/SecureRandomParameters.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Groovy",
"bytes": "1843"
},
{
"name": "Java",
"bytes": "1644889"
},
{
"name": "JavaScript",
"bytes": "37150"
},
{
"name": "Perl",
"bytes": "3647"
},
{
"name": "Ruby",
"bytes": "951"
},
{
"name": "Scala",
"bytes": "108"
},
{
"name": "Shell",
"bytes": "59560"
}
],
"symlink_target": ""
} |
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path");
const fs = require("fs");
const dynamic_path_parser_1 = require("./dynamic-path-parser");
/**
 * Resolves the module name given on the CLI flag to the absolute path of an
 * existing `<name>.module.ts` file.
 *
 * Two candidate locations are probed in order:
 *   1. `<parents>/<name>.module.ts`
 *   2. `<parents>/<name>/<name>.module.ts`
 * Throws the string 'Specified module does not exist' when neither exists.
 */
function resolveModulePath(moduleNameFromFlag, project, projectRoot, appConfig) {
    // Split any leading folder segments off the supplied name.
    let moduleName = moduleNameFromFlag;
    let containingFolders = '';
    if (moduleName.includes(path.sep)) {
        const segments = moduleName.split(path.sep);
        moduleName = segments.pop();
        containingFolders = segments.join(path.sep);
    }
    // Drop ".module" / ".ts" pieces if a file name was supplied.
    if (moduleName.includes('.')) {
        moduleName = moduleName
            .split('.')
            .filter(part => part !== 'module' && part !== 'ts')
            .join('.');
    }
    const moduleFileName = `${moduleName}.module.ts`;
    // Candidate 1: module file directly under the parent folders.
    let candidatePath = buildFullPath(project, path.join(containingFolders, moduleFileName), appConfig, projectRoot);
    if (!fs.existsSync(candidatePath)) {
        // Candidate 2: module file inside a folder named after the module.
        candidatePath = buildFullPath(project, path.join(containingFolders, moduleName, moduleFileName), appConfig, projectRoot);
    }
    if (!fs.existsSync(candidatePath)) {
        throw 'Specified module does not exist';
    }
    return candidatePath;
}
exports.resolveModulePath = resolveModulePath;
/**
 * Joins the project root with the directory/base produced by the dynamic
 * path parser for the given app-relative module path.
 */
function buildFullPath(project, relativeModulePath, appConfig, projectRoot) {
    const parsed = dynamic_path_parser_1.dynamicPathParser(project, relativeModulePath, appConfig);
    return path.join(projectRoot, parsed.dir, parsed.base);
}
//# sourceMappingURL=/users/johnny/myfiles/angular-cli/utilities/resolve-module-file.js.map | {
"content_hash": "8bb49e2a17cc1252c7f016be1303d818",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 106,
"avg_line_length": 47.526315789473685,
"alnum_prop": 0.7137320044296789,
"repo_name": "duxiaofeng-github/angular-cli-dist",
"id": "283e2e6868c1c31e126e4ecb62ab134898d2bfa6",
"size": "1806",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "utilities/resolve-module-file.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "457"
},
{
"name": "JavaScript",
"bytes": "143804"
},
{
"name": "TypeScript",
"bytes": "49811"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<DatePicker
xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@+id/date_picker"
android:layout_width="wrap_content"
android:layout_height="wrap_content"/> | {
"content_hash": "fdf359c798d11bfdf856816817e4cac0",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 66,
"avg_line_length": 42.5,
"alnum_prop": 0.6274509803921569,
"repo_name": "tntntnt7/TN-zhihu",
"id": "3416fde8321e27b57362b55ba5a4e22fcf913c24",
"size": "255",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/res/layout/am_date_picker.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "90544"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="refresh" content="0;URL=fn.SDL_GameControllerName.html">
</head>
<body>
<p>Redirecting to <a href="fn.SDL_GameControllerName.html">fn.SDL_GameControllerName.html</a>...</p>
<script>location.replace("fn.SDL_GameControllerName.html" + location.search + location.hash);</script>
</body>
</html> | {
"content_hash": "69208554342a4fb27188520ade4fa365",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 106,
"avg_line_length": 36.1,
"alnum_prop": 0.703601108033241,
"repo_name": "nitro-devs/nitro-game-engine",
"id": "58343147fab6208bc4ae2a4ece4a4b77c958d0d9",
"size": "361",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/sdl2_sys/controller/SDL_GameControllerName.v.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CMake",
"bytes": "1032"
},
{
"name": "Rust",
"bytes": "59380"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<phpunit backupGlobals="false"
backupStaticAttributes="false"
bootstrap="vendor/autoload.php"
colors="true"
convertErrorsToExceptions="true"
convertNoticesToExceptions="true"
convertWarningsToExceptions="true"
processIsolation="false"
stopOnFailure="false"
syntaxCheck="true"
>
<testsuites>
<testsuite>
<directory>./tests/</directory>
</testsuite>
</testsuites>
</phpunit>
| {
"content_hash": "64467b9d676997b88ade76326f4787ca",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 43,
"avg_line_length": 29.38888888888889,
"alnum_prop": 0.6124763705103969,
"repo_name": "SugiPHP/Money",
"id": "006b8b9891f9e8169f13dcaac55c109abdc9925d",
"size": "529",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "phpunit.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "35501"
}
],
"symlink_target": ""
} |
#include <string.h>
#include "AnyShort.h"
#include "Index12Gray.h"
#include "AlphaMacros.h"
#include "IntArgb.h"
#include "IntArgbPre.h"
#include "IntRgb.h"
#include "ThreeByteBgr.h"
#include "ByteGray.h"
#include "ByteIndexed.h"
#include "Index8Gray.h"
/*
* This file declares, registers, and defines the various graphics
* primitive loops to manipulate surfaces of type "Index12Gray".
*
* See also LoopMacros.h
*/
RegisterFunc RegisterIndex12Gray;
DECLARE_CONVERT_BLIT(Index12Gray, IntArgb);
DECLARE_CONVERT_BLIT(IntArgb, Index12Gray);
DECLARE_CONVERT_BLIT(ThreeByteBgr, Index12Gray);
DECLARE_CONVERT_BLIT(ByteGray, Index12Gray);
DECLARE_CONVERT_BLIT(Index8Gray, Index12Gray);
DECLARE_CONVERT_BLIT(ByteIndexed, Index12Gray);
DECLARE_CONVERT_BLIT(Index12Gray, Index12Gray);
DECLARE_SCALE_BLIT(Index12Gray, Index12Gray);
DECLARE_SCALE_BLIT(Index12Gray, IntArgb);
DECLARE_SCALE_BLIT(IntArgb, Index12Gray);
DECLARE_SCALE_BLIT(ThreeByteBgr, Index12Gray);
DECLARE_SCALE_BLIT(UshortGray, Index12Gray);
DECLARE_SCALE_BLIT(ByteIndexed, Index12Gray);
DECLARE_SCALE_BLIT(ByteGray, Index12Gray);
DECLARE_SCALE_BLIT(Index8Gray, Index12Gray);
DECLARE_XPAR_CONVERT_BLIT(ByteIndexedBm, Index12Gray);
DECLARE_XPAR_BLITBG(ByteIndexedBm, Index12Gray);
DECLARE_XOR_BLIT(IntArgb, Index12Gray);
DECLARE_ALPHA_MASKFILL(Index12Gray);
DECLARE_ALPHA_MASKBLIT(IntArgb, Index12Gray);
DECLARE_ALPHA_MASKBLIT(IntArgbPre, Index12Gray);
DECLARE_ALPHA_MASKBLIT(IntRgb, Index12Gray);
DECLARE_SRCOVER_MASKFILL(Index12Gray);
DECLARE_SRCOVER_MASKBLIT(IntArgb, Index12Gray);
DECLARE_SRCOVER_MASKBLIT(IntArgbPre, Index12Gray);
DECLARE_SOLID_DRAWGLYPHLISTAA(Index12Gray);
DECLARE_TRANSFORMHELPER_FUNCS(Index12Gray);
NativePrimitive Index12GrayPrimitives[] = {
REGISTER_CONVERT_BLIT(IntArgb, Index12Gray),
REGISTER_CONVERT_BLIT_EQUIV(IntRgb, Index12Gray,
NAME_CONVERT_BLIT(IntArgb, Index12Gray)),
REGISTER_CONVERT_BLIT(ThreeByteBgr, Index12Gray),
REGISTER_CONVERT_BLIT(ByteGray, Index12Gray),
REGISTER_CONVERT_BLIT(Index8Gray, Index12Gray),
REGISTER_CONVERT_BLIT_FLAGS(Index12Gray, Index12Gray,
SD_LOCK_LUT,
SD_LOCK_LUT | SD_LOCK_INVGRAY),
REGISTER_CONVERT_BLIT(ByteIndexed, Index12Gray),
REGISTER_SCALE_BLIT(Index12Gray, IntArgb),
REGISTER_SCALE_BLIT(IntArgb, Index12Gray),
REGISTER_SCALE_BLIT_EQUIV(IntRgb, Index12Gray,
NAME_SCALE_BLIT(IntArgb, Index12Gray)),
REGISTER_SCALE_BLIT(ThreeByteBgr, Index12Gray),
REGISTER_SCALE_BLIT(UshortGray, Index12Gray),
REGISTER_SCALE_BLIT(ByteIndexed, Index12Gray),
REGISTER_SCALE_BLIT(ByteGray, Index12Gray),
REGISTER_SCALE_BLIT(Index8Gray, Index12Gray),
REGISTER_SCALE_BLIT_FLAGS(Index12Gray, Index12Gray, 0,
SD_LOCK_LUT | SD_LOCK_INVGRAY),
REGISTER_XPAR_CONVERT_BLIT(ByteIndexedBm, Index12Gray),
REGISTER_XPAR_BLITBG(ByteIndexedBm, Index12Gray),
REGISTER_XOR_BLIT(IntArgb, Index12Gray),
REGISTER_ALPHA_MASKFILL(Index12Gray),
REGISTER_ALPHA_MASKBLIT(IntArgb, Index12Gray),
REGISTER_ALPHA_MASKBLIT(IntArgbPre, Index12Gray),
REGISTER_ALPHA_MASKBLIT(IntRgb, Index12Gray),
REGISTER_SRCOVER_MASKFILL(Index12Gray),
REGISTER_SRCOVER_MASKBLIT(IntArgb, Index12Gray),
REGISTER_SRCOVER_MASKBLIT(IntArgbPre, Index12Gray),
REGISTER_SOLID_DRAWGLYPHLISTAA(Index12Gray),
REGISTER_TRANSFORMHELPER_FUNCS(Index12Gray),
};
extern jboolean checkSameLut(jint *SrcReadLut, jint *DstReadLut,
SurfaceDataRasInfo *pSrcInfo,
SurfaceDataRasInfo *pDstInfo);
/*
 * Registers the Index12Gray primitive loop table declared above with the
 * graphics primitive manager.  Returns JNI_TRUE on success.
 */
jboolean RegisterIndex12Gray(JNIEnv *env)
{
    return RegisterPrimitives(env, Index12GrayPrimitives,
                              ArraySize(Index12GrayPrimitives));
}
/*
 * Converts an XRGB pixel value to the Index12Gray pixel format:
 * extract the color components, compute the byte-gray luminance, and
 * map it to a palette index through the surface's inverse gray table.
 */
jint PixelForIndex12Gray(SurfaceDataRasInfo *pRasInfo, jint rgb)
{
    jint r, g, b, gray;
    ExtractIntDcmComponentsX123(rgb, r, g, b);
    gray = ComposeByteGrayFrom3ByteRgb(r, g, b);
    return pRasInfo->invGrayTable[gray];
}
DEFINE_CONVERT_BLIT(IntArgb, Index12Gray, 3ByteRgb)
DEFINE_CONVERT_BLIT(ThreeByteBgr, Index12Gray, 3ByteRgb)
DEFINE_CONVERT_BLIT(ByteGray, Index12Gray, 1ByteGray)
DEFINE_CONVERT_BLIT(Index8Gray, Index12Gray, 1ByteGray)
DEFINE_CONVERT_BLIT(ByteIndexed, Index12Gray, 3ByteRgb)
/*
 * Index12Gray-to-Index12Gray convert blit.
 *
 * When source and destination share the same color LUT the scanlines can
 * be copied verbatim; otherwise each pixel is decoded to gray via the
 * source LUT and re-encoded through the destination's inverse gray table.
 */
void NAME_CONVERT_BLIT(Index12Gray, Index12Gray)
    (void *srcBase, void *dstBase,
     juint width, juint height,
     SurfaceDataRasInfo *pSrcInfo,
     SurfaceDataRasInfo *pDstInfo,
     NativePrimitive *pPrim,
     CompositeInfo *pCompInfo)
{
    DeclareIndex12GrayLoadVars(SrcRead)
    DeclareIndex12GrayLoadVars(DstRead)
    jint srcScan = pSrcInfo->scanStride;
    jint dstScan = pDstInfo->scanStride;
    InitIndex12GrayLoadVars(SrcRead, pSrcInfo);
    InitIndex12GrayLoadVars(DstRead, pDstInfo);
    if (checkSameLut(SrcReadLut, DstReadLut, pSrcInfo, pDstInfo)) {
        do {
            /* NOTE(review): copies `width` BYTES per scanline; if an
             * Index12Gray pixel occupies 16 bits (AnyShort is used for
             * the XOR loop below) this presumably under-copies — confirm
             * the pixel stride against the surface definition. */
            memcpy(dstBase, srcBase, width);
            srcBase = PtrAddBytes(srcBase, srcScan);
            dstBase = PtrAddBytes(dstBase, dstScan);
        } while (--height > 0);
    } else {
        DeclareIndex12GrayStoreVars(DstWrite);
        InitIndex12GrayStoreVarsY(DstWrite, pDstInfo);
        BlitLoopWidthHeight(Index12Gray, pSrc, srcBase, pSrcInfo,
                            Index12Gray, pDst, dstBase, pDstInfo, DstWrite,
                            width, height,
                            ConvertVia1ByteGray
                                (pSrc, Index12Gray, SrcRead,
                                 pDst, Index12Gray, DstWrite, 0, 0));
    }
}
/*
 * Index12Gray-to-Index12Gray scale blit.
 *
 * Same-LUT surfaces copy pixels directly at the scaled positions;
 * otherwise pixels are converted through 1-byte gray.
 *
 * NOTE(review): the body uses the Index8Gray load/store macros throughout
 * even though this is the Index12Gray loop — this looks like a copy-paste
 * from Index8Gray.c.  If the two formats differ in pixel layout this
 * would read/write the wrong pixel size; confirm against the macro
 * definitions before changing.
 */
void NAME_SCALE_BLIT(Index12Gray, Index12Gray)
    (void *srcBase, void *dstBase,
     juint width, juint height,
     jint sxloc, jint syloc,
     jint sxinc, jint syinc, jint shift,
     SurfaceDataRasInfo *pSrcInfo,
     SurfaceDataRasInfo *pDstInfo,
     NativePrimitive *pPrim,
     CompositeInfo *pCompInfo)
{
    DeclareIndex8GrayLoadVars(SrcRead)
    DeclareIndex8GrayLoadVars(DstRead)
    jint srcScan = pSrcInfo->scanStride;
    jint dstScan = pDstInfo->scanStride;
    DeclareIndex8GrayStoreVars(DstWrite)
    InitIndex8GrayLoadVars(SrcRead, pSrcInfo);
    InitIndex8GrayLoadVars(DstRead, pDstInfo);
    if (checkSameLut(SrcReadLut, DstReadLut, pSrcInfo, pDstInfo)) {
        BlitLoopScaleWidthHeight(Index8Gray, pSrc, srcBase, pSrcInfo,
                                 Index8Gray, pDst, dstBase, pDstInfo, DstWrite,
                                 x, width, height,
                                 sxloc, syloc, sxinc, syinc, shift,
                                 pDst[0] = pSrc[x]);
    } else {
        DeclareIndex8GrayStoreVars(DstWrite);
        InitIndex8GrayStoreVarsY(DstWrite, pDstInfo);
        BlitLoopScaleWidthHeight(Index8Gray, pSrc, srcBase, pSrcInfo,
                                 Index8Gray, pDst, dstBase, pDstInfo, DstWrite,
                                 x, width, height,
                                 sxloc, syloc, sxinc, syinc, shift,
                                 ConvertVia1ByteGray(pSrc, Index8Gray, SrcRead,
                                                     pDst, Index8Gray, DstWrite,
                                                     x, 0));
    }
}
DEFINE_SCALE_BLIT(Index12Gray, IntArgb, 1IntArgb)
DEFINE_SCALE_BLIT(IntArgb, Index12Gray, 3ByteRgb)
DEFINE_SCALE_BLIT(ThreeByteBgr, Index12Gray, 3ByteRgb)
DEFINE_SCALE_BLIT(UshortGray, Index12Gray, 1ByteGray)
DEFINE_SCALE_BLIT_LUT8(ByteIndexed, Index12Gray, PreProcessLut)
DEFINE_SCALE_BLIT(ByteGray, Index12Gray, 1ByteGray)
DEFINE_SCALE_BLIT_LUT8(Index8Gray, Index12Gray, PreProcessLut)
DEFINE_XPAR_CONVERT_BLIT_LUT8(ByteIndexedBm, Index12Gray, PreProcessLut)
DEFINE_XPAR_BLITBG_LUT8(ByteIndexedBm, Index12Gray, PreProcessLut)
DEFINE_XOR_BLIT(IntArgb, Index12Gray, AnyShort)
DEFINE_ALPHA_MASKFILL(Index12Gray, 1ByteGray)
DEFINE_ALPHA_MASKBLIT(IntArgb, Index12Gray, 1ByteGray)
DEFINE_ALPHA_MASKBLIT(IntArgbPre, Index12Gray, 1ByteGray)
DEFINE_ALPHA_MASKBLIT(IntRgb, Index12Gray, 1ByteGray)
DEFINE_SRCOVER_MASKFILL(Index12Gray, 1ByteGray)
DEFINE_SRCOVER_MASKBLIT(IntArgb, Index12Gray, 1ByteGray)
DEFINE_SRCOVER_MASKBLIT(IntArgbPre, Index12Gray, 1ByteGray)
DEFINE_SOLID_DRAWGLYPHLISTAA(Index12Gray, 1ByteGray)
DEFINE_TRANSFORMHELPERS(Index12Gray)
| {
"content_hash": "bcb1ac3dec1835feabc1673967a5cd77",
"timestamp": "",
"source": "github",
"line_count": 235,
"max_line_length": 80,
"avg_line_length": 35.31063829787234,
"alnum_prop": 0.6975174740901422,
"repo_name": "andreagenso/java2scala",
"id": "5b95697af3c54a98e56b6f84849f07acd5653ff4",
"size": "9510",
"binary": false,
"copies": "84",
"ref": "refs/heads/master",
"path": "test/J2s/java/openjdk-6-src-b27/jdk/src/share/native/sun/java2d/loops/Index12Gray.c",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "8983"
},
{
"name": "Awk",
"bytes": "26041"
},
{
"name": "Batchfile",
"bytes": "1796"
},
{
"name": "C",
"bytes": "20159882"
},
{
"name": "C#",
"bytes": "7630"
},
{
"name": "C++",
"bytes": "4513460"
},
{
"name": "CSS",
"bytes": "5128"
},
{
"name": "DTrace",
"bytes": "68220"
},
{
"name": "HTML",
"bytes": "1302117"
},
{
"name": "Haskell",
"bytes": "244134"
},
{
"name": "Java",
"bytes": "129267130"
},
{
"name": "JavaScript",
"bytes": "182900"
},
{
"name": "Makefile",
"bytes": "711241"
},
{
"name": "Objective-C",
"bytes": "66163"
},
{
"name": "Python",
"bytes": "137817"
},
{
"name": "Roff",
"bytes": "2630160"
},
{
"name": "Scala",
"bytes": "25599"
},
{
"name": "Shell",
"bytes": "888136"
},
{
"name": "SourcePawn",
"bytes": "78"
}
],
"symlink_target": ""
} |
namespace GNF.Domain.Entities
{
    /// <summary>
    /// Basic contract for a domain entity with a primary key of type
    /// <typeparamref name="TPrimaryKey"/>.
    /// </summary>
    /// <typeparam name="TPrimaryKey">Type of the entity's primary key.</typeparam>
    public interface IEntity<TPrimaryKey>
    {
        /// <summary>
        /// Unique identifier for this entity.
        /// </summary>
        TPrimaryKey Id { get; set; }
        /// <summary>
        /// Indicates whether this entity is transient, i.e. does not need
        /// to be persisted (typically: not yet assigned a database Id).
        /// </summary>
        /// <returns>True, if this entity is transient</returns>
        bool IsTransient();
    }
    /// <summary>
    /// Shortcut for <see cref="IEntity{TPrimaryKey}"/> with an <see cref="int"/> primary key.
    /// </summary>
    public interface IEntity : IEntity<int>
    {
    }
}
| {
"content_hash": "d78de2ebad22a314109647b4826fd307",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 64,
"avg_line_length": 20.952380952380953,
"alnum_prop": 0.5340909090909091,
"repo_name": "rjf1979/GNF",
"id": "8f50ef23236217ece478add1b781c8342a36dca7",
"size": "466",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "GNF.Domain/Entities/IEntity.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "265495"
}
],
"symlink_target": ""
} |
/**
* @file
* This file includes definitions for ICMPv6.
*/
#ifndef ICMP6_HPP_
#define ICMP6_HPP_
#include "openthread-core-config.h"
#include <openthread/icmp6.h>
#include "common/encoding.hpp"
#include "common/locator.hpp"
#include "net/ip6_headers.hpp"
namespace ot {
namespace Ip6 {
using ot::Encoding::BigEndian::HostSwap16;
/**
* @addtogroup core-ip6-icmp6
*
* @brief
* This module includes definitions for ICMPv6.
*
* @{
*
*/
/**
 * This class implements ICMPv6 header generation and parsing.
 *
 */
OT_TOOL_PACKED_BEGIN
class IcmpHeader : public otIcmp6Header
{
public:
    /**
     * This method initializes the ICMPv6 header to all zeros.
     *
     */
    void Init(void)
    {
        mType = 0;
        mCode = 0;
        mChecksum = 0;
        mData.m32[0] = 0;
    }
    /**
     * ICMPv6 Message Types
     *
     */
    enum Type
    {
        kTypeDstUnreach = OT_ICMP6_TYPE_DST_UNREACH, ///< Destination Unreachable
        kTypePacketToBig = OT_ICMP6_TYPE_PACKET_TO_BIG, ///< Packet Too Big
        kTypeTimeExceeded = OT_ICMP6_TYPE_TIME_EXCEEDED, ///< Time Exceeded
        kTypeEchoRequest = OT_ICMP6_TYPE_ECHO_REQUEST, ///< Echo Request
        kTypeEchoReply = OT_ICMP6_TYPE_ECHO_REPLY, ///< Echo Reply
    };
    /**
     * ICMPv6 Message Codes
     *
     */
    enum Code
    {
        kCodeDstUnreachNoRoute = OT_ICMP6_CODE_DST_UNREACH_NO_ROUTE, ///< Destination Unreachable No Route
        kCodeFragmReasTimeEx = OT_ICMP6_CODE_FRAGM_REAS_TIME_EX, ///< Fragment Reassembly Time Exceeded
    };
    /**
     * This method returns the ICMPv6 message type.
     *
     * @returns The ICMPv6 message type.
     *
     */
    Type GetType(void) const { return static_cast<Type>(mType); }
    /**
     * This method sets the ICMPv6 message type.
     *
     * @param[in]  aType  The ICMPv6 message type.
     *
     */
    void SetType(Type aType) { mType = static_cast<uint8_t>(aType); }
    /**
     * This method returns the ICMPv6 message code.
     *
     * @returns The ICMPv6 message code.
     *
     */
    Code GetCode(void) const { return static_cast<Code>(mCode); }
    /**
     * This method sets the ICMPv6 message code.
     *
     * @param[in]  aCode  The ICMPv6 message code.
     *
     */
    void SetCode(Code aCode) { mCode = static_cast<uint8_t>(aCode); }
    /**
     * This method returns the ICMPv6 message checksum.
     *
     * @returns The ICMPv6 message checksum.
     *
     */
    uint16_t GetChecksum(void) const { return HostSwap16(mChecksum); }
    /**
     * This method sets the ICMPv6 message checksum.
     *
     * @param[in]  aChecksum  The ICMPv6 message checksum.
     *
     */
    void SetChecksum(uint16_t aChecksum) { mChecksum = HostSwap16(aChecksum); }
    /**
     * This method returns the ICMPv6 message ID for Echo Requests and Replies.
     *
     * @returns The ICMPv6 message ID.
     *
     */
    uint16_t GetId(void) const { return HostSwap16(mData.m16[0]); }
    /**
     * This method sets the ICMPv6 message ID for Echo Requests and Replies.
     *
     * @param[in]  aId  The ICMPv6 message ID.
     *
     */
    void SetId(uint16_t aId) { mData.m16[0] = HostSwap16(aId); }
    /**
     * This method returns the ICMPv6 message sequence for Echo Requests and Replies.
     *
     * @returns The ICMPv6 message sequence.
     *
     */
    uint16_t GetSequence(void) const { return HostSwap16(mData.m16[1]); }
    /**
     * This method sets the ICMPv6 message sequence for Echo Requests and Replies.
     *
     * @param[in]  aSequence  The ICMPv6 message sequence.
     *
     */
    void SetSequence(uint16_t aSequence) { mData.m16[1] = HostSwap16(aSequence); }
    /**
     * This static method returns the byte offset of the Checksum field in the ICMPv6 header.
     *
     * @returns The byte offset of the Checksum field.
     *
     */
    static uint8_t GetChecksumOffset(void) { return offsetof(otIcmp6Header, mChecksum); }
    /**
     * This static method returns the byte offset of the ICMPv6 payload.
     *
     * @returns The Byte offset of the ICMPv6 payload.
     *
     */
    static uint8_t GetDataOffset(void) { return offsetof(otIcmp6Header, mData); }
} OT_TOOL_PACKED_END;
/**
 * This class implements ICMPv6 message handlers.
 *
 */
class IcmpHandler : public otIcmp6Handler
{
    friend class Icmp;
public:
    /**
     * This constructor creates an ICMPv6 message handler.
     *
     * @param[in]  aCallback  A pointer to the function that is called when receiving an ICMPv6 message.
     * @param[in]  aContext   A pointer to arbitrary context information.
     *
     */
    IcmpHandler(otIcmp6ReceiveCallback aCallback, void *aContext)
    {
        mReceiveCallback = aCallback;
        mContext = aContext;
        mNext = NULL;
    }
private:
    // Invokes the registered callback with the received message, its info,
    // and the parsed ICMPv6 header (called by Icmp when dispatching).
    void HandleReceiveMessage(Message &aMessage, const MessageInfo &aMessageInfo, const IcmpHeader &aIcmp6Header)
    {
        mReceiveCallback(mContext, &aMessage, &aMessageInfo, &aIcmp6Header);
    }
    // Returns the next handler in the singly-linked handler list.
    IcmpHandler *GetNext(void) { return static_cast<IcmpHandler *>(mNext); }
};
/**
 * This class implements ICMPv6.
 *
 */
class Icmp : public InstanceLocator
{
public:
    /**
     * This constructor initializes the object.
     *
     * @param[in]  aInstance A reference to the OpenThread instance.
     *
     */
    explicit Icmp(Instance &aInstance);
    /**
     * This method returns a new ICMP message with sufficient header space reserved.
     *
     * @param[in]  aReserved  The number of header bytes to reserve after the ICMP header.
     *
     * @returns A pointer to the message or NULL if no buffers are available.
     *
     */
    Message *NewMessage(uint16_t aReserved);
    /**
     * This method registers ICMPv6 handler.
     *
     * @param[in]  aHandler  A reference to the ICMPv6 handler.
     *
     * @retval OT_ERROR_NONE     Successfully registered the ICMPv6 handler.
     * @retval OT_ERROR_ALREADY  The ICMPv6 handler is already registered.
     *
     */
    otError RegisterHandler(IcmpHandler &aHandler);
    /**
     * This method sends an ICMPv6 Echo Request message.
     *
     * @param[in]  aMessage      A reference to the Echo Request payload.
     * @param[in]  aMessageInfo  A reference to the message info associated with @p aMessage.
     * @param[in]  aIdentifier   An identifier to aid in matching Echo Replies to this Echo Request.
     *                           May be zero.
     *
     * @retval OT_ERROR_NONE     Successfully enqueued the ICMPv6 Echo Request message.
     * @retval OT_ERROR_NO_BUFS  Insufficient buffers available to generate an ICMPv6 Echo Request message.
     *
     */
    otError SendEchoRequest(Message &aMessage, const MessageInfo &aMessageInfo, uint16_t aIdentifier);
    /**
     * This method sends an ICMPv6 error message.
     *
     * @param[in]  aType         The ICMPv6 message type.
     * @param[in]  aCode         The ICMPv6 message code.
     * @param[in]  aMessageInfo  A reference to the message info.
     * @param[in]  aHeader       The IPv6 header of the error-causing message.
     *
     * @retval OT_ERROR_NONE     Successfully enqueued the ICMPv6 error message.
     * @retval OT_ERROR_NO_BUFS  Insufficient buffers available.
     *
     */
    otError SendError(IcmpHeader::Type aType,
                      IcmpHeader::Code aCode,
                      const MessageInfo &aMessageInfo,
                      const Header & aHeader);
    /**
     * This method handles an ICMPv6 message.
     *
     * @param[in]  aMessage      A reference to the ICMPv6 message.
     * @param[in]  aMessageInfo  A reference to the message info associated with @p aMessage.
     *
     * @retval OT_ERROR_NONE     Successfully processed the ICMPv6 message.
     * @retval OT_ERROR_NO_BUFS  Insufficient buffers available to generate the reply.
     * @retval OT_ERROR_DROP     The ICMPv6 message was invalid and dropped.
     *
     */
    otError HandleMessage(Message &aMessage, MessageInfo &aMessageInfo);
    /**
     * This method updates the ICMPv6 checksum.
     *
     * @param[in]  aMessage   A reference to the ICMPv6 message.
     * @param[in]  aChecksum  The pseudo-header checksum value.
     *
     */
    void UpdateChecksum(Message &aMessage, uint16_t aChecksum);
    /**
     * This method returns the current ICMPv6 Echo processing mode.
     *
     * @returns The ICMPv6 Echo processing mode.
     *
     */
    otIcmp6EchoMode GetEchoMode(void) const { return mEchoMode; }
    /**
     * This method sets the ICMPv6 Echo processing mode.
     *
     * @param[in]  aMode  The ICMPv6 Echo processing mode.
     *
     */
    void SetEchoMode(otIcmp6EchoMode aMode) { mEchoMode = aMode; }
    /**
     * This method indicates whether or not the ICMPv6 Echo Request should be handled.
     *
     * @retval TRUE if OpenThread should respond with an ICMPv6 Echo Reply.
     * @retval FALSE if OpenThread should not respond with an ICMPv6 Echo Reply.
     *
     */
    bool ShouldHandleEchoRequest(const MessageInfo &aMessageInfo);
private:
    // Builds and enqueues the Echo Reply for a received Echo Request.
    otError HandleEchoRequest(Message &aRequestMessage, const MessageInfo &aMessageInfo);
    IcmpHandler *mHandlers;       // Linked list of registered receive handlers.
    uint16_t mEchoSequence;       // Sequence number used for outgoing Echo Requests.
    otIcmp6EchoMode mEchoMode;    // Current Echo Request processing mode.
};
/**
* @}
*
*/
} // namespace Ip6
} // namespace ot
#endif // ICMP6_HPP_
| {
"content_hash": "3ef8be5848f6db7c20714863bdace6df",
"timestamp": "",
"source": "github",
"line_count": 338,
"max_line_length": 113,
"avg_line_length": 28.118343195266274,
"alnum_prop": 0.6284722222222222,
"repo_name": "turon/openthread",
"id": "e52d9f7368bf75751869ba71a45976ee5a460696",
"size": "11112",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/core/net/icmp6.hpp",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "50"
},
{
"name": "C",
"bytes": "1034514"
},
{
"name": "C++",
"bytes": "4480499"
},
{
"name": "Dockerfile",
"bytes": "6306"
},
{
"name": "M4",
"bytes": "36666"
},
{
"name": "Makefile",
"bytes": "138336"
},
{
"name": "Python",
"bytes": "2160064"
},
{
"name": "Shell",
"bytes": "73650"
}
],
"symlink_target": ""
} |
-- Garry's Mod SWEP definition: CS:GO Huntsman Knife | Bright Water skin.
-- Inherits all behavior from csgo_baseknife; this file only sets
-- cosmetic fields (models, skin index, names) and TTT configuration.
if not file.Exists( "weapons/csgo_baseknife.lua", "LUA" ) then
    SWEP.Spawnable = false
    print( "csgo_huntsman_bright_water failed to initialize: csgo_baseknife.lua not found. Did you install the main part?" )
    return
end
-- Active when running the Trouble in Terrorist Town gamemode (or forced via cvar).
local TTT = ( GAMEMODE_NAME == "terrortown" or cvars.Bool("csgo_knives_force_ttt", false) )
DEFINE_BASECLASS( "csgo_baseknife" )
if ( SERVER ) then
    SWEP.Weight = 5
    SWEP.AutoSwitchTo = false
    SWEP.AutoSwitchFrom = false
    if TTT then
        SWEP.EquipMenuData = nil
    end
end
if ( CLIENT ) then
    SWEP.Slot = TTT and 6 or 2
    SWEP.SlotPos = 0
end
SWEP.PrintName = "Huntsman Knife" .. " | " .. "Bright Water"
SWEP.Category = "CS:GO Knives"
SWEP.Spawnable = true
SWEP.AdminSpawnable = true
SWEP.ViewModel = "models/weapons/v_csgo_tactical.mdl"
SWEP.WorldModel = "models/weapons/w_csgo_tactical.mdl"
SWEP.SkinIndex = 16
SWEP.PaintMaterial = nil
SWEP.AreDaggers = false
util.PrecacheModel( SWEP.ViewModel )
util.PrecacheModel( SWEP.WorldModel )
-- TTT config values
-- Kind specifies the category this weapon is in. Players can only carry one of
-- each. Can be: WEAPON_... MELEE, PISTOL, HEAVY, NADE, CARRY, EQUIP1, EQUIP2 or ROLE.
-- Matching SWEP.Slot values: 0 1 2 3 4 6 7 8
SWEP.Kind = WEAPON_EQUIP
-- If AutoSpawnable is true and SWEP.Kind is not WEAPON_EQUIP1/2, then this gun can
-- be spawned as a random weapon.
SWEP.AutoSpawnable = false
-- The AmmoEnt is the ammo entity that can be picked up when carrying this gun.
-- SWEP.AmmoEnt = "item_ammo_smg1_ttt"
-- CanBuy is a table of ROLE_* entries like ROLE_TRAITOR and ROLE_DETECTIVE. If
-- a role is in this table, those players can buy this.
SWEP.CanBuy = nil
-- InLoadoutFor is a table of ROLE_* entries that specifies which roles should
-- receive this weapon as soon as the round starts. In this case, none.
SWEP.InLoadoutFor = nil
-- If LimitedStock is true, you can only buy one per round.
SWEP.LimitedStock = false
-- If AllowDrop is false, players can't manually drop the gun with Q
SWEP.AllowDrop = true
-- If IsSilent is true, victims will not scream upon death.
SWEP.IsSilent = true
-- If NoSights is true, the weapon won't have ironsights
SWEP.NoSights = true
-- This sets the icon shown for the weapon in the DNA sampler, search window,
-- equipment menu (if buyable), etc.
SWEP.Icon = "vgui/entities/csgo_huntsman_bright_water.vmt"
| {
"content_hash": "315b4f97a973cbacceca7142d4a99eb9",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 122,
"avg_line_length": 31.346153846153847,
"alnum_prop": 0.7014314928425358,
"repo_name": "xDShot/csgo_knives_sweps",
"id": "ecfaa72646f5c23258da7fbacec3041e56ab5eea",
"size": "2445",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lua/weapons/csgo_huntsman_bright_water.lua",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Lua",
"bytes": "599773"
},
{
"name": "Python",
"bytes": "3019"
}
],
"symlink_target": ""
} |
/** Pixel offsets applied to the tip relative to its computed position. */
export interface Offset {
    top?: number;
    left?: number;
}
/** Construction options for a tip. */
export interface Options {
    /** z-index assigned to the tip element. */
    zIndex?: number;
    /** CSS class applied to the tip element. */
    class?: string;
    /** Positional offset of the tip. */
    offset?: Offset;
}
/** Produces the tip's HTML/text content for a given datum. */
export interface ContentGetter<T> extends Function {
    (data: T): string;
}
/** A tooltip instance bound to a d3 selection; methods chain. */
export interface Tip<T> {
    /** Binds the tip to the given d3 selection. */
    element(element: d3.Selection<any>): Tip<T>;
    /** Shows the tip populated from the given datum. */
    show(data: T): Tip<T>;
    /** Updates the visible tip's content/position for the given datum. */
    update(data: T): Tip<T>;
    /** Hides the tip. */
    hide(): Tip<T>;
    /** Removes the tip element from the DOM. */
    remove(): void;
}
/** Factory returning a new Tip each time it is invoked. */
export interface TipFactory<T> extends Function {
    (): Tip<T>;
}
/** Creates a tip factory from a content getter and optional display options. */
export default function d3scription<T>(contentGetter: ContentGetter<T>, options?: Options): TipFactory<T>;
| {
"content_hash": "8f3650d4db13fbdaf4a3d7be81efb4ad",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 106,
"avg_line_length": 25.304347826086957,
"alnum_prop": 0.6512027491408935,
"repo_name": "GlobalWebIndex/d3scription",
"id": "46209bd2d99f34824f4fb94281a8ec9d924cf62d",
"size": "617",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "index.d.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "181"
},
{
"name": "JavaScript",
"bytes": "516"
},
{
"name": "TypeScript",
"bytes": "3812"
}
],
"symlink_target": ""
} |
<?php
// Deletes a SIP IP access control list (identified by its "AL..." SID)
// via the Twilio REST API.  This operation is irreversible.
// Get the PHP helper library from twilio.com/docs/php/install
require_once '/path/to/vendor/autoload.php'; // Loads the library
use Twilio\Rest\Client;
// Your Account Sid and Auth Token from twilio.com/user/account
$sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
$token = "your_auth_token";
$client = new Client($sid, $token);
// Issues the HTTP DELETE for the ACL resource.
$client->sip
    ->ipAccessControlLists("AL32a3c49700934481addd5ce1659f04d2")
    ->delete();
| {
"content_hash": "516d684295951f226b288c4fdbff126e",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 65,
"avg_line_length": 32.92307692307692,
"alnum_prop": 0.7406542056074766,
"repo_name": "teoreteetik/api-snippets",
"id": "38542705958a3aa34d4a093a56aa4bfa74e7d296",
"size": "428",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rest/sip-in/delete-ip-acl/delete-ip-acl.5.x.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "643369"
},
{
"name": "HTML",
"bytes": "335"
},
{
"name": "Java",
"bytes": "943336"
},
{
"name": "JavaScript",
"bytes": "539577"
},
{
"name": "M",
"bytes": "117"
},
{
"name": "Mathematica",
"bytes": "93"
},
{
"name": "Objective-C",
"bytes": "46198"
},
{
"name": "PHP",
"bytes": "538312"
},
{
"name": "Python",
"bytes": "467248"
},
{
"name": "Ruby",
"bytes": "470316"
},
{
"name": "Shell",
"bytes": "1564"
},
{
"name": "Swift",
"bytes": "36563"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.