repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
fanscribed/fanscribed | fanscribed/apps/transcripts/models.py | import logging
log = logging.getLogger(__name__)
import datetime
from decimal import Decimal
import re
import unicodedata
from allauth.account.signals import user_signed_up
from django.conf import settings
from django.contrib.auth.models import Group
from django.core.urlresolvers import reverse
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.utils.text import slugify
from django.utils.timezone import utc
from django_fsm.db.fields import FSMField, transition
from django_fsm.signals import pre_transition, post_transition
from model_utils.models import TimeStampedModel
from django_redis import get_redis_connection
from waffle import flag_is_active
from ... import locks
# ================================================================
# TRANSCRIPTS
# ================================================================
class SentenceManager(models.Manager):
    """Convenience filters for Sentence workflow states.

    Also used as the related manager, e.g. transcript.sentences.completed().
    """

    use_for_related_fields = True

    def _having(self, **criteria):
        # All public helpers are thin wrappers around a single-field filter.
        return self.filter(**criteria)

    def empty(self):
        return self._having(state='empty')

    def partial(self):
        return self._having(state='partial')

    def completed(self):
        return self._having(state='completed')

    def clean_edited(self):
        return self._having(clean_state='edited')

    def clean_reviewed(self):
        return self._having(clean_state='reviewed')

    def boundary_edited(self):
        return self._having(boundary_state='edited')

    def boundary_reviewed(self):
        return self._having(boundary_state='reviewed')

    def speaker_edited(self):
        return self._having(speaker_state='edited')

    def speaker_reviewed(self):
        return self._having(speaker_state='reviewed')
class Sentence(models.Model):
    """A sentence made from sentence fragments.

    state
    -----

    @startuml
    [*] --> empty
    empty --> partial
    partial --> completed
    completed --> [*]
    @enduml

    clean_state, boundary_state, speaker_state
    ------------------------------------------

    These are all unprotected fields, and have no transition methods.
    We trust ourselves to change them appropriately instead.

    @startuml
    [*] --> untouched
    untouched --> editing
    editing --> untouched
    editing --> edited
    edited --> reviewing
    reviewing --> edited
    reviewing --> reviewed
    @enduml
    """

    transcript = models.ForeignKey('Transcript', related_name='sentences')

    # Protected FSM field: changed only via the @transition methods below.
    state = FSMField(default='empty', protected=True)

    # Cleaning workflow: unprotected state, a Redis lock id, and the user
    # who last edited (used elsewhere for teamwork exclusion).
    clean_state = FSMField(default='untouched')  # Not protected.
    clean_lock_id = models.CharField(max_length=32, blank=True, null=True)
    clean_last_editor = models.ForeignKey('auth.User', blank=True, null=True, related_name='+')

    # Boundary workflow (same trio as cleaning).
    boundary_state = FSMField(default='untouched')  # Not protected.
    boundary_lock_id = models.CharField(max_length=32, blank=True, null=True)
    boundary_last_editor = models.ForeignKey('auth.User', blank=True, null=True, related_name='+')

    # Speaker workflow (same trio as cleaning).
    speaker_state = FSMField(default='untouched')  # Not protected.
    speaker_lock_id = models.CharField(max_length=32, blank=True, null=True)
    speaker_last_editor = models.ForeignKey('auth.User', blank=True, null=True, related_name='+')

    # Confirmed member fragments vs. fragments tentatively proposed
    # during stitching (see commit_candidates below).
    fragments = models.ManyToManyField(
        'SentenceFragment', related_name='sentences')
    fragment_candidates = models.ManyToManyField(
        'SentenceFragment', related_name='candidate_sentences')

    # Ordering anchors: the transcript fragment the sentence starts in,
    # and its sequence within that fragment.
    tf_start = models.ForeignKey('TranscriptFragment')
    tf_sequence = models.PositiveIntegerField()

    # Denormalized "latest" values, kept current by the post_save receivers
    # on SentenceRevision and SentenceBoundary (defined below in this file).
    latest_text = models.TextField(blank=True, null=True)
    latest_start = models.DecimalField(max_digits=8, decimal_places=2,
                                       blank=True, null=True)
    latest_end = models.DecimalField(max_digits=8, decimal_places=2,
                                     blank=True, null=True)
    latest_speaker = models.ForeignKey('Speaker', blank=True, null=True)

    class Meta:
        ordering = ('tf_start__start', 'tf_sequence')

    objects = SentenceManager()

    def __unicode__(self):
        return u'{self.state} sentence'.format(**locals())

    @property
    def text(self):
        """Space-joined text of all confirmed fragments."""
        return u' '.join(fragment.text for fragment in self.fragments.all())

    @property
    def candidate_text(self):
        """Space-joined text of all candidate fragments."""
        return u' '.join(
            fragment.text for fragment in self.fragment_candidates.all())

    # --

    @transition(state, ['empty', 'partial'], 'partial', save=True)
    def add_candidates(self, *fragments):
        """Propose fragments for this sentence (moves state to 'partial')."""
        self.fragment_candidates.add(*fragments)

    @transition(state, 'partial', 'partial', save=True)
    def remove_candidates(self, *fragments):
        """Withdraw previously proposed fragments."""
        self.fragment_candidates.remove(*fragments)

    @transition(state, 'partial', 'partial', save=True)
    def commit_candidates(self, *candidates):
        """Promote candidate fragments to confirmed fragments."""
        self.fragments.add(*candidates)
        self.fragment_candidates.remove(*candidates)

    @transition(state, 'partial', 'completed', save=True)
    def complete(self):
        """Finish the sentence: snapshot its text and time span."""
        # Set initial latest text and (latest_start, latest_end).
        # Creating revision #1 fires update_sentence_latest_text (below),
        # which copies the text onto latest_text and saves the sentence.
        self.revisions.create(
            sequence=1,
            text=self.text,
        )
        # Span the union of all member fragments' transcript fragments;
        # saved by the transition's save=True.
        starts = set()
        ends = set()
        for fragment in self.fragments.all():
            transcript_fragment = fragment.revision.fragment
            starts.add(transcript_fragment.start)
            ends.add(transcript_fragment.end)
        self.latest_start = min(starts)
        self.latest_end = max(ends)

    # --

    # Redis lock names for each per-sentence workflow, keyed by sentence id.
    @property
    def _clean_lockname(self):
        return 'lock:sc:{self.id}'.format(**locals())

    @property
    def _boundary_lockname(self):
        return 'lock:sb:{self.id}'.format(**locals())

    @property
    def _speaker_lockname(self):
        return 'lock:ss:{self.id}'.format(**locals())

    def lock_clean(self):
        """Acquire the cleaning lock via the shared locks helper."""
        locks.acquire_model_lock(
            conn=get_redis_connection('default'),
            instance=self,
            lockname=self._clean_lockname,
            lockid_field='clean_lock_id',
        )

    def unlock_clean(self):
        """Release the cleaning lock."""
        locks.release_model_lock(
            conn=get_redis_connection('default'),
            instance=self,
            lockname=self._clean_lockname,
            lockid_field='clean_lock_id',
        )

    def lock_boundary(self):
        """Acquire the boundary lock."""
        locks.acquire_model_lock(
            conn=get_redis_connection('default'),
            instance=self,
            lockname=self._boundary_lockname,
            lockid_field='boundary_lock_id',
        )

    def unlock_boundary(self):
        """Release the boundary lock."""
        locks.release_model_lock(
            conn=get_redis_connection('default'),
            instance=self,
            lockname=self._boundary_lockname,
            lockid_field='boundary_lock_id',
        )

    def lock_speaker(self):
        """Acquire the speaker lock."""
        locks.acquire_model_lock(
            conn=get_redis_connection('default'),
            instance=self,
            lockname=self._speaker_lockname,
            lockid_field='speaker_lock_id',
        )

    def unlock_speaker(self):
        """Release the speaker lock."""
        locks.release_model_lock(
            conn=get_redis_connection('default'),
            instance=self,
            lockname=self._speaker_lockname,
            lockid_field='speaker_lock_id',
        )
# ---------------------
class SentenceFragment(models.Model):
    """A sentence fragment from within a transcript fragment."""

    # The transcript-fragment revision this text was written against.
    revision = models.ForeignKey('TranscriptFragmentRevision',
                                 related_name='sentence_fragments')
    # Position of this fragment within its revision.
    sequence = models.PositiveIntegerField()
    text = models.TextField()

    class Meta:
        # Global ordering: by audio position, then by in-revision order.
        ordering = ('revision__fragment__start', 'sequence')
        unique_together = [
            ('revision', 'sequence'),
        ]
# ---------------------
class SentenceRevision(models.Model):
    """A full-text revision of a sentence."""

    sentence = models.ForeignKey('Sentence', related_name='revisions')
    sequence = models.PositiveIntegerField()  # Monotonic revision number.
    # Nullable: revision #1 is system-created by Sentence.complete() with
    # no editor.
    editor = models.ForeignKey('auth.User', blank=True, null=True)
    text = models.TextField()

    class Meta:
        ordering = ('sequence',)
        get_latest_by = 'sequence'
        unique_together = [
            ('sentence', 'sequence'),
        ]
@receiver(post_save, sender=SentenceRevision)
def update_sentence_latest_text(instance, created, raw, **kwargs):
    """Mirror a newly created revision's text onto its sentence."""
    if raw or not created:
        # Ignore fixture loads and saves of existing revisions.
        return
    sentence = instance.sentence
    sentence.latest_text = instance.text
    sentence.save()
# ---------------------
class SentenceBoundary(models.Model):
    """A precise start/end boundary of a sentence."""

    sentence = models.ForeignKey('Sentence', related_name='boundaries')
    sequence = models.PositiveIntegerField()  # Monotonic revision number.
    editor = models.ForeignKey('auth.User')
    # Same precision as TranscriptFragment.start/end; presumably seconds
    # with two decimal places -- TODO confirm units.
    start = models.DecimalField(max_digits=8, decimal_places=2)
    end = models.DecimalField(max_digits=8, decimal_places=2)

    class Meta:
        ordering = ('sequence',)
        get_latest_by = 'sequence'
        unique_together = [
            ('sentence', 'sequence'),
        ]
@receiver(post_save, sender=SentenceBoundary)
def update_sentence_latest_boundary(instance, created, raw, **kwargs):
    """Mirror a newly created boundary's span onto its sentence."""
    if raw or not created:
        # Ignore fixture loads and saves of existing boundaries.
        return
    sentence = instance.sentence
    sentence.latest_start = instance.start
    sentence.latest_end = instance.end
    sentence.save()
# ---------------------
class Speaker(models.Model):
    """A unique speaker in the transcript."""

    transcript = models.ForeignKey('Transcript', related_name='speakers')
    name = models.CharField(max_length=100)

    class Meta:
        # Speaker names are unique per transcript, not globally.
        unique_together = [
            ('transcript', 'name'),
        ]

    def __unicode__(self):
        return self.name
# ---------------------
class TranscriptManager(models.Manager):
    """Convenience filters for Transcript state fields."""

    use_for_related_fields = True

    def _having(self, **criteria):
        # Shared shorthand for single-field state filters.
        return self.filter(**criteria)

    def unfinished(self):
        return self._having(state='unfinished')

    def finished(self):
        return self._having(state='finished')

    def without_known_length(self):
        return self._having(length_state='unset')

    def with_known_length(self):
        return self._having(length_state='set')
class Transcript(TimeStampedModel):
    """A transcript of audio or video to text.

    state
    -----

    @startuml
    [*] --> unfinished
    unfinished --> finished
    finished --> [*]
    @enduml

    length_state
    ------------

    @startuml
    [*] --> unset
    unset --> set
    set --> [*]
    @enduml
    """

    title = models.CharField(max_length=512)
    state = FSMField(default='unfinished', protected=True)
    # Null until set_length() records the media length.
    length = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    length_state = FSMField(default='unset', protected=True)
    created_by = models.ForeignKey('auth.User', blank=True, null=True)
    # Users credited via Task.validate().
    contributors = models.ManyToManyField(
        'auth.User', related_name='contributed_to_transcripts')

    objects = TranscriptManager()

    class Meta:
        get_latest_by = 'created'

    def __unicode__(self):
        return self.title

    def get_absolute_url(self):
        return reverse(
            'transcripts:detail_slug',
            kwargs=dict(pk=self.pk, slug=slugify(self.title)))

    def _all_tasks_complete(self):
        # Every phase reported by `stats` must be exactly 100 (percent);
        # used as the condition for the finish() transition.
        return all(v == 100 for v in self.stats.values())

    @transition(state, 'unfinished', 'finished', save=True,
                conditions=[_all_tasks_complete])
    def finish(self):
        pass

    @transition(length_state, 'unset', 'set', save=True)
    def set_length(self, length):
        """Record the media length and create fragments and stitches."""
        self.length = Decimal(length)
        self._create_fragments()

    def _create_fragments(self):
        """Slice [0, length) into fragments, with a stitch between neighbors."""
        start = Decimal('0')
        previous = None
        while start < self.length:
            # Find the end of the current fragment.
            # If remaining time is less than fragment length, stretch to end.
            end = start + settings.TRANSCRIPT_FRAGMENT_LENGTH
            remaining = self.length - end
            if remaining < settings.TRANSCRIPT_FRAGMENT_LENGTH:
                end = self.length
            current = self.fragments.create(
                start=start,
                end=end,
                # # First and last fragments are 'stitched' to each end. :)
                # stitched_left=True if start == Decimal('0') else False,
                # stitched_right=True if end == self.length else False,
            )
            if previous is not None:
                self.stitches.create(
                    left=previous,
                    right=current,
                )
            start = end
            previous = current

    @property
    def completed_sentences(self):
        """Completed sentences ordered by their start time."""
        return self.sentences.filter(state='completed').order_by('latest_start')

    @property
    def processed_media_url(self):
        """Returns the URL for the transcript's full-length processed audio,
        or None if no such audio exists for the transcript."""
        try:
            return self.media.get(is_processed=True, is_full_length=True).file.url
        except TranscriptMedia.DoesNotExist:
            return None

    @property
    def stats(self):
        """Return a dictionary with a percentage of completion of each phase."""
        stats = {}
        fragments_count = self.fragments.count()
        if fragments_count == 0:
            stats.update(transcribe=0)
        else:
            # Each item counts once when edited and twice when reviewed,
            # over a denominator of 2 * count.  NOTE: this module targets
            # Python 2 (u''/ur''/xrange), so `/` on these ints truncates.
            stats['transcribe'] = (
                (self.fragments.transcribed().count() + self.fragments.reviewed().count() * 2) * 100
                /
                (self.fragments.count() * 2)
            )
        stitches_count = self.stitches.count()
        if stitches_count == 0:
            stats.update(stitch=0)
        else:
            stats['stitch'] = (
                (self.stitches.stitched().count() + self.stitches.reviewed().count() * 2) * 100
                /
                (self.stitches.count() * 2)
            )
        sentence_count = self.sentences.count()
        if sentence_count == 0:
            stats.update(clean=0, boundary=0, speaker=0)
        else:
            # Later phases are scaled by stitch progress, since sentences
            # only come into existence as stitching proceeds.
            stitch_factor = stats['stitch'] * 0.01
            stats['clean'] = (
                (self.sentences.clean_edited().count() + self.sentences.clean_reviewed().count() * 2) * 100 * stitch_factor
                /
                (sentence_count * 2)
            )
            stats['boundary'] = (
                (self.sentences.boundary_edited().count() + self.sentences.boundary_reviewed().count() * 2) * 100 * stitch_factor
                /
                (sentence_count * 2)
            )
            stats['speaker'] = (
                (self.sentences.speaker_edited().count() + self.sentences.speaker_reviewed().count() * 2) * 100 * stitch_factor
                /
                (sentence_count * 2)
            )
        return stats
# ---------------------
class TranscriptFragmentManager(models.Manager):
    """Convenience filters for TranscriptFragment state fields."""

    use_for_related_fields = True

    def _having(self, **criteria):
        # Shared shorthand for single-field state filters.
        return self.filter(**criteria)

    def empty(self):
        return self._having(state='empty')

    def transcribed(self):
        return self._having(state='transcribed')

    def reviewed(self):
        return self._having(state='reviewed')

    def locked(self):
        return self._having(lock_state='locked')

    def unlocked(self):
        return self._having(lock_state='unlocked')
class TranscriptFragment(models.Model):
    """A fragment of a transcript defined by its time span.

    state
    -----

    @startuml
    [*] --> empty
    empty --> transcribed
    transcribed --> reviewed
    reviewed --> [*]
    @enduml

    lock_state
    ------------

    @startuml
    [*] --> unlocked
    unlocked --> locked
    locked --> unlocked
    @enduml
    """

    transcript = models.ForeignKey('Transcript', related_name='fragments')
    start = models.DecimalField(max_digits=8, decimal_places=2)
    end = models.DecimalField(max_digits=8, decimal_places=2)
    state = FSMField(default='empty', protected=True)
    lock_state = FSMField(default='unlocked', protected=True)
    lock_id = models.CharField(max_length=32, blank=True, null=True)
    # Used for teamwork exclusion in TranscribeTaskManager.
    last_editor = models.ForeignKey('auth.User', blank=True, null=True, related_name='+')

    objects = TranscriptFragmentManager()

    class Meta:
        ordering = ('start',)
        unique_together = [
            ('transcript', 'start', 'end'),
        ]

    def stitched_both_sides(self):
        # NOTE(review): no `stitched_left`/`stitched_right` fields are defined
        # in this file (they are commented out in Transcript._create_fragments),
        # so this would raise AttributeError unless they exist elsewhere --
        # verify before relying on this method.
        return self.stitched_left and self.stitched_right

    @property
    def _lockname(self):
        # Redis lock key for this fragment.
        return 'lock:tf:{self.id}'.format(**locals())

    @transition(lock_state, 'unlocked', 'locked', save=True)
    def lock(self):
        """Acquire this fragment's Redis lock."""
        locks.acquire_model_lock(
            conn=get_redis_connection('default'),
            instance=self,
            lockname=self._lockname,
            lockid_field='lock_id',
        )

    @transition(lock_state, 'locked', 'unlocked', save=True)
    def unlock(self):
        """Release this fragment's Redis lock."""
        locks.release_model_lock(
            conn=get_redis_connection('default'),
            instance=self,
            lockname=self._lockname,
            lockid_field='lock_id',
        )

    @transition(state, 'empty', 'transcribed', save=True)
    def transcribe(self):
        pass

    @transition(state, 'transcribed', 'reviewed', save=True)
    def review(self):
        # Ready related stitches if other fragments are transcribed.
        # (Note the checks below actually require the neighboring fragment
        # to already be in state 'reviewed'.)
        if self.start != Decimal(0):
            L = self.stitch_at_left
            if L.left.state == 'reviewed':
                L.ready()
        if self.end != self.transcript.length:
            R = self.stitch_at_right
            if R.right.state == 'reviewed':
                R.ready()
# ---------------------
class TranscriptStitchManager(models.Manager):
    """Convenience filters for TranscriptStitch state fields."""

    def _having(self, **criteria):
        # Shared shorthand for single-field state filters.
        return self.filter(**criteria)

    def notready(self):
        return self._having(state='notready')

    def unstitched(self):
        return self._having(state='unstitched')

    def stitched(self):
        return self._having(state='stitched')

    def reviewed(self):
        return self._having(state='reviewed')

    def locked(self):
        return self._having(lock_state='locked')

    def unlocked(self):
        return self._having(lock_state='unlocked')
class TranscriptStitch(models.Model):
    """A stitch between two fragments of a transcript.

    state
    -----

    @startuml
    [*] --> notready
    notready --> unstitched
    unstitched --> stitched
    stitched --> reviewed
    reviewed --> [*]
    @enduml

    lock_state
    ------------

    @startuml
    [*] --> unlocked
    unlocked --> locked
    locked --> unlocked
    @enduml
    """

    transcript = models.ForeignKey('Transcript', related_name='stitches')
    # The two adjacent fragments joined by this stitch.  Reverse accessors:
    # fragment.stitch_at_right (via `left`), fragment.stitch_at_left (via `right`).
    left = models.OneToOneField('TranscriptFragment', related_name='stitch_at_right')
    right = models.OneToOneField('TranscriptFragment', related_name='stitch_at_left')
    state = FSMField(default='notready', protected=True)
    lock_state = FSMField(default='unlocked', protected=True)
    lock_id = models.CharField(max_length=32, blank=True, null=True)
    # Used for teamwork exclusion in StitchTaskManager.
    last_editor = models.ForeignKey('auth.User', blank=True, null=True, related_name='+')

    objects = TranscriptStitchManager()

    class Meta:
        ordering = ('left__start',)
        unique_together = [
            ('transcript', 'left'),
        ]

    @property
    def _lockname(self):
        # Redis lock key for this stitch.
        return 'lock:ts:{self.id}'.format(**locals())

    @transition(lock_state, 'unlocked', 'locked', save=True)
    def lock(self):
        """Acquire this stitch's Redis lock."""
        locks.acquire_model_lock(
            conn=get_redis_connection('default'),
            instance=self,
            lockname=self._lockname,
            lockid_field='lock_id',
        )

    @transition(lock_state, 'locked', 'unlocked', save=True)
    def unlock(self):
        """Release this stitch's Redis lock."""
        locks.release_model_lock(
            conn=get_redis_connection('default'),
            instance=self,
            lockname=self._lockname,
            lockid_field='lock_id',
        )

    @transition(state, 'notready', 'unstitched', save=True)
    def ready(self):
        pass

    @transition(state, 'unstitched', 'stitched', save=True)
    def stitch(self):
        self._merge_sentences()

    @transition(state, 'stitched', 'reviewed', save=True)
    def review(self):
        self._merge_sentences()
        self._complete_sentences()

    def _merge_sentences(self):
        """Merge overlapping Sentence instances."""
        left_fragment_revision = self.left.revisions.latest()
        right_fragment_revision = self.right.revisions.latest()
        for revision in [left_fragment_revision, right_fragment_revision]:
            deletion_candidates = []  # NOTE(review): assigned but never used.
            for sf in revision.sentence_fragments.all():
                sentences = sf.sentences
                candidate_sentences = sf.candidate_sentences
                # If fragment is in more than one sentence,
                # pick the first sentence as the survivor.
                if sentences.count() > 1:
                    survivor = sentences.first()
                elif candidate_sentences.count() > 1:
                    survivor = candidate_sentences.first()
                else:
                    survivor = None
                if survivor is not None:
                    # Merge remaining sentences with survivor.
                    def merge(s, o):
                        # Fold o's fragments/candidates into s, then delete o.
                        s.fragments.add(*o.fragments.all())
                        s.fragment_candidates.add(
                            *o.fragment_candidates.all())
                        o.delete()
                    for other in sentences.all():
                        if other != survivor:
                            merge(survivor, other)
                    for other in candidate_sentences.all():
                        if other != survivor:
                            merge(survivor, other)
                else:
                    # No survivor means there was only one sentence involved.
                    pass

    def _complete_sentences(self):
        """Complete sentences in this stitch (when they are ready)."""
        left_fragment_revision = self.left.revisions.latest()
        right_fragment_revision = self.right.revisions.latest()
        # Look for partial sentences in these revisions and complete them.
        revisions_to_complete = [
            left_fragment_revision,
            right_fragment_revision,
        ]
        # Also look for partial sentences in adjacent reviewed stitches.
        if self.left.start != Decimal(0):
            stitch_at_left = TranscriptStitch.objects.get(right=self.left)
            if stitch_at_left.state == 'reviewed':
                revisions_to_complete.append(stitch_at_left.left.revisions.latest())
        if self.right.end != self.transcript.length:
            stitch_at_right = TranscriptStitch.objects.get(left=self.right)
            if stitch_at_right.state == 'reviewed':
                revisions_to_complete.append(stitch_at_right.right.revisions.latest())
        # Complete sentences.
        sentences_checked = set()  # Sentence ids already examined.
        for revision in revisions_to_complete:
            for candidate_sf in revision.sentence_fragments.all():
                for sentence in candidate_sf.sentences.filter(state='partial'):
                    if sentence.id in sentences_checked:
                        # Already checked this sentence ID.
                        continue
                    sentences_checked.add(sentence.id)
                    # Check partial sentence.
                    if sentence.fragment_candidates.count() > 0:
                        # The sentence is still being worked on.
                        continue
                    else:
                        # Complete the sentence if all related stitches
                        # are reviewed, AND adjacent stitches are reviewed.
                        for other_sf in sentence.fragments.all():
                            # NOTE(review): `True or ...` makes this condition
                            # unconditionally true, so every fragment is
                            # checked, including candidate_sf itself.
                            if True or other_sf != candidate_sf:
                                other_tf = other_sf.revision.fragment
                                must_be_reviewed = set()
                                # Find each stitch related to the sentence
                                # fragment, and its neighbor.
                                try:
                                    stitch_at_left = other_tf.stitch_at_left
                                    must_be_reviewed.add(stitch_at_left)
                                    left_of_left = TranscriptStitch.objects.get(
                                        right=stitch_at_left.left)
                                    must_be_reviewed.add(left_of_left)
                                except TranscriptStitch.DoesNotExist:
                                    pass
                                try:
                                    stitch_at_right = other_tf.stitch_at_right
                                    must_be_reviewed.add(stitch_at_right)
                                    right_of_right = TranscriptStitch.objects.get(
                                        left=stitch_at_right.right)
                                    must_be_reviewed.add(right_of_right)
                                except TranscriptStitch.DoesNotExist:
                                    pass
                                # Ignore the stitch currently being reviewed.
                                if self in must_be_reviewed:
                                    must_be_reviewed.remove(self)
                                states = [s.state for s in must_be_reviewed]
                                if any(state != 'reviewed' for state in states):
                                    # Not completing.
                                    break
                                else:
                                    # May complete.
                                    pass
                        else:
                            # All stitches involved in the sentence
                            # are reviewed (for-loop ended without break).
                            sentence.complete()
# ---------------------
class TranscriptFragmentRevision(TimeStampedModel):
    """A revision of a TranscriptFragment."""

    fragment = models.ForeignKey('TranscriptFragment', related_name='revisions')
    sequence = models.PositiveIntegerField()  # Monotonic revision number.
    editor = models.ForeignKey('auth.User')

    class Meta:
        get_latest_by = 'sequence'
        ordering = ('sequence',)
        unique_together = [
            ('fragment', 'sequence'),
        ]

    @property
    def text(self):
        """Full text of the revision: sentence fragments joined by blank lines."""
        return '\n\n'.join(
            sf.text for sf in self.sentence_fragments.all())
# ================================================================
# MEDIA
# ================================================================
class TranscriptMedia(TimeStampedModel):
    """An audio file (raw or processed, full-length or partial) for a transcript.

    @startuml
    [*] --> empty
    empty --> creating
    creating --> ready
    ready --> deleted
    deleted --> creating
    @enduml
    """

    transcript = models.ForeignKey('transcripts.Transcript', related_name='media')
    file = models.FileField(upload_to='transcripts', max_length=1024)
    state = FSMField(default='empty', protected=True)
    is_processed = models.BooleanField(help_text='Is it processed media?')
    is_full_length = models.BooleanField(
        help_text='Is it the full length of media to be transcribed?')
    # Time span of a partial clip; created null for full-length media
    # (see get_or_create calls in the task managers) -- TODO confirm.
    start = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    end = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    download_count = models.PositiveIntegerField(default=0)

    class Meta:
        # One media row per (transcript, processed?, full-length?, span).
        unique_together = (
            'transcript',
            'is_processed',
            'is_full_length',
            'start',
            'end',
        )

    def __unicode__(self):
        if self.is_processed:
            return u'Processed media for {self.transcript}'.format(**locals())
        else:
            return u'Raw media for {self.transcript}'.format(**locals())

    def create_processed_task(self):
        """Create a processed TranscriptMedia based on this one.

        Assumes that this one is raw and full-length.
        """
        from .tasks import create_processed_transcript_media
        return create_processed_transcript_media.delay(self.pk)

    def create_file_task(self):
        """Queue async creation of a file for this TranscriptMedia."""
        from .tasks import create_transcript_media_file
        return create_transcript_media_file.delay(self.pk)

    def record_download(self):
        """Increment and persist the download counter."""
        self.download_count += 1
        self.save()

    @transition(state, ['empty', 'deleted'], 'creating', save=True)
    def create_file(self):
        pass

    def has_file(self):
        # Condition for the finish() transition below.
        return bool(self.file)

    @transition(state, 'creating', 'ready', save=True, conditions=[has_file])
    def finish(self):
        pass

    @transition(state, 'ready', 'deleted', save=True)
    def delete_file(self):
        self.file.delete()
# ================================================================
# TASKS
# ================================================================
def existing_transcript_task(transcript, user):
    """Return the user's currently presented task in this transcript, if any.

    Scans every task model; returns the first 'presented' task found,
    or None when the user has none pending.
    """
    for task_class in TASK_MODEL.values():
        pending = task_class.objects.filter(
            transcript=transcript,
            assignee=user,
            state='presented',
        )
        if pending:
            return pending[0]
    return None
def assign_next_transcript_task(transcript, user, requested_task_type, request=None):
    """Try to create the next available task of the requested type.

    Honors user preferences and permissions, retries lock collisions,
    and returns the newly presented task -- or None (implicitly) when
    nothing suitable is available.
    """
    # Determine which order to search for available tasks.
    if requested_task_type == 'any_sequential':
        # Sequential moves through the pipeline in one stage at a time.
        L = [
            # (task_type, is_review),
            ('transcribe', False),
            ('transcribe', True),
            ('stitch', False),
            ('stitch', True),
            ('boundary', False),
            ('boundary', True),
            ('clean', False),
            ('clean', True),
            ('speaker', False),
            ('speaker', True),
        ]
    elif requested_task_type == 'any_eager':
        # Eager switches you to a new task type as soon as one is available.
        L = [
            # (task_type, is_review),
            ('boundary', True),
            ('clean', True),
            ('speaker', True),
            ('boundary', False),
            ('clean', False),
            ('speaker', False),
            ('stitch', True),
            ('stitch', False),
            ('transcribe', True),
            ('transcribe', False),
        ]
    else:
        # An individual task type was selected.
        if requested_task_type.endswith('_review'):
            # Remove '_review' from requested task type.
            requested_task_type = requested_task_type.split('_review')[0]
            is_review = True
        else:
            # No changes to requested task type.
            is_review = False
        L = [(requested_task_type, is_review)]
    preferred_task_types = user.profile.preferred_task_names
    for task_type, is_review in L:
        # Does the user want to perform this kind of task?
        if task_type not in preferred_task_types:
            continue
        if is_review and not user.profile.wants_reviews:
            continue
        # Does the user have permission to perform the task?
        perm_name = 'transcripts.add_{}task{}'.format(
            task_type,
            '_review' if is_review else '',
        )
        if not user.has_perm(perm_name):
            continue
        # Permission granted; try to create this type of task.
        tasks = TASK_MODEL[task_type].objects
        if tasks.can_create(user, transcript, is_review, request):
            # Try to get this kind of task,
            # ignoring lock failures up to 5 times.
            for x in xrange(5):  # xrange: this module targets Python 2.
                try:
                    task = tasks.create_next(user, transcript, is_review, request)
                except locks.LockException:
                    # Try again.
                    continue
                else:
                    if task is not None:
                        task.present()
                        return task
    # Fell through every candidate task type: implicitly returns None.
def request_bypasses_teamwork(request):
    """True when a superuser's request carries the 'bypass_teamwork' flag."""
    if request is None:
        return False
    if not request.user.is_superuser:
        return False
    return flag_is_active(request, 'bypass_teamwork')
class TaskManager(models.Manager):
    """Base manager for task models.

    Subclasses override can_create() and create_next().
    """

    use_for_related_fields = True

    def presented(self):
        return self.filter(state='presented')

    def valid(self):
        return self.filter(state='valid')

    def invalid(self):
        return self.filter(state='invalid')

    def can_create(self, user, transcript, is_review, request=None):
        """Can we create a new task?

        :ptype user: django.contrib.auth.models.User
        :ptype transcript: Transcript
        :ptype is_review: bool
        """
        # Base implementation: no task type can ever be created.
        return False

    def create_next(self, user, transcript, is_review, request=None):
        """Create and return the next new task.

        :ptype user: django.contrib.auth.models.User
        :ptype transcript: Transcript
        :ptype is_review: bool
        """
        raise Task.DoesNotExist()
class Task(TimeStampedModel):
    """A transcription task to be completed.

    state
    -----

    @startuml
    [*] --> preparing
    preparing --> ready
    ready --> assigned
    assigned --> presented
    presented --> submitted
    submitted --> valid
    valid --> [*]
    submitted --> invalid
    invalid --> [*]
    presented --> expired
    assigned --> expired
    expired --> [*]
    presented --> canceled
    canceled --> [*]
    @enduml

    Abstract base; concrete subclasses define TASK_TYPE, lock(), and the
    _assign_to/_submit/_validate/_invalidate hooks.
    """

    transcript = models.ForeignKey('Transcript')
    is_review = models.BooleanField()
    state = FSMField(default='preparing', protected=True)
    assignee = models.ForeignKey('auth.User', blank=True, null=True)
    media = models.ForeignKey('TranscriptMedia', blank=True, null=True)
    presented_at = models.DateTimeField(blank=True, null=True)
    validated_at = models.DateTimeField(blank=True, null=True)

    class Meta:
        abstract = True

    objects = TaskManager()

    def get_absolute_url(self):
        return reverse('transcripts:task_perform', kwargs=dict(
            transcript_pk=self.transcript.pk,
            type=self.TASK_TYPE,
            pk=self.pk,
        ))

    def finish_transcript_if_all_tasks_complete(self):
        """Finish the parent transcript once every phase reports 100%."""
        if self.transcript._all_tasks_complete():
            self.transcript.finish()

    def lock(self):
        raise NotImplementedError()

    @transition(state, 'preparing', 'ready', save=True)
    def prepare(self):
        pass

    @transition(state, 'ready', 'assigned', save=True)
    def assign_to(self, user):
        """Assign this task to a user, then run the subclass hook."""
        self.assignee = user
        self._assign_to()

    def _assign_to(self):
        raise NotImplementedError()

    @transition(state, 'assigned', 'presented', save=True)
    def present(self):
        pass

    @transition(state, 'presented', 'submitted', save=True)
    def submit(self):
        # Skip async submission processing while running the test suite.
        if not settings.TESTING:
            self._submit()

    def _submit(self):
        raise NotImplementedError()

    @transition(state, 'submitted', 'valid', save=True)
    def validate(self):
        """Accept the submission and credit the assignee as a contributor."""
        self._validate()
        self.transcript.contributors.add(self.assignee)

    def _validate(self):
        raise NotImplementedError()

    @transition(state, 'submitted', 'invalid', save=True)
    def invalidate(self):
        self._invalidate()

    @transition(state, ['assigned', 'presented'], 'expired', save=True)
    def expire(self):
        self._invalidate()

    @transition(state, 'presented', 'canceled', save=True)
    def cancel(self):
        self._invalidate()

    def _invalidate(self):
        raise NotImplementedError()
# ---------------------
class TranscribeTaskManager(TaskManager):
    """Creates transcribe tasks against unlocked transcript fragments."""

    def _available_fragments(self, user, transcript, is_review, request=None):
        """Fragments eligible for a new task.

        New tasks pick 'empty' fragments; review tasks pick 'transcribed'
        ones.  With teamwork enabled, a fragment is never handed back to
        its last editor (superusers may bypass via the waffle flag).
        """
        if not is_review:
            fragments = transcript.fragments.filter(
                state='empty',
                lock_state='unlocked',
            )
        else:
            fragments = transcript.fragments.filter(
                state='transcribed',
                lock_state='unlocked',
            )
        if settings.TRANSCRIPTS_REQUIRE_TEAMWORK and not request_bypasses_teamwork(request):
            fragments = fragments.exclude(last_editor=user)
        return fragments

    def can_create(self, user, transcript, is_review, request=None):
        """True when at least one eligible fragment exists."""
        return bool(self._available_fragments(user, transcript, is_review, request).count())

    def create_next(self, user, transcript, is_review, request=None):
        """Create, lock, prepare, and assign the next transcribe task.

        Returns the task, or None when no fragment is available.
        May raise locks.LockException if the fragment lock is contended
        (the half-created task is deleted first).
        """
        fragment = self._available_fragments(user, transcript, is_review, request).first()
        if fragment is None:
            return None
        # Apply overlap so the media clip includes surrounding context.
        start = fragment.start - settings.TRANSCRIPT_FRAGMENT_OVERLAP
        end = fragment.end + settings.TRANSCRIPT_FRAGMENT_OVERLAP
        # Correct for out of bounds.
        start = max(Decimal('0.00'), start)
        end = min(transcript.length, end)
        # The task itself records the un-overlapped fragment bounds.
        task = transcript.transcribetask_set.create(
            is_review=is_review,
            media=None,
            fragment=fragment,
            start=fragment.start,
            end=fragment.end,
        )
        try:
            task.lock()
        except locks.LockException:
            # Someone else grabbed the fragment; clean up and let caller retry.
            task.delete()
            raise
        # `next_revision` (was `next`, which shadowed the builtin): the
        # working revision this task will edit.
        if not is_review:
            next_revision = fragment.revisions.create(sequence=1, editor=user)
            text = ''
        else:
            latest = fragment.revisions.latest()
            text = latest.text
            next_revision = fragment.revisions.create(
                sequence=latest.sequence + 1, editor=user)
        media, _created = transcript.media.get_or_create(
            is_processed=True,
            is_full_length=False,
            start=start,
            end=end,
        )
        task.media = media
        task.revision = next_revision
        task.text = text
        task.prepare()
        task.assign_to(user)
        return task
class TranscribeTask(Task):
    """Transcription (or transcription review) of one transcript fragment."""

    TASK_TYPE = 'transcribe'

    fragment = models.ForeignKey('TranscriptFragment', blank=True, null=True)
    revision = models.ForeignKey('TranscriptFragmentRevision',
                                 blank=True, null=True)
    text = models.TextField(blank=True, null=True)
    # Keep start and end even if `revision` goes away.
    start = models.DecimalField(max_digits=8, decimal_places=2)
    end = models.DecimalField(max_digits=8, decimal_places=2)

    objects = TranscribeTaskManager()

    class Meta:
        ordering = ('-created',)
        permissions = (
            ('add_transcribetask_review', 'Can add review transcribe task'),
        )

    def lock(self):
        """Lock the underlying fragment (may raise locks.LockException)."""
        self.fragment.lock()

    def _assign_to(self):
        pass

    def _submit(self):
        # Queue async processing; the Celery result is not needed here
        # (was assigned to an unused local -- now consistent with
        # StitchTask._submit, which also discards it).
        from .tasks import process_transcribe_task
        process_transcribe_task.delay(self.pk)

    def _validate(self):
        # Record the editor for teamwork exclusion, then free the fragment.
        self.fragment.last_editor = self.assignee
        self.fragment.unlock()

    def _invalidate(self):
        # Discard the working revision and free the fragment.
        self.revision.delete()
        self.revision = None
        self.fragment.unlock()
# ---------------------
class StitchTaskManager(TaskManager):
    """Creates stitch tasks against unlocked transcript stitches."""

    def _available_stitches(self, user, transcript, is_review, request=None):
        """Stitches eligible for a new task.

        New tasks pick 'unstitched' stitches; review tasks pick 'stitched'
        ones.  With teamwork enabled, a stitch is never handed back to its
        last editor (superusers may bypass via the waffle flag).
        """
        if not is_review:
            stitches = transcript.stitches.filter(
                state='unstitched',
                lock_state='unlocked',
            )
        else:
            stitches = transcript.stitches.filter(
                state='stitched',
                lock_state='unlocked',
            )
        if settings.TRANSCRIPTS_REQUIRE_TEAMWORK and not request_bypasses_teamwork(request):
            stitches = stitches.exclude(last_editor=user)
        return stitches

    def can_create(self, user, transcript, is_review, request=None):
        """True when at least one eligible stitch exists."""
        return bool(self._available_stitches(user, transcript, is_review, request).count())

    def create_next(self, user, transcript, is_review, request=None):
        """Create, lock, prepare, and assign the next stitch task.

        Returns the task, or None when no stitch is available.  May raise
        locks.LockException if the stitch lock is contended (the
        half-created task is deleted first).
        """
        stitch = self._available_stitches(user, transcript, is_review, request).first()
        if not stitch:
            return None
        # Apply overlap.
        start = stitch.left.start - settings.TRANSCRIPT_FRAGMENT_OVERLAP
        end = stitch.right.end + settings.TRANSCRIPT_FRAGMENT_OVERLAP
        # Correct for out of bounds.
        start = max(Decimal('0.00'), start)
        end = min(transcript.length, end)
        media, created = transcript.media.get_or_create(
            is_processed=True,
            is_full_length=False,
            start=start,
            end=end,
        )
        task = transcript.stitchtask_set.create(
            is_review=is_review,
            media=media,
            stitch=stitch,
        )
        try:
            task.lock()
        except locks.LockException:
            # Someone else grabbed the stitch; clean up and let caller retry.
            task.delete()
            raise
        if is_review:
            task.create_pairings_from_prior_task()
        task.prepare()
        task.assign_to(user)
        return task
class StitchTask(Task):
    """Stitching (or stitch review) of two adjacent transcript fragments."""

    TASK_TYPE = 'stitch'

    stitch = models.ForeignKey('TranscriptStitch', related_name='+')

    objects = StitchTaskManager()

    class Meta:
        get_latest_by = 'created'
        ordering = ('-created',)
        permissions = (
            ('add_stitchtask_review', 'Can add review stitch task'),
        )

    def lock(self):
        """Lock the underlying stitch (may raise locks.LockException)."""
        self.stitch.lock()

    def _assign_to(self):
        pass

    def _submit(self):
        # Queue async processing of the submitted stitch task.
        from .tasks import process_stitch_task
        process_stitch_task.delay(self.pk)

    def _validate(self):
        # Record the editor for teamwork exclusion, then free the stitch.
        self.stitch.last_editor = self.assignee
        self.stitch.unlock()

    def _invalidate(self):
        self.stitch.unlock()

    def create_pairings_from_prior_task(self):
        """Seed this review task's pairings from the last valid stitch task."""
        # Create StitchTaskPairings based on previous completed task.
        previous_completed_task = StitchTask.objects.filter(
            state='valid',
            stitch=self.stitch,
        ).latest()  # latest by 'created'; see Meta.get_latest_by.
        for previous_pairing in previous_completed_task.pairings.all():
            self.pairings.get_or_create(
                left=previous_pairing.left,
                right=previous_pairing.right,
            )

    def suggested_pairs(self):
        """Return a list of suggested (left, right) sentence fragment pairs."""
        suggestions = [
            # (left_sentence_fragment_id, right_sentence_fragment_id),
        ]
        stitch = self.stitch
        left_sentence_fragments = stitch.left.revisions.latest().sentence_fragments.all()
        right_sentence_fragments = stitch.right.revisions.latest().sentence_fragments.all()

        def normify(text):
            # Normalize for fuzzy matching: NFKD-decompose, drop everything
            # except word chars / whitespace / ':' / ')' / '-', lowercase,
            # then collapse whitespace runs to single spaces.
            text = unicodedata.normalize('NFKD', text)
            text = re.sub(ur'[^\w\s:\)-]', u'', text).strip().lower()
            return re.sub(ur'[ \s]+', u' ', text)

        for left_sf in left_sentence_fragments:
            left_text = left_sf.text
            left_norm = normify(left_text)
            left_words = left_norm.split(' ')
            for right_sf in right_sentence_fragments:
                right_text = right_sf.text
                if left_text.startswith('[m]') and right_text.startswith('[m]'):
                    # Both start with music; suggest.
                    suggestions.append((left_sf.id, right_sf.id))
                    continue
                right_norm = normify(right_text)
                right_words = right_norm.split(' ')  # NOTE(review): unused.
                # Suggest when some suffix of the left fragment's text is a
                # prefix of the right fragment's text (overlapping speech).
                for i in range(len(left_words)):
                    left_partial_norm = ' '.join(left_words[i:])
                    if right_norm.startswith(left_partial_norm):
                        # Potential overlap of text; suggest.
                        suggestions.append((left_sf.id, right_sf.id))
                        break
        return suggestions
class StitchTaskPairing(models.Model):
    """A single (left, right) sentence-fragment pairing recorded by a StitchTask."""

    task = models.ForeignKey('StitchTask', related_name='pairings')
    left = models.ForeignKey('SentenceFragment', related_name='+')
    right = models.ForeignKey('SentenceFragment', related_name='+')

    class Meta:
        ordering = ('left__revision__fragment__start', 'left__sequence')
        unique_together = [
            ('task', 'left',),
        ]

    def __unicode__(self):
        # Pass `self` explicitly; .format(**locals()) was fragile — it
        # silently depends on whatever names happen to be in local scope.
        return 'Pairing between "{self.left.text}" and "{self.right.text}"'.format(self=self)
# ---------------------
class CleanTaskManager(TaskManager):
    """Creates clean tasks for tidying the text of completed sentences."""

    def _available_sentences(self, user, transcript, is_review, request=None):
        """Return completed sentences available for cleaning (or clean review).

        When teamwork is required, users may not review their own edits.
        """
        if not is_review:
            sentences = transcript.sentences.filter(
                state='completed',
                clean_state='untouched',
            )
        else:
            sentences = transcript.sentences.filter(
                state='completed',
                clean_state='edited',
            )
        if settings.TRANSCRIPTS_REQUIRE_TEAMWORK and not request_bypasses_teamwork(request):
            sentences = sentences.exclude(clean_last_editor=user)
        return sentences

    def can_create(self, user, transcript, is_review, request=None):
        """True if at least one sentence is available for this user."""
        # exists() avoids a full COUNT(*) when only a boolean is needed.
        return self._available_sentences(user, transcript, is_review, request).exists()

    def create_next(self, user, transcript, is_review, request=None):
        """Create, lock, prepare, and assign the next clean task.

        Returns the new task, or None when nothing is available.
        Re-raises locks.LockException (after deleting the orphaned task)
        if the sentence is grabbed concurrently.
        """
        sentence = self._available_sentences(user, transcript, is_review, request).first()
        if sentence is None:
            return None
        media, created = transcript.media.get_or_create(
            is_processed=True,
            is_full_length=False,
            start=sentence.latest_start,
            end=sentence.latest_end,
        )
        task = transcript.cleantask_set.create(
            is_review=is_review,
            media=media,
            sentence=sentence,
            text=sentence.latest_text,
        )
        try:
            task.lock()
        except locks.LockException:
            # Someone grabbed the sentence first; discard the orphaned task.
            task.delete()
            raise
        task.prepare()
        task.assign_to(user)
        return task
class CleanTask(Task):
    """Task for cleaning up the text of a completed sentence."""

    TASK_TYPE = 'clean'

    sentence = models.ForeignKey('Sentence')
    text = models.TextField()

    objects = CleanTaskManager()

    class Meta:
        ordering = ('-created',)
        permissions = (
            ('add_cleantask_review', 'Can add review clean task'),
        )

    def lock(self):
        # Acquire the sentence's clean-specific lock.
        self.sentence.lock_clean()

    def _assign_to(self):
        # Mark the sentence as in progress for either reviewing or editing.
        self.sentence.clean_state = 'reviewing' if self.is_review else 'editing'
        self.sentence.save()

    def _submit(self):
        # Process asynchronously via celery.
        from .tasks import process_clean_task
        process_clean_task.delay(self.pk)

    def _validate(self):
        if not self.is_review:
            new_state = 'edited'
        else:
            # Compare the two most recent revisions: a review that changes
            # nothing completes the cycle; any change needs another review.
            latest, previous = self.sentence.revisions.order_by('-sequence')[:2]
            unchanged = latest.text.strip() == previous.text.strip()
            new_state = 'reviewed' if unchanged else 'edited'
        self.sentence.clean_state = new_state
        self.sentence.clean_last_editor = self.assignee
        self.sentence.unlock_clean()
        self.sentence.save()
        self.finish_transcript_if_all_tasks_complete()

    def _invalidate(self):
        # Roll the sentence back to its pre-task state and release the lock.
        self.sentence.clean_state = 'edited' if self.is_review else 'untouched'
        self.sentence.unlock_clean()
        self.sentence.save()
# ---------------------
class BoundaryTaskManager(TaskManager):
    """Creates boundary tasks for setting sentence start/end times."""

    def _available_sentences(self, user, transcript, is_review, request=None):
        """Return completed sentences available for boundary-setting (or review).

        When teamwork is required, users may not review their own edits.
        """
        if not is_review:
            sentences = transcript.sentences.filter(
                state='completed',
                boundary_state='untouched',
            )
        else:
            sentences = transcript.sentences.filter(
                state='completed',
                boundary_state='edited',
            )
        if settings.TRANSCRIPTS_REQUIRE_TEAMWORK and not request_bypasses_teamwork(request):
            sentences = sentences.exclude(boundary_last_editor=user)
        return sentences

    def can_create(self, user, transcript, is_review, request=None):
        """True if at least one sentence is available for this user."""
        # exists() avoids a full COUNT(*) when only a boolean is needed.
        return self._available_sentences(user, transcript, is_review, request).exists()

    def create_next(self, user, transcript, is_review, request=None):
        """Create, lock, prepare, and assign the next boundary task.

        Returns the new task, or None when nothing is available.
        Re-raises locks.LockException (after deleting the orphaned task)
        if the sentence is grabbed concurrently.
        """
        sentence = self._available_sentences(user, transcript, is_review, request).first()
        if sentence is None:
            return None
        # The media excerpt always spans the sentence's fragments, padded
        # with overlap for listening context, clamped to the transcript.
        media_start = sentence.fragments.first().revision.fragment.start - settings.TRANSCRIPT_FRAGMENT_OVERLAP
        media_end = sentence.fragments.last().revision.fragment.end + settings.TRANSCRIPT_FRAGMENT_OVERLAP
        media_start = max(Decimal(0), media_start)
        media_end = min(transcript.length, media_end)
        if not is_review:
            # Apply overlap.
            start = sentence.latest_start - settings.TRANSCRIPT_FRAGMENT_OVERLAP
            end = sentence.latest_end + settings.TRANSCRIPT_FRAGMENT_OVERLAP
            # Correct for out of bounds.
            start = max(Decimal('0.00'), start)
            end = min(transcript.length, end)
            # Find a suggested sentence start:
            # Look for the end of the most recently bounded sentence
            # (ending within the maximum region of this sentence)
            # to try to predict where this sentence will start.
            bounded_sentences = transcript.sentences.completed().filter(
                boundary_state__in=['edited', 'reviewed'],
                latest_end__gt=media_start,
                latest_end__lt=media_end,
            )
            if bounded_sentences.exists():
                latest_bounded = bounded_sentences.order_by('-latest_end')[0]
                # Suggest the end of the last sentence as the start of this one...
                suggested_start = latest_bounded.latest_end
                # ...but only if it comes after the default starting position...
                suggested_start = max(suggested_start, start)
                # ...and only if the calculated starting position comes before
                # the default ending position.
                if suggested_start > end:
                    suggested_start = start
                start = suggested_start
        else:
            # Reviews pass through the previously chosen boundaries.
            start = sentence.latest_start
            end = sentence.latest_end
        media, created = transcript.media.get_or_create(
            is_processed=True,
            is_full_length=False,
            start=media_start,
            end=media_end,
        )
        task = transcript.boundarytask_set.create(
            is_review=is_review,
            media=media,
            sentence=sentence,
            start=start,
            end=end,
        )
        try:
            task.lock()
        except locks.LockException:
            # Someone grabbed the sentence first; discard the orphaned task.
            task.delete()
            raise
        task.prepare()
        task.assign_to(user)
        return task
class BoundaryTask(Task):
    """Task for marking the start/end time boundaries of a sentence."""

    TASK_TYPE = 'boundary'

    sentence = models.ForeignKey('Sentence')
    start = models.DecimalField(max_digits=8, decimal_places=2)
    end = models.DecimalField(max_digits=8, decimal_places=2)

    objects = BoundaryTaskManager()

    class Meta:
        ordering = ('-created',)
        permissions = (
            ('add_boundarytask_review', 'Can add review boundary task'),
        )

    def lock(self):
        # Acquire the sentence's boundary-specific lock.
        self.sentence.lock_boundary()

    def _assign_to(self):
        # Mark the sentence as in progress for either reviewing or editing.
        self.sentence.boundary_state = 'reviewing' if self.is_review else 'editing'
        self.sentence.save()

    def _submit(self):
        # Intentionally empty: submission is handled synchronously by the
        # `process_boundary_task_synchronously_on_submit` receiver below.
        pass

    def _validate(self):
        if not self.is_review:
            new_state = 'edited'
        else:
            # Compare the two most recent boundaries: an unchanged review
            # completes the cycle; any change needs another review.
            latest, previous = self.sentence.boundaries.order_by('-sequence')[:2]
            unchanged = (latest.start, latest.end) == (previous.start, previous.end)
            new_state = 'reviewed' if unchanged else 'edited'
        self.sentence.boundary_state = new_state
        self.sentence.boundary_last_editor = self.assignee
        self.sentence.unlock_boundary()
        self.sentence.save()
        self.finish_transcript_if_all_tasks_complete()

    def _invalidate(self):
        # Roll the sentence back to its pre-task state and release the lock.
        self.sentence.boundary_state = 'edited' if self.is_review else 'untouched'
        self.sentence.unlock_boundary()
        self.sentence.save()
# NOTE: Calls to _submit are normally processed as a celery task,
# but we are interested in getting very quick feedback for new tasks
# as far as predicting the next sentence start.
#
# Therefore, we are running it synchronously here:
#
@receiver(post_transition, sender=BoundaryTask)
def process_boundary_task_synchronously_on_submit(instance, target, **kwargs):
    """Run boundary-task processing inline when a task reaches 'submitted'."""
    if target != 'submitted':
        return
    from .tasks import process_boundary_task
    process_boundary_task(instance.pk)
# ---------------------
class SpeakerTaskManager(TaskManager):
    """Creates speaker tasks for identifying who spoke each sentence."""

    def _available_sentences(self, user, transcript, is_review, request=None):
        """Return completed sentences available for speaker identification.

        When teamwork is required, users may not review their own edits.
        """
        if not is_review:
            sentences = transcript.sentences.filter(
                state='completed',
                speaker_state='untouched',
            )
        else:
            sentences = transcript.sentences.filter(
                state='completed',
                speaker_state='edited',
            )
        if settings.TRANSCRIPTS_REQUIRE_TEAMWORK and not request_bypasses_teamwork(request):
            sentences = sentences.exclude(speaker_last_editor=user)
        return sentences

    def can_create(self, user, transcript, is_review, request=None):
        """True if at least one sentence is available for this user."""
        # exists() avoids a full COUNT(*) when only a boolean is needed.
        return self._available_sentences(user, transcript, is_review, request).exists()

    def create_next(self, user, transcript, is_review, request=None):
        """Create, lock, prepare, and assign the next speaker task.

        Returns the new task, or None when nothing is available.
        Re-raises locks.LockException (after deleting the orphaned task)
        if the sentence is grabbed concurrently.
        """
        sentence = self._available_sentences(user, transcript, is_review, request).first()
        if sentence is None:
            return None
        # (An unused `start = sentence.latest_start` local was removed.)
        media, created = transcript.media.get_or_create(
            is_processed=True,
            is_full_length=False,
            start=sentence.latest_start,
            end=sentence.latest_end,
        )
        task = transcript.speakertask_set.create(
            is_review=is_review,
            media=media,
            sentence=sentence,
            speaker=sentence.latest_speaker,
        )
        try:
            task.lock()
        except locks.LockException:
            # Someone grabbed the sentence first; discard the orphaned task.
            task.delete()
            raise
        task.prepare()
        task.assign_to(user)
        return task
class SpeakerTask(Task):
    """Task for identifying the speaker of a sentence."""

    TASK_TYPE = 'speaker'

    sentence = models.ForeignKey('Sentence')
    speaker = models.ForeignKey('Speaker', blank=True, null=True)
    new_name = models.CharField(max_length=100, blank=True, null=True)

    objects = SpeakerTaskManager()

    class Meta:
        ordering = ('-created',)
        permissions = (
            ('add_speakertask_review', 'Can add review speaker task'),
        )

    def lock(self):
        # Acquire the sentence's speaker-specific lock.
        self.sentence.lock_speaker()

    def _assign_to(self):
        # Mark the sentence as in progress for either reviewing or editing.
        self.sentence.speaker_state = 'reviewing' if self.is_review else 'editing'
        self.sentence.save()

    def _submit(self):
        # Process asynchronously via celery.
        from .tasks import process_speaker_task
        process_speaker_task.delay(self.pk)

    def _validate(self):
        if not self.is_review:
            new_state = 'edited'
        else:
            # Compare against the task created just before this one.
            prior_task = self.sentence.speakertask_set.order_by('-created')[1]
            if self.speaker == prior_task.speaker:
                # No more changes; finished reviewing.
                new_state = 'reviewed'
            else:
                # Speaker changed; need to review again.
                new_state = 'edited'
        self.sentence.speaker_state = new_state
        self.sentence.speaker_last_editor = self.assignee
        self.sentence.unlock_speaker()
        self.sentence.save()
        self.finish_transcript_if_all_tasks_complete()

    def _invalidate(self):
        # Roll the sentence back to its pre-task state and release the lock.
        self.sentence.speaker_state = 'edited' if self.is_review else 'untouched'
        self.sentence.unlock_speaker()
        self.sentence.save()
# ---------------------
# Mapping of task types to model classes, derived from each class's own
# TASK_TYPE attribute so the key can never drift out of sync.
TASK_MODEL = dict(
    (cls.TASK_TYPE, cls)
    for cls in (TranscribeTask, StitchTask, CleanTask, BoundaryTask, SpeakerTask)
)
def track_task_presentation_stats(instance, target, **kwargs):
    """Stamp tasks with timezone-aware timestamps as they transition.

    'presented' stamps `presented_at`; 'valid' stamps `validated_at`;
    other transitions are ignored.
    """
    timestamp = datetime.datetime.utcnow().replace(tzinfo=utc)
    field_name = {
        'presented': 'presented_at',
        'valid': 'validated_at',
    }.get(target)
    if field_name is not None:
        setattr(instance, field_name, timestamp)


# Register the tracker against every concrete task model.
for _task_model_class in TASK_MODEL.values():
    receiver(pre_transition, sender=_task_model_class)(track_task_presentation_stats)
# ---------------------
@receiver(user_signed_up)
def add_to_workers_group(sender, request, user, **kwargs):
    """Add a newly signed-up user to the `workers` group.

    The `workers` group has permissions to perform many non-review tasks.
    A missing group is logged and ignored so signup itself never fails.

    :type user: django.contrib.auth.models.User
    """
    try:
        group = Group.objects.get(name='workers')
    except Group.DoesNotExist:
        log.warning('"workers" group does not exist')
    else:
        user.groups.add(group)
|
inonitz/bruhOS | Font Shit/Consolas12_bmp.c | <reponame>inonitz/bruhOS
//
// Bitmap font C source generated by bmfont2c.py
//
#include <stdint.h>
#include "Consolas12_bmp.h"
static char_t const Consolas12_Bitmaps[1330] =
{
// ASCII: 32, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 33, char width: 6
0x00, // ------..
0x00, // ------..
0x10, // ---O--..
0x30, // --OO--..
0x30, // --OO--..
0x30, // --OO--..
0x10, // ---O--..
0x10, // ---O--..
0x00, // ------..
0x30, // --OO--..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 34, char width: 6
0x00, // ------..
0x00, // ------..
0x48, // -O--O-..
0x48, // -O--O-..
0x48, // -O--O-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 35, char width: 7
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x28, // --O-O--.
0x28, // --O-O--.
0x7c, // -OOOOO-.
0x28, // --O-O--.
0x28, // --O-O--.
0x7c, // -OOOOO-.
0x48, // -O--O--.
0x40, // -O-----.
0x00, // -------.
0x00, // -------.
0x00, // -------.
// ASCII: 36, char width: 6
0x00, // ------..
0x00, // ------..
0x10, // ---O--..
0x30, // --OO--..
0xf0, // OOOO--..
0x80, // O-----..
0x60, // -OO---..
0x38, // --OOO-..
0x28, // --O-O-..
0x28, // --O-O-..
0xf0, // OOOO--..
0x20, // --O---..
0x00, // ------..
0x00, // ------..
// ASCII: 37, char width: 7
0x00, // -------.
0x00, // -------.
0x64, // -OO--O-.
0xa4, // O-O--O-.
0xa8, // O-O-O--.
0x50, // -O-O---.
0x10, // ---O---.
0x2c, // --O-OO-.
0x54, // -O-O-O-.
0x54, // -O-O-O-.
0x08, // ----O--.
0x00, // -------.
0x00, // -------.
0x00, // -------.
// ASCII: 38, char width: 7
0x00, // -------.
0x00, // -------.
0x30, // --OO---.
0x58, // -O-OO--.
0x58, // -O-OO--.
0x70, // -OOO---.
0x64, // -OO--O-.
0x9c, // O--OOO-.
0x88, // O---O--.
0xfc, // OOOOOO-.
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x00, // -------.
// ASCII: 39, char width: 6
0x00, // ------..
0x00, // ------..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 40, char width: 6
0x00, // ------..
0x00, // ------..
0x10, // ---O--..
0x10, // ---O--..
0x20, // --O---..
0x20, // --O---..
0x40, // -O----..
0x40, // -O----..
0x40, // -O----..
0x60, // -OO---..
0x20, // --O---..
0x30, // --OO--..
0x10, // ---O--..
0x00, // ------..
// ASCII: 41, char width: 6
0x00, // ------..
0x00, // ------..
0x40, // -O----..
0x20, // --O---..
0x30, // --OO--..
0x10, // ---O--..
0x10, // ---O--..
0x10, // ---O--..
0x10, // ---O--..
0x10, // ---O--..
0x10, // ---O--..
0x20, // --O---..
0x40, // -O----..
0x00, // ------..
// ASCII: 42, char width: 6
0x00, // ------..
0x00, // ------..
0x20, // --O---..
0x28, // --O-O-..
0x70, // -OOO--..
0x78, // -OOOO-..
0x20, // --O---..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 43, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x10, // ---O--..
0x30, // --OO--..
0x78, // -OOOO-..
0x10, // ---O--..
0x10, // ---O--..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 44, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x30, // --OO--..
0x30, // --OO--..
0x10, // ---O--..
0x60, // -OO---..
0x00, // ------..
// ASCII: 45, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x78, // -OOOO-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 46, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x30, // --OO--..
0x30, // --OO--..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 47, char width: 6
0x00, // ------..
0x00, // ------..
0x08, // ----O-..
0x08, // ----O-..
0x10, // ---O--..
0x10, // ---O--..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x40, // -O----..
0x40, // -O----..
0x80, // O-----..
0x00, // ------..
0x00, // ------..
// ASCII: 48, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x30, // --OO--..
0x48, // -O--O-..
0xcc, // OO--OO..
0x9c, // O--OOO..
0xe4, // OOO--O..
0xc4, // OO---O..
0x48, // -O--O-..
0x78, // -OOOO-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 49, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x20, // --O---..
0xe0, // OOO---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0xf8, // OOOOO-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 50, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x70, // -OOO--..
0x98, // O--OO-..
0x08, // ----O-..
0x18, // ---OO-..
0x10, // ---O--..
0x20, // --O---..
0x40, // -O----..
0xf8, // OOOOO-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 51, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x70, // -OOO--..
0x18, // ---OO-..
0x18, // ---OO-..
0x10, // ---O--..
0x30, // --OO--..
0x08, // ----O-..
0x08, // ----O-..
0xf0, // OOOO--..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 52, char width: 7
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x18, // ---OO--.
0x18, // ---OO--.
0x28, // --O-O--.
0x48, // -O--O--.
0x48, // -O--O--.
0xfc, // OOOOOO-.
0x08, // ----O--.
0x08, // ----O--.
0x00, // -------.
0x00, // -------.
0x00, // -------.
// ASCII: 53, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x78, // -OOOO-..
0x40, // -O----..
0x40, // -O----..
0x78, // -OOOO-..
0x0c, // ----OO..
0x04, // -----O..
0x08, // ----O-..
0x70, // -OOO--..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 54, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x38, // --OOO-..
0x40, // -O----..
0x80, // O-----..
0xf8, // OOOOO-..
0x88, // O---O-..
0x88, // O---O-..
0x48, // -O--O-..
0x70, // -OOO--..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 55, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0xf8, // OOOOO-..
0x08, // ----O-..
0x08, // ----O-..
0x10, // ---O--..
0x10, // ---O--..
0x20, // --O---..
0x20, // --O---..
0x40, // -O----..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 56, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x30, // --OO--..
0xc8, // OO--O-..
0xc8, // OO--O-..
0x78, // -OOOO-..
0x78, // -OOOO-..
0xc8, // OO--O-..
0x88, // O---O-..
0x78, // -OOOO-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 57, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x30, // --OO--..
0xc8, // OO--O-..
0x88, // O---O-..
0x88, // O---O-..
0x78, // -OOOO-..
0x08, // ----O-..
0x18, // ---OO-..
0x70, // -OOO--..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 58, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x10, // ---O--..
0x30, // --OO--..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x30, // --OO--..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 59, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x10, // ---O--..
0x30, // --OO--..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x30, // --OO--..
0x10, // ---O--..
0x60, // -OO---..
0x00, // ------..
0x00, // ------..
// ASCII: 60, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x10, // ---O--..
0x30, // --OO--..
0x40, // -O----..
0xc0, // OO----..
0x60, // -OO---..
0x10, // ---O--..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 61, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0xf8, // OOOOO-..
0x00, // ------..
0xf8, // OOOOO-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 62, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x30, // --OO--..
0x18, // ---OO-..
0x08, // ----O-..
0x10, // ---O--..
0x20, // --O---..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 63, char width: 6
0x00, // ------..
0x00, // ------..
0x60, // -OO---..
0x10, // ---O--..
0x18, // ---OO-..
0x18, // ---OO-..
0x30, // --OO--..
0x20, // --O---..
0x00, // ------..
0x60, // -OO---..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 64, char width: 7
0x00, // -------.
0x00, // -------.
0x18, // ---OO--.
0x44, // -O---O-.
0x44, // -O---O-.
0x98, // O--OO--.
0xaa, // O-O-O-O.
0xa8, // O-O-O--.
0xac, // O-O-OO-.
0xbc, // O-OOOO-.
0x80, // O------.
0x40, // -O-----.
0x78, // -OOOO--.
0x00, // -------.
// ASCII: 65, char width: 7
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x30, // --OO---.
0x30, // --OO---.
0x28, // --O-O--.
0x68, // -OO-O--.
0x48, // -O--O--.
0x7c, // -OOOOO-.
0xc4, // OO---O-.
0x84, // O----O-.
0x00, // -------.
0x00, // -------.
0x00, // -------.
// ASCII: 66, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0xf0, // OOOO--..
0x88, // O---O-..
0x88, // O---O-..
0xf0, // OOOO--..
0x98, // O--OO-..
0x88, // O---O-..
0x88, // O---O-..
0xf0, // OOOO--..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 67, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x38, // --OOO-..
0x40, // -O----..
0x80, // O-----..
0x80, // O-----..
0x80, // O-----..
0x80, // O-----..
0xc0, // OO----..
0x78, // -OOOO-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 68, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0xf0, // OOOO--..
0x88, // O---O-..
0x8c, // O---OO..
0x84, // O----O..
0x84, // O----O..
0x88, // O---O-..
0x98, // O--OO-..
0xf0, // OOOO--..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 69, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x78, // -OOOO-..
0x40, // -O----..
0x40, // -O----..
0x78, // -OOOO-..
0x40, // -O----..
0x40, // -O----..
0x40, // -O----..
0x78, // -OOOO-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 70, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x78, // -OOOO-..
0x40, // -O----..
0x40, // -O----..
0x78, // -OOOO-..
0x60, // -OO---..
0x40, // -O----..
0x40, // -O----..
0x40, // -O----..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 71, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x38, // --OOO-..
0x60, // -OO---..
0xc0, // OO----..
0x80, // O-----..
0x9c, // O--OOO..
0x84, // O----O..
0x44, // -O---O..
0x7c, // -OOOOO..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 72, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x88, // O---O-..
0x88, // O---O-..
0x88, // O---O-..
0xf8, // OOOOO-..
0x88, // O---O-..
0x88, // O---O-..
0x88, // O---O-..
0x80, // O-----..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 73, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0xf8, // OOOOO-..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0xf8, // OOOOO-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 74, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x78, // -OOOO-..
0x08, // ----O-..
0x08, // ----O-..
0x08, // ----O-..
0x08, // ----O-..
0x08, // ----O-..
0x08, // ----O-..
0x70, // -OOO--..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 75, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x88, // O---O-..
0x90, // O--O--..
0xa0, // O-O---..
0xc0, // OO----..
0xe0, // OOO---..
0xb0, // O-OO--..
0x90, // O--O--..
0x88, // O---O-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 76, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x40, // -O----..
0x40, // -O----..
0x40, // -O----..
0x40, // -O----..
0x40, // -O----..
0x40, // -O----..
0x40, // -O----..
0x78, // -OOOO-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 77, char width: 7
0x00, // -------.
0x00, // -------.
0x00, // -------.
0xcc, // OO--OO-.
0xcc, // OO--OO-.
0xfc, // OOOOOO-.
0xb4, // O-OO-O-.
0xb4, // O-OO-O-.
0x84, // O----O-.
0x84, // O----O-.
0x84, // O----O-.
0x00, // -------.
0x00, // -------.
0x00, // -------.
// ASCII: 78, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0xc8, // OO--O-..
0xc8, // OO--O-..
0xe8, // OOO-O-..
0xa8, // O-O-O-..
0x98, // O--OO-..
0x98, // O--OO-..
0x98, // O--OO-..
0x88, // O---O-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 79, char width: 7
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x30, // --OO---.
0x48, // -O--O--.
0x84, // O----O-.
0x84, // O----O-.
0x84, // O----O-.
0x84, // O----O-.
0xcc, // OO--OO-.
0x78, // -OOOO--.
0x00, // -------.
0x00, // -------.
0x00, // -------.
// ASCII: 80, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0xf0, // OOOO--..
0x88, // O---O-..
0x88, // O---O-..
0x88, // O---O-..
0xf0, // OOOO--..
0x80, // O-----..
0x80, // O-----..
0x80, // O-----..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 81, char width: 7
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x30, // --OO---.
0x48, // -O--O--.
0x84, // O----O-.
0x84, // O----O-.
0x84, // O----O-.
0x84, // O----O-.
0xcc, // OO--OO-.
0x78, // -OOOO--.
0x10, // ---O---.
0x1c, // ---OOO-.
0x00, // -------.
// ASCII: 82, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0xf0, // OOOO--..
0x98, // O--OO-..
0x88, // O---O-..
0xf0, // OOOO--..
0xf0, // OOOO--..
0x90, // O--O--..
0x88, // O---O-..
0x88, // O---O-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 83, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x38, // --OOO-..
0xc0, // OO----..
0x80, // O-----..
0x60, // -OO---..
0x38, // --OOO-..
0x08, // ----O-..
0x08, // ----O-..
0xf8, // OOOOO-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 84, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0xfc, // OOOOOO..
0x10, // ---O--..
0x10, // ---O--..
0x10, // ---O--..
0x10, // ---O--..
0x10, // ---O--..
0x10, // ---O--..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 85, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x88, // O---O-..
0x88, // O---O-..
0x88, // O---O-..
0x88, // O---O-..
0x88, // O---O-..
0x88, // O---O-..
0xc8, // OO--O-..
0x70, // -OOO--..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 86, char width: 7
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x84, // O----O-.
0xc4, // OO---O-.
0x44, // -O---O-.
0x48, // -O--O--.
0x68, // -OO-O--.
0x28, // --O-O--.
0x30, // --OO---.
0x10, // ---O---.
0x00, // -------.
0x00, // -------.
0x00, // -------.
// ASCII: 87, char width: 7
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x84, // O----O-.
0x84, // O----O-.
0x84, // O----O-.
0xb4, // O-OO-O-.
0xb4, // O-OO-O-.
0xfc, // OOOOOO-.
0xcc, // OO--OO-.
0x48, // -O--O--.
0x00, // -------.
0x00, // -------.
0x00, // -------.
// ASCII: 88, char width: 7
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x44, // -O---O-.
0x48, // -O--O--.
0x38, // --OOO--.
0x30, // --OO---.
0x30, // --OO---.
0x28, // --O-O--.
0x4c, // -O--OO-.
0x84, // O----O-.
0x00, // -------.
0x00, // -------.
0x00, // -------.
// ASCII: 89, char width: 7
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x84, // O----O-.
0x44, // -O---O-.
0x68, // -OO-O--.
0x38, // --OOO--.
0x30, // --OO---.
0x10, // ---O---.
0x10, // ---O---.
0x10, // ---O---.
0x00, // -------.
0x00, // -------.
0x00, // -------.
// ASCII: 90, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0xf8, // OOOOO-..
0x08, // ----O-..
0x10, // ---O--..
0x30, // --OO--..
0x20, // --O---..
0x40, // -O----..
0xc0, // OO----..
0xf8, // OOOOO-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 91, char width: 6
0x00, // ------..
0x00, // ------..
0x38, // --OOO-..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x38, // --OOO-..
0x00, // ------..
// ASCII: 92, char width: 6
0x00, // ------..
0x00, // ------..
0x80, // O-----..
0x40, // -O----..
0x40, // -O----..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x10, // ---O--..
0x10, // ---O--..
0x08, // ----O-..
0x08, // ----O-..
0x00, // ------..
0x00, // ------..
// ASCII: 93, char width: 6
0x00, // ------..
0x00, // ------..
0x70, // -OOO--..
0x10, // ---O--..
0x10, // ---O--..
0x10, // ---O--..
0x10, // ---O--..
0x10, // ---O--..
0x10, // ---O--..
0x10, // ---O--..
0x10, // ---O--..
0x10, // ---O--..
0x70, // -OOO--..
0x00, // ------..
// ASCII: 94, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x20, // --O---..
0x10, // ---O--..
0x48, // -O--O-..
0x08, // ----O-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 95, char width: 7
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x00, // -------.
0xfe, // OOOOOOO.
0x00, // -------.
// ASCII: 96, char width: 6
0x00, // ------..
0x00, // ------..
0x20, // --O---..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 97, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x70, // -OOO--..
0x18, // ---OO-..
0x08, // ----O-..
0xf8, // OOOOO-..
0x88, // O---O-..
0xf8, // OOOOO-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 98, char width: 6
0x00, // ------..
0x00, // ------..
0x80, // O-----..
0x80, // O-----..
0x80, // O-----..
0xf8, // OOOOO-..
0x88, // O---O-..
0x88, // O---O-..
0x88, // O---O-..
0xd8, // OO-OO-..
0x60, // -OO---..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 99, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x78, // -OOOO-..
0x40, // -O----..
0x80, // O-----..
0x80, // O-----..
0xc0, // OO----..
0x78, // -OOOO-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 100, char width: 6
0x00, // ------..
0x00, // ------..
0x08, // ----O-..
0x08, // ----O-..
0x08, // ----O-..
0x48, // -O--O-..
0x88, // O---O-..
0x88, // O---O-..
0x88, // O---O-..
0xd8, // OO-OO-..
0x20, // --O---..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 101, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x30, // --OO--..
0x48, // -O--O-..
0x88, // O---O-..
0xf8, // OOOOO-..
0x80, // O-----..
0x78, // -OOOO-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 102, char width: 6
0x00, // ------..
0x00, // ------..
0x1c, // ---OOO..
0x20, // --O---..
0x20, // --O---..
0x78, // -OOOO-..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 103, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x38, // --OOO-..
0x48, // -O--O-..
0x88, // O---O-..
0x58, // -O-OO-..
0x80, // O-----..
0xf8, // OOOOO-..
0x8c, // O---OO..
0x88, // O---O-..
0x70, // -OOO--..
0x00, // ------..
// ASCII: 104, char width: 6
0x00, // ------..
0x00, // ------..
0x80, // O-----..
0x80, // O-----..
0x80, // O-----..
0xf8, // OOOOO-..
0x88, // O---O-..
0x88, // O---O-..
0x88, // O---O-..
0x88, // O---O-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 105, char width: 6
0x00, // ------..
0x00, // ------..
0x20, // --O---..
0x20, // --O---..
0x00, // ------..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 106, char width: 6
0x00, // ------..
0x00, // ------..
0x10, // ---O--..
0x10, // ---O--..
0x00, // ------..
0x10, // ---O--..
0x10, // ---O--..
0x10, // ---O--..
0x10, // ---O--..
0x10, // ---O--..
0x10, // ---O--..
0x10, // ---O--..
0xe0, // OOO---..
0x00, // ------..
// ASCII: 107, char width: 7
0x00, // -------.
0x00, // -------.
0x40, // -O-----.
0x40, // -O-----.
0x40, // -O-----.
0x48, // -O--O--.
0x50, // -O-O---.
0x70, // -OOO---.
0x50, // -O-O---.
0x48, // -O--O--.
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x00, // -------.
// ASCII: 108, char width: 6
0x00, // ------..
0x00, // ------..
0x60, // -OO---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 109, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0xa8, // O-O-O-..
0xfc, // OOOOOO..
0xa4, // O-O--O..
0xa4, // O-O--O..
0xa4, // O-O--O..
0xa4, // O-O--O..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 110, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0xb0, // O-OO--..
0xc8, // OO--O-..
0x88, // O---O-..
0x88, // O---O-..
0x88, // O---O-..
0x88, // O---O-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 111, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x30, // --OO--..
0x48, // -O--O-..
0x88, // O---O-..
0x8c, // O---OO..
0x88, // O---O-..
0x78, // -OOOO-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 112, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0xb0, // O-OO--..
0xc8, // OO--O-..
0x88, // O---O-..
0x88, // O---O-..
0x88, // O---O-..
0xf0, // OOOO--..
0x80, // O-----..
0x80, // O-----..
0x80, // O-----..
0x00, // ------..
// ASCII: 113, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x38, // --OOO-..
0x48, // -O--O-..
0x88, // O---O-..
0x88, // O---O-..
0x88, // O---O-..
0x78, // -OOOO-..
0x08, // ----O-..
0x08, // ----O-..
0x08, // ----O-..
0x00, // ------..
// ASCII: 114, char width: 7
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x58, // -O-OO--.
0x64, // -OO--O-.
0x44, // -O---O-.
0x40, // -O-----.
0x40, // -O-----.
0x40, // -O-----.
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x00, // -------.
// ASCII: 115, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x38, // --OOO-..
0x40, // -O----..
0x60, // -OO---..
0x38, // --OOO-..
0x0c, // ----OO..
0x78, // -OOOO-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 116, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x20, // --O---..
0x20, // --O---..
0xf8, // OOOOO-..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x38, // --OOO-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 117, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x88, // O---O-..
0x88, // O---O-..
0x88, // O---O-..
0x88, // O---O-..
0x98, // O--OO-..
0x78, // -OOOO-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 118, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x84, // O----O..
0x48, // -O--O-..
0x48, // -O--O-..
0x68, // -OO-O-..
0x30, // --OO--..
0x30, // --OO--..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 119, char width: 7
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x00, // -------.
0x84, // O----O-.
0x84, // O----O-.
0x54, // -O-O-O-.
0x74, // -OOO-O-.
0x6c, // -OO-OO-.
0x4c, // -O--OO-.
0x00, // -------.
0x00, // -------.
0x00, // -------.
// ASCII: 120, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x4c, // -O--OO..
0x68, // -OO-O-..
0x30, // --OO--..
0x30, // --OO--..
0x68, // -OO-O-..
0x4c, // -O--OO..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 121, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x84, // O----O..
0x48, // -O--O-..
0x48, // -O--O-..
0x68, // -OO-O-..
0x30, // --OO--..
0x30, // --OO--..
0x20, // --O---..
0x60, // -OO---..
0x00, // ------..
// ASCII: 122, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0xf8, // OOOOO-..
0x10, // ---O--..
0x30, // --OO--..
0x20, // --O---..
0x40, // -O----..
0xf8, // OOOOO-..
0x00, // ------..
0x00, // ------..
0x00, // ------..
// ASCII: 123, char width: 6
0x00, // ------..
0x00, // ------..
0x10, // ---O--..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0xc0, // OO----..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x30, // --OO--..
0x00, // ------..
// ASCII: 124, char width: 6
0x00, // ------..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x20, // --O---..
0x00, // ------..
// ASCII: 125, char width: 6
0x00, // ------..
0x00, // ------..
0x60, // -OO---..
0x10, // ---O--..
0x10, // ---O--..
0x10, // ---O--..
0x10, // ---O--..
0x18, // ---OO-..
0x10, // ---O--..
0x10, // ---O--..
0x10, // ---O--..
0x10, // ---O--..
0x60, // -OO---..
0x00, // ------..
// ASCII: 126, char width: 6
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x64, // -OO--O..
0x9c, // O--OOO..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
0x00, // ------..
};
/*
 * Descriptor for the 12 px Consolas font whose glyph bitmaps are stored in
 * Consolas12_Bitmaps above: 95 glyphs covering ASCII 32..126, each stored
 * as 1 byte per row and 14 rows per glyph.
 *
 * NOTE(review): the per-glyph comments above show widths of 6 and 7 pixels,
 * yet the fixed-width flag here is 1 -- verify which the renderer honours.
 * NOTE(review): the (void*)0 slot is presumably an optional per-glyph width
 * table, unused for a fixed-width font -- confirm against the fontStyle_t
 * declaration.
 */
fontStyle_t FontStyle_Consolas12 =
{
    95,                 // Glyph count
    32,                 // First ascii code
    1,                  // Glyph width (bytes)
    14,                 // Glyph height (bytes)
    1,                  // Fixed width or 0 if variable
    (void*)0,
    Consolas12_Bitmaps
};
|
JayLDoherty/VOOGASalad | src/engine/actions/regular_actions/SetXSpeedAction.java | package engine.actions.regular_actions;
import engine.Parameter;
import engine.actions.Action;
/**
* Sets the x speed of the associated Entity to the value of the Parameter "X
* Speed"
*
* @author <NAME>
*
*/
public class SetXSpeedAction extends Action {
public SetXSpeedAction() {
addParam(new Parameter(getResource("XSpeed"), Double.class, 0.0));
}
@Override
public void act() {
getEntity().setXSpeed((Double) getParam(getResource("XSpeed")));
}
} |
Knitter/nb-playframework | src/main/java/com/qualixium/playnb/nodes/SourceFolderFilterNode.java | package com.qualixium.playnb.nodes;
import com.qualixium.playnb.PlayProject;
import com.qualixium.playnb.nodes.testfile.TestFileFilterNode;
import java.awt.Image;
import org.netbeans.api.annotations.common.StaticResource;
import org.openide.filesystems.FileObject;
import org.openide.nodes.FilterNode;
import org.openide.nodes.Node;
import org.openide.util.ImageUtilities;
/**
 * FilterNode that renders project folders with the package icon and wraps
 * child files in source/test specific filter nodes.
 */
public class SourceFolderFilterNode extends FilterNode {

    @StaticResource
    public static final String PACKAGE_IMAGE_PATH = "com/qualixium/playnb/project/package.png";
    public static final Image PACKAGE_IMAGE = ImageUtilities.loadImage(PACKAGE_IMAGE_PATH);

    public SourceFolderFilterNode(Node original, PlayProject playProject) {
        super(original, new ProxyChildren(original, playProject));
    }

    @Override
    public Image getOpenedIcon(int type) {
        return PACKAGE_IMAGE;
    }

    @Override
    public Image getIcon(int type) {
        return PACKAGE_IMAGE;
    }

    /** Wraps each child node according to the kind of file it represents. */
    static class ProxyChildren extends FilterNode.Children {

        private final PlayProject playProject;

        public ProxyChildren(Node owner, PlayProject playProject) {
            super(owner);
            this.playProject = playProject;
        }

        @Override
        protected Node copyNode(Node original) {
            // lookup(Class) is already typed; the previous explicit cast was redundant.
            FileObject fileObject = original.getLookup().lookup(FileObject.class);
            if (fileObject == null) {
                // Nodes without a backing file cannot be specialized; keep them as-is
                // (previously this would have thrown a NullPointerException).
                return original.cloneNode();
            }
            if (fileObject.isFolder()) {
                return new SourceFolderFilterNode(original, playProject);
            }
            String ext = fileObject.getExt();
            if (ext.equals("java") || ext.equals("scala")) {
                FileObject testRoot = playProject.getProjectDirectory().getFileObject("test");
                // Files under the project's "test" folder get the test-file node.
                if (testRoot != null && fileObject.getPath().contains(testRoot.getPath())) {
                    return new TestFileFilterNode(original, playProject);
                }
                return new SourceFileFilterNode(original);
            }
            return original.cloneNode();
        }
    }
}
|
CaelestisZ/HeraQ | drivers/muic/universal/muic_usb.c | <gh_stars>0
/*
* muic_ccic.c
*
* Copyright (C) 2014 Samsung Electronics
* <NAME> <<EMAIL>>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
*/
#include <linux/gpio.h>
#include <linux/i2c.h>
#include <linux/interrupt.h>
#include <linux/slab.h>
#include <linux/platform_device.h>
#include <linux/module.h>
#include <linux/delay.h>
#include <linux/host_notify.h>
#include <linux/string.h>
#if defined (CONFIG_OF)
#include <linux/of_device.h>
#include <linux/of_gpio.h>
#endif
#include <linux/muic/muic.h>
#if defined(CONFIG_MUIC_NOTIFIER)
#include <linux/muic/muic_notifier.h>
#endif
#if defined(CONFIG_USB_EXTERNAL_NOTIFY)
#include <linux/usb_notify.h>
#endif
#include "muic-internal.h"
#include "muic_coagent.h"
#if defined(CONFIG_USB_EXTERNAL_NOTIFY)
extern void muic_send_dock_intent(int type);
/*
 * Callback registered on the USB external-notifier chain.
 *
 * Only EXTERNAL_NOTIFY_3S_NODEVICE is acted on: the USB stack reports that
 * a host-mode device failed to enumerate within the timeout.  With
 * CONFIG_MUIC_POGO, if the attached accessory is a POGO dock the POGO path
 * is re-asserted via muic_set_pogo_status(); otherwise the event is only
 * logged.  All other actions are ignored.  Always returns NOTIFY_DONE so
 * traversal of the notifier chain continues.
 */
static int muic_handle_usb_notification(struct notifier_block *nb,
				unsigned long action, void *data)
{
#ifdef CONFIG_MUIC_POGO
	/* Recover the muic_data_t that embeds this notifier_block. */
	muic_data_t *pmuic =
		container_of(nb, muic_data_t, usb_nb);
#endif

	switch (action) {
	/* Abnormal device */
	case EXTERNAL_NOTIFY_3S_NODEVICE:
		pr_info("%s: 3S_NODEVICE(USB HOST Connection timeout)\n", __func__);
#ifdef CONFIG_MUIC_POGO
		if (pmuic->attached_dev == ATTACHED_DEV_POGO_MUIC)
			muic_set_pogo_status(pmuic, 1);
#endif
		break;
	default:
		break;
	}

	return NOTIFY_DONE;
}
/*
 * Register the MUIC on the USB external-notifier chain so that
 * muic_handle_usb_notification() receives EXTERNAL_NOTIFY_* events.
 * Failure to register is logged and otherwise ignored.
 */
void muic_register_usb_notifier(muic_data_t *pmuic)
{
	int ret = 0;

	pr_info("%s: Registering EXTERNAL_NOTIFY_DEV_MUIC.\n", __func__);

	ret = usb_external_notify_register(&pmuic->usb_nb,
		muic_handle_usb_notification, EXTERNAL_NOTIFY_DEV_MUIC);
	if (ret < 0) {
		/* Failure path: log at error level (was pr_info) so it is
		 * visible with default console log levels. */
		pr_err("%s: USB Noti. is not ready.\n", __func__);
		return;
	}

	pr_info("%s: done.\n", __func__);
}
/*
 * Remove the MUIC's notifier block from the USB external-notifier chain.
 * Failure to unregister is logged and otherwise ignored.
 */
void muic_unregister_usb_notifier(muic_data_t *pmuic)
{
	int ret = 0;

	pr_info("%s\n", __func__);

	ret = usb_external_notify_unregister(&pmuic->usb_nb);
	if (ret < 0) {
		/* Failure path: log at error level (was pr_info) so it is
		 * visible with default console log levels. */
		pr_err("%s: USB Noti. unregister error.\n", __func__);
		return;
	}

	pr_info("%s: done.\n", __func__);
}
#else
/* Stubs used when CONFIG_USB_EXTERNAL_NOTIFY is disabled: USB notifier
 * registration becomes a no-op so callers need no #ifdef guards. */
void muic_register_usb_notifier(muic_data_t *pmuic){}
void muic_unregister_usb_notifier(muic_data_t *pmuic){}
#endif
|
TeamSPoon/logicmoo_base | prolog/logicmoo/pdt_server/pdt.common/src/org/cs3/pdt/common/PDTDecorator.java | package org.cs3.pdt.common;
/**
 * Implemented by components whose visual decoration depends on PDT state
 * and must be refreshed when that state changes.
 */
public interface PDTDecorator {

	/**
	 * Requests this decorator to refresh its decoration.
	 * NOTE(review): exact semantics are implementation-defined; confirm
	 * against the registered implementations.
	 */
	public void updateDecorator();
}
|
atul-vyshnav/2021_IBM_Code_Challenge_StockIT | src/StockIT-v2-release_source_from_JADX/sources/com/google/android/gms/internal/ads/zzm.java | package com.google.android.gms.internal.ads;
import android.os.Process;
import java.util.concurrent.BlockingQueue;
/* compiled from: com.google.android.gms:play-services-ads@@19.4.0 */
/**
 * Cache dispatcher thread (obfuscated Volley {@code CacheDispatcher}):
 * pulls requests from the cache queue, serves cache hits directly, and
 * forwards misses and expired entries to the network queue.
 *
 * NOTE(review): all names are obfuscated; member roles below are inferred
 * from usage and should be confirmed against the Volley sources.
 */
public final class zzm extends Thread {
    // True when Volley debug logging is enabled.
    private static final boolean DEBUG = zzaq.DEBUG;
    // Cache queue: requests waiting to be served from the cache.
    private final BlockingQueue<zzaa<?>> zzl;

    /* access modifiers changed from: private */
    // Network queue: requests that could not be served from the cache.
    public final BlockingQueue<zzaa<?>> zzm;
    // The cache implementation.
    private final zzk zzn;

    /* access modifiers changed from: private */
    // Response delivery mechanism.
    public final zzak zzo;
    // Set by quit(); distinguishes intentional interrupts from spurious ones.
    private volatile boolean zzp = false;
    // Presumably a duplicate-request coalescer (zzb() returns true when an
    // identical request is already in flight) -- TODO confirm.
    private final zzo zzq;

    public zzm(BlockingQueue<zzaa<?>> blockingQueue, BlockingQueue<zzaa<?>> blockingQueue2, zzk zzk, zzak zzak) {
        this.zzl = blockingQueue;
        this.zzm = blockingQueue2;
        this.zzn = zzk;
        this.zzo = zzak;
        this.zzq = new zzo(this);
    }

    /** Signals the dispatcher loop to stop and interrupts the blocking take(). */
    public final void quit() {
        this.zzp = true;
        interrupt();
    }

    public final void run() {
        if (DEBUG) {
            zzaq.m208v("start new dispatcher", new Object[0]);
        }
        // THREAD_PRIORITY_BACKGROUND (10): keep cache work off the UI's back.
        Process.setThreadPriority(10);
        this.zzn.initialize();
        while (true) {
            try {
                processRequest();
            } catch (InterruptedException unused) {
                // Only exit when the interrupt came from quit().
                if (this.zzp) {
                    Thread.currentThread().interrupt();
                    return;
                }
                zzaq.m207e("Ignoring spurious interrupt of CacheDispatcher thread; use quit() to terminate it", new Object[0]);
            }
        }
    }

    /**
     * Takes one request from the cache queue and resolves it: miss -> network
     * queue, expired hit -> network queue with the stale entry attached,
     * fresh hit -> parse and deliver (refreshing soft-expired entries).
     *
     * NOTE(review): take.zzd(2) is invoked both in several branches and in
     * the finally block, so it runs twice on those paths -- this looks like
     * a decompilation artifact of the original request-lifecycle markers;
     * left untouched because zzd()'s semantics are not visible here.
     */
    private final void processRequest() throws InterruptedException {
        zzaa take = this.zzl.take();
        take.zzc("cache-queue-take");
        take.zzd(1);
        try {
            take.isCanceled();
            // Look the request up in the cache by its cache key.
            zzn zzb = this.zzn.zzb(take.zze());
            if (zzb == null) {
                take.zzc("cache-miss");
                // Forward to the network unless an identical request is in flight.
                if (!this.zzq.zzb(take)) {
                    this.zzm.put(take);
                }
            } else if (zzb.zza()) {
                // Fully expired entry: attach it and go to the network.
                take.zzc("cache-hit-expired");
                take.zza(zzb);
                if (!this.zzq.zzb(take)) {
                    this.zzm.put(take);
                }
                take.zzd(2);
            } else {
                take.zzc("cache-hit");
                // Parse the cached bytes into a response.
                zzaj zza = take.zza(new zzy(zzb.data, zzb.zzw));
                take.zzc("cache-hit-parsed");
                if (!zza.isSuccess()) {
                    // Unparseable cache entry: evict it and fall back to network.
                    take.zzc("cache-parsing-failed");
                    this.zzn.zza(take.zze(), true);
                    take.zza((zzn) null);
                    if (!this.zzq.zzb(take)) {
                        this.zzm.put(take);
                    }
                    take.zzd(2);
                    return;
                }
                // zzv appears to be the soft-TTL deadline -- TODO confirm.
                if (!(zzb.zzv < System.currentTimeMillis())) {
                    this.zzo.zzb(take, zza);
                } else {
                    // Soft-expired: deliver the stale response, then refresh.
                    take.zzc("cache-hit-refresh-needed");
                    take.zza(zzb);
                    zza.zzbu = true;
                    if (!this.zzq.zzb(take)) {
                        this.zzo.zza(take, zza, new zzp(this, take));
                    } else {
                        this.zzo.zzb(take, zza);
                    }
                }
                take.zzd(2);
            }
        } finally {
            take.zzd(2);
        }
    }
}
|
ScalablyTyped/SlinkyTyped | a/aws-sdk/src/main/scala/typingsSlinky/awsSdk/sesv2Mod/ReplacementTemplate.scala | package typingsSlinky.awsSdk.sesv2Mod
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
/**
 * ScalablyTyped facade for the AWS SDK SESv2 `ReplacementTemplate` shape.
 * Generated binding: fields mirror the underlying JavaScript object
 * one-to-one and are materialized at runtime by Scala.js.
 */
@js.native
trait ReplacementTemplate extends StObject {

  /**
   * A list of replacement values to apply to the template. This parameter is a JSON object, typically consisting of key-value pairs in which the keys correspond to replacement tags in the email template.
   */
  var ReplacementTemplateData: js.UndefOr[EmailTemplateData] = js.native
}
/** Construction and in-place mutation helpers for [[ReplacementTemplate]]. */
object ReplacementTemplate {

  /** Creates an empty JS object typed as `ReplacementTemplate`. */
  @scala.inline
  def apply(): ReplacementTemplate = {
    val __obj = js.Dynamic.literal()
    __obj.asInstanceOf[ReplacementTemplate]
  }

  /** Builder-style setters; each mutates the wrapped object and returns it. */
  @scala.inline
  implicit class ReplacementTemplateMutableBuilder[Self <: ReplacementTemplate] (val x: Self) extends AnyVal {

    /** Sets the `ReplacementTemplateData` field on the underlying JS object. */
    @scala.inline
    def setReplacementTemplateData(value: EmailTemplateData): Self = StObject.set(x, "ReplacementTemplateData", value.asInstanceOf[js.Any])

    /** Resets `ReplacementTemplateData` to `undefined`. */
    @scala.inline
    def setReplacementTemplateDataUndefined: Self = StObject.set(x, "ReplacementTemplateData", js.undefined)
  }
}
|
SSSDNSY/go | src/internal/goarch/zgoarch_sparc64.go | // Code generated by gengoarch.go using 'go generate'. DO NOT EDIT.
//go:build sparc64
package goarch
// GOARCH names the architecture this generated file selects.
const GOARCH = `sparc64`

// Each IsXxx constant is 1 only for the selected architecture, so
// architecture checks compile to constants and dead branches are eliminated.
// (File is generated by gengoarch.go; edits here would be overwritten.)
const Is386 = 0
const IsAmd64 = 0
const IsAmd64p32 = 0
const IsArm = 0
const IsArmbe = 0
const IsArm64 = 0
const IsArm64be = 0
const IsLoong64 = 0
const IsMips = 0
const IsMipsle = 0
const IsMips64 = 0
const IsMips64le = 0
const IsMips64p32 = 0
const IsMips64p32le = 0
const IsPpc = 0
const IsPpc64 = 0
const IsPpc64le = 0
const IsRiscv = 0
const IsRiscv64 = 0
const IsS390 = 0
const IsS390x = 0
const IsSparc = 0
const IsSparc64 = 1
const IsWasm = 0
|
gbzarelli/music-store-api | src/test/java/br/com/helpdev/musicstore/controller/rest/DiscRestControllerTest.java | package br.com.helpdev.musicstore.controller.rest;
import br.com.helpdev.musicstore.exception.NoValuePresentException;
import br.com.helpdev.musicstore.service.DiscService;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.test.web.servlet.MockMvc;
import static org.hamcrest.Matchers.containsString;
import static org.mockito.Mockito.when;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@WebMvcTest(DiscRestController.class)
class DiscRestControllerTest {
private static final String GENRE_ROCK = "rock";
@Autowired
MockMvc mockMvc;
@MockBean
DiscService discService;
@Test
void shouldNotAllowedPostMethod() throws Exception {
mockMvc.perform(post(DiscRestController.ROOT_PATH))
.andExpect(status().isMethodNotAllowed());
}
@Test
void shouldReturnStatusOkByGetDiscByID() throws Exception {
mockMvc.perform(get(DiscRestController.ROOT_PATH + DiscRestController.PATH_BY_ID, 1))
.andExpect(status().isOk());
}
@Test
void shouldReturnNoValueExceptionWhenCallMethodGetByIDWithNotFoundID() throws Exception {
int discID = 555;
NoValuePresentException exception = new NoValuePresentException("no value for id=" + discID);
when(discService.getDiscByID(discID)).thenThrow(exception);
mockMvc.perform(get(DiscRestController.ROOT_PATH + DiscRestController.PATH_BY_ID, discID))
.andDo(print())
.andExpect(status().isBadRequest())
.andExpect(content().string(containsString(exception.getMessage())));
}
@Test
void shouldReturnOKWhenGetDiscsCalled() throws Exception {
mockMvc.perform(get(DiscRestController.ROOT_PATH))
.andExpect(status().isOk());
}
@Test
void shouldReturnOKWhenGetDiscsByGenresCalled() throws Exception {
mockMvc.perform(get(DiscRestController.ROOT_PATH + DiscRestController.PATH_BY_GENRE, GENRE_ROCK))
.andExpect(status().isOk());
}
} |
Carlangueitor/pulumi-kubernetes | sdk/go/kubernetes/rbac/v1beta1/role.go | // *** WARNING: this file was generated by pulumigen. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package v1beta1
import (
"reflect"
metav1 "github.com/pulumi/pulumi-kubernetes/sdk/v2/go/kubernetes/meta/v1"
"github.com/pulumi/pulumi/sdk/v2/go/pulumi"
)
// Role is a namespaced, logical grouping of PolicyRules that can be referenced as a unit by a RoleBinding. Deprecated in v1.17 in favor of rbac.authorization.k8s.io/v1 Role, and will no longer be served in v1.20.
type Role struct {
pulumi.CustomResourceState
// APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
ApiVersion pulumi.StringPtrOutput `pulumi:"apiVersion"`
// Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
Kind pulumi.StringPtrOutput `pulumi:"kind"`
// Standard object's metadata.
Metadata metav1.ObjectMetaPtrOutput `pulumi:"metadata"`
// Rules holds all the PolicyRules for this Role
Rules PolicyRuleArrayOutput `pulumi:"rules"`
}
// NewRole registers a new resource with the given unique name, arguments, and options.
func NewRole(ctx *pulumi.Context,
	name string, args *RoleArgs, opts ...pulumi.ResourceOption) (*Role, error) {
	if args == nil {
		args = &RoleArgs{}
	}

	// apiVersion and kind are fixed by this resource type and always
	// override whatever the caller supplied.
	args.ApiVersion = pulumi.StringPtr("rbac.authorization.k8s.io/v1beta1")
	args.Kind = pulumi.StringPtr("Role")
	// Aliases allow stacks created against other rbac API versions to adopt
	// this resource without a destroy/replace.
	aliases := pulumi.Aliases([]pulumi.Alias{
		{
			Type: pulumi.String("kubernetes:rbac.authorization.k8s.io/v1:Role"),
		},
		{
			Type: pulumi.String("kubernetes:rbac.authorization.k8s.io/v1alpha1:Role"),
		},
	})
	opts = append(opts, aliases)
	var resource Role
	err := ctx.RegisterResource("kubernetes:rbac.authorization.k8s.io/v1beta1:Role", name, args, &resource, opts...)
	if err != nil {
		return nil, err
	}
	return &resource, nil
}
// GetRole gets an existing Role resource's state with the given name, ID, and optional
// state properties that are used to uniquely qualify the lookup (nil if not required).
func GetRole(ctx *pulumi.Context,
	name string, id pulumi.IDInput, state *RoleState, opts ...pulumi.ResourceOption) (*Role, error) {
	var resource Role
	// ReadResource looks up the live resource by ID instead of creating one.
	err := ctx.ReadResource("kubernetes:rbac.authorization.k8s.io/v1beta1:Role", name, id, state, &resource, opts...)
	if err != nil {
		return nil, err
	}
	return &resource, nil
}
// Input properties used for looking up and filtering Role resources.
// (roleState is the plain-value form; RoleState is the pulumi.Input form
// consumed by GetRole.)
type roleState struct {
	// APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
	ApiVersion *string `pulumi:"apiVersion"`
	// Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
	Kind *string `pulumi:"kind"`
	// Standard object's metadata.
	Metadata *metav1.ObjectMeta `pulumi:"metadata"`
	// Rules holds all the PolicyRules for this Role
	Rules []PolicyRule `pulumi:"rules"`
}

type RoleState struct {
	// APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
	ApiVersion pulumi.StringPtrInput
	// Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
	Kind pulumi.StringPtrInput
	// Standard object's metadata.
	Metadata metav1.ObjectMetaPtrInput
	// Rules holds all the PolicyRules for this Role
	Rules PolicyRuleArrayInput
}

// ElementType reports the plain Go type this input state maps to.
func (RoleState) ElementType() reflect.Type {
	return reflect.TypeOf((*roleState)(nil)).Elem()
}
// roleArgs is the plain-value form of the constructor arguments; RoleArgs
// below is the pulumi.Input form consumed by NewRole.
type roleArgs struct {
	// APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
	ApiVersion *string `pulumi:"apiVersion"`
	// Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
	Kind *string `pulumi:"kind"`
	// Standard object's metadata.
	Metadata *metav1.ObjectMeta `pulumi:"metadata"`
	// Rules holds all the PolicyRules for this Role
	Rules []PolicyRule `pulumi:"rules"`
}

// The set of arguments for constructing a Role resource.
type RoleArgs struct {
	// APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
	ApiVersion pulumi.StringPtrInput
	// Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
	Kind pulumi.StringPtrInput
	// Standard object's metadata.
	Metadata metav1.ObjectMetaPtrInput
	// Rules holds all the PolicyRules for this Role
	Rules PolicyRuleArrayInput
}

// ElementType reports the plain Go type these args map to.
func (RoleArgs) ElementType() reflect.Type {
	return reflect.TypeOf((*roleArgs)(nil)).Elem()
}
|
GHelen/selenium_java_project | src/main/java/ua/odessa/ghelen/applicationlogicrun/NavigationHelperRun.java | <filename>src/main/java/ua/odessa/ghelen/applicationlogicrun/NavigationHelperRun.java
package ua.odessa.ghelen.applicationlogicrun;
import org.openqa.selenium.By;
import ua.odessa.ghelen.applicationlogic.NavigationHelper;
/**
 * Selenium-backed implementation of {@link NavigationHelper}: navigates the
 * application under test by driving the page objects exposed via
 * {@code pages} (from DriverBasedHelper).
 */
public class NavigationHelperRun extends DriverBasedHelper implements NavigationHelper {

	// Root URL of the application under test, taken from the manager's config.
	private String baseUrl;

	public NavigationHelperRun(ApplicationManagerRun manager) {
		super(manager.getDriver());
		this.baseUrl = manager.getBaseUrl();
	}

	/** Loads the application's main page by opening the base URL directly. */
	@Override
	public void openMainPage() {
		driver.get(baseUrl);
	}

	/** Not implemented yet: navigation to the basket page is still a stub. */
	@Override
	public void gotoBasketPage() {
		// TODO Auto-generated method stub
	}

	/** Opens the user-profile page via the link available on every page. */
	@Override
	public void gotoUserProfilePage() {
		pages.everyPage.ensurePageLoaded().clickUserProfilePage();
	}

	/** Opens the buy-book page via the link available on every page. */
	@Override
	public void gotoUserBuyBookPage() {
		pages.everyPage.ensurePageLoaded().clickBuyBookPage();
	}

	/** Returns to the home page via the link available on every page. */
	@Override
	public void gotoHomePage() {
		pages.everyPage.ensurePageLoaded().clickhomePageLink();
	}
}
|
phinguyen06/ecommerce | googlecheckout/checkoutrefimpl/com/google/checkout/orderprocessing/UnarchiveOrderRequest.java | /*******************************************************************************
* Copyright (C) 2007 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package com.google.checkout.orderprocessing;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import com.google.checkout.AbstractCheckoutRequest;
import com.google.checkout.MerchantConstants;
import com.google.checkout.util.Constants;
import com.google.checkout.util.Utils;
/**
* This class contains methods that construct <unarchive-order> API
* requests.
*/
/**
 * This class contains methods that construct &lt;unarchive-order&gt; API
 * requests for Google Checkout.
 */
public class UnarchiveOrderRequest extends AbstractCheckoutRequest {
	private Document document;

	private Element root;

	/**
	 * Constructor which takes an instance of MerchantConstants.
	 *
	 * @param merchantConstants
	 *            The MerchantConstants.
	 *
	 * @see MerchantConstants
	 */
	public UnarchiveOrderRequest(MerchantConstants merchantConstants) {
		super(merchantConstants);

		document = Utils.newEmptyDocument();
		// createElementNS already returns Element; the previous explicit
		// cast was redundant.
		root = document.createElementNS(Constants.checkoutNamespace,
				"unarchive-order");
		root.setAttributeNS("http://www.w3.org/2000/xmlns/", "xmlns",
				Constants.checkoutNamespace);
		document.appendChild(root);
	}

	/**
	 * Constructor which takes an instance of MerchantConstants and the Google
	 * Order Number.
	 *
	 * @param merchantConstants
	 *            The MerchantConstants.
	 * @param googleOrderNo
	 *            The Google Order Number.
	 *
	 * @see MerchantConstants
	 */
	public UnarchiveOrderRequest(MerchantConstants merchantConstants,
			String googleOrderNo) {
		this(merchantConstants);
		this.setGoogleOrderNo(googleOrderNo);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see com.google.checkout.CheckoutRequest#getXml()
	 */
	public String getXml() {
		return Utils.documentToString(document);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see com.google.checkout.CheckoutRequest#getXmlPretty()
	 */
	public String getXmlPretty() {
		return Utils.documentToStringPretty(document);
	}

	/**
	 * Return the Google Order Number, which is the value of the
	 * google-order-number attribute on the root tag.
	 *
	 * @return The Google Order Number.
	 */
	public String getGoogleOrderNo() {
		return root.getAttribute("google-order-number");
	}

	/**
	 * Set the Google Order Number, which is the value of the
	 * google-order-number attribute on the root tag.
	 *
	 * @param googleOrderNo
	 *            The Google Order Number.
	 */
	public void setGoogleOrderNo(String googleOrderNo) {
		root.setAttribute("google-order-number", googleOrderNo);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see com.google.checkout.CheckoutRequest#getPostUrl()
	 */
	public String getPostUrl() {
		return merchantConstants.getRequestUrl();
	}
}
|
annapowellsmith/openpresc | openprescribing/matrixstore/build/sort_and_merge_gzipped_csv_files.py | <gh_stars>10-100
import csv
import os
import shlex
import subprocess
from pipes import quote  # deprecated; retained so existing callers keep working
class InvalidHeaderError(Exception):
    """Raised when input CSV files have missing or mutually inconsistent header rows."""

    pass
def sort_and_merge_gzipped_csv_files(
    # CSV files to sort (which may or may not be gzipped)
    input_filenames,
    # Output file
    output_filename,
    # Column names to sort by
    sort_columns,
):
    """
    Given a list of CSV files, sort the rows by the supplied column names and
    write the result to `output_filename`

    Input files may be gzipped or not (either will work).  The output is
    always gzipped.

    We shell out to the `sort` command for this as it much more efficient than
    trying to do this in Python and can transparently handle sorting files that
    are many times too large to fit in memory.

    Note that `sort` doesn't really parse CSV, it just splits on commas; so
    this function won't work where the CSV contains commas -- at least, where
    these are to the left of the columns which are being sorted on.

    Raises InvalidHeaderError if the input files' headers disagree.
    """
    header_line = get_header_line(input_filenames)
    sort_column_indices = get_column_indices(header_line, sort_columns)
    # Construct a shell pipeline to read all input files and sort in the
    # correct order, outputing the header line first.
    # shlex.quote replaces pipes.quote: `pipes` is deprecated since Python
    # 3.11 and removed in 3.13; shlex.quote is the drop-in replacement.
    pipeline = "( {read_files} ) | ( echo {header_line}; {sort_by_columns} )".format(
        read_files=read_files(input_filenames, skip_lines=1),
        header_line=shlex.quote(header_line),
        sort_by_columns=sort_by_columns(sort_column_indices),
    )
    pipeline += " | gzip"
    pipeline += " > {}".format(shlex.quote(output_filename))
    env = os.environ.copy()
    # For much faster string comparison when sorting
    env["LANG"] = "C"
    subprocess.check_call(pipeline, shell=True, env=env)
def read_files(filenames, skip_lines=None, max_lines=None):
    """
    Return a single shell command which streams each file in `filenames` in
    turn (gzipped or not), optionally dropping leading and trailing lines
    from every file.
    """
    # Chain the per-file commands with ";" so they run sequentially and
    # their output is concatenated on stdout.
    return "; ".join(
        read_file(name, skip_lines=skip_lines, max_lines=max_lines)
        for name in filenames
    )
def read_file(filename, skip_lines=None, max_lines=None):
    """
    Return a shell command which streams `filename` to stdout (gzipped or
    not), optionally dropping `skip_lines` leading lines and truncating the
    output to `max_lines` lines.
    """
    # `--force` makes gzip pass non-gzipped input straight through, so this
    # behaves like `cat` for plain files.
    stages = [
        "gzip --decompress --force --to-stdout --quiet {}".format(quote(filename))
    ]
    if skip_lines is not None:
        # tail's "+N" form means "start from line N", hence the +1.
        stages.append("tail -n +{}".format(int(skip_lines) + 1))
    if max_lines is not None:
        stages.append("head -n {}".format(int(max_lines)))
    return " | ".join(stages)
def sort_by_columns(column_indices):
    """
    Return a `sort` invocation which orders a CSV stream by the supplied
    zero-based column indices.
    """
    # `sort` numbers fields from 1, and "--key=N,N" restricts each key to
    # exactly one field instead of running to end-of-line.
    keys = " ".join("--key={0},{0}".format(index + 1) for index in column_indices)
    return "sort --field-separator=, " + keys
def get_header_line(filenames):
    """
    Return the first line of the first file and check that it is identical
    across all files.

    Raises:
        InvalidHeaderError: if any file contributes no header line at all
            (e.g. it is empty) or if the headers differ between files.
    """
    # Read just the first line of every input file in a single shell
    # invocation; stderr is discarded because `head` closing the pipe early
    # makes gzip complain harmlessly.
    pipeline = "({read_files}) 2>/dev/null".format(
        read_files=read_files(filenames, max_lines=1)
    )
    header_lines = (
        subprocess.check_output(pipeline, shell=True).decode("utf8").splitlines()
    )
    # An empty input file contributes no line, which previously surfaced as a
    # confusing IndexError below instead of a meaningful error.
    if len(header_lines) != len(filenames):
        raise InvalidHeaderError(
            "Expected {} header lines but got {} "
            "(is one of the input files empty?)".format(
                len(filenames), len(header_lines)
            )
        )
    header_line = header_lines[0]
    for n, filename in enumerate(filenames):
        other_line = header_lines[n]
        if other_line != header_line:
            raise InvalidHeaderError(
                "Input files do not have identical headers:\n\n"
                "{}: {}\n{}: {}".format(filenames[0], header_line, filename, other_line)
            )
    return header_line
def get_column_indices(header_line, columns):
    """
    Take a CSV header line and a list of columns and return the indices of
    those columns (or raise InvalidHeaderError)
    """
    # Use the csv module so quoted headers containing commas parse correctly.
    headers = next(csv.reader([header_line]))
    indices = []
    try:
        for column in columns:
            indices.append(headers.index(column))
    except ValueError as error:
        # list.index's "X is not in list" message is folded into the error.
        raise InvalidHeaderError("{} of headers: {}".format(error, header_line))
    return indices
|
Snownee/Kiwi | src/main/java/snownee/kiwi/inventory/container/ModSlot.java | <filename>src/main/java/snownee/kiwi/inventory/container/ModSlot.java<gh_stars>10-100
package snownee.kiwi.inventory.container;
import net.minecraft.world.Container;
import net.minecraft.world.inventory.Slot;
import net.minecraft.world.item.ItemStack;
/**
* @since 2.7.0
*/
public class ModSlot extends Slot {

    public ModSlot(Container inventoryIn, int index, int xPosition, int yPosition) {
        super(inventoryIn, index, xPosition, yPosition);
    }

    /**
     * Delegates the placement check to the backing container's
     * {@code canPlaceItem} for this slot's index.
     */
    @Override
    public boolean mayPlace(ItemStack stack) {
        return container.canPlaceItem(index, stack);
    }
}
|
ddcprg/prime-number-service | prime-number-service-app/src/main/java/com/company/prime/service/number/app/DefaultPrimeNumberGenerator.java | package com.company.prime.service.number.app;
import static com.google.common.base.Preconditions.checkArgument;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
public class DefaultPrimeNumberGenerator implements PrimeNumberGenerator {

    // Predicate deciding whether a candidate int is prime.
    private final PrimeNumberPredicate checker;

    public DefaultPrimeNumberGenerator(PrimeNumberPredicate checker) {
        this.checker = checker;
    }

    /**
     * Returns every number in [1, limit] accepted by the predicate, in
     * ascending order (the parallel ordered stream preserves encounter
     * order when collecting).
     *
     * NOTE(review): the range starts at 1, so excluding 1 (not prime)
     * relies on the predicate rejecting it -- confirm.
     *
     * @param limit inclusive upper bound; must be greater than 0
     * @throws IllegalArgumentException if limit is not positive
     */
    public List<Integer> primesTill(int limit) {
        checkArgument(limit > 0, "Parameter must be greater than 0");
        List<Integer> result =
            IntStream.rangeClosed(1, limit)
                .parallel()
                .filter(checker)
                .boxed()
                .collect(Collectors.toList());
        return result;
    }
}
|
849679859/rt-thread | bsp/nrf5x/libraries/drivers/drv_i2c.c | /*
* Copyright (c) 2006-2020, RT-Thread Development Team
*
* SPDX-License-Identifier: Apache-2.0
*
* Change Logs:
* Date Author Notes
* 2020-11-15 xckhmf First Verison
*
*/
#include <rtdevice.h>
#include <nrfx_twi_twim.h>
#include <nrfx_twim.h>
#include <drv_i2c.h>
#if defined(BSP_USING_I2C0) || defined(BSP_USING_I2C1)
/* Static, compile-time configuration for one TWIM bus instance. */
typedef struct
{
    nrf_twim_frequency_t freq;  /* bus clock frequency */
    uint32_t scl_pin;           /* SCL GPIO pin number */
    uint32_t sda_pin;           /* SDA GPIO pin number */
    nrfx_twim_t twi_instance;   /* underlying nrfx TWIM peripheral instance */
}drv_i2c_cfg_t;

#ifdef BSP_USING_I2C0
/* Bus 0: pins come from the board configuration (BSP_I2C0_*). */
static drv_i2c_cfg_t drv_i2c_0 =
{
    .freq = NRF_TWIM_FREQ_400K,
    .scl_pin = BSP_I2C0_SCL_PIN,
    .sda_pin = BSP_I2C0_SDA_PIN,
    .twi_instance = NRFX_TWIM_INSTANCE(0)
};
static struct rt_i2c_bus_device i2c0_bus;
#endif

#ifdef BSP_USING_I2C1
/* Bus 1: pins come from the board configuration (BSP_I2C1_*). */
static drv_i2c_cfg_t drv_i2c_1 =
{
    .freq = NRF_TWIM_FREQ_400K,
    .scl_pin = BSP_I2C1_SCL_PIN,
    .sda_pin = BSP_I2C1_SDA_PIN,
    .twi_instance = NRFX_TWIM_INSTANCE(1)
};
static struct rt_i2c_bus_device i2c1_bus;
#endif
static int twi_master_init(struct rt_i2c_bus_device *bus)
{
nrfx_err_t rtn;
nrfx_twim_config_t config = NRFX_TWIM_DEFAULT_CONFIG(0,0);
drv_i2c_cfg_t *p_cfg = bus->priv;
nrfx_twim_t const * p_instance = &p_cfg->twi_instance;
config.frequency = p_cfg->freq;
config.scl = p_cfg->scl_pin;
config.sda = p_cfg->sda_pin;
nrfx_twi_twim_bus_recover(config.scl,config.sda);
rtn = nrfx_twim_init(p_instance,&config,NULL,NULL);
nrfx_twim_enable(p_instance);
return 0;
}
/* RT-Thread master-transfer hook: performs a single blocking TWIM transfer.
 *
 * NOTE(review): only msgs[0] is transferred -- `num` is ignored -- so
 * multi-message transactions (e.g. write-then-read with repeated start)
 * are not supported by this implementation. Confirm whether callers rely
 * on multi-message transfers before extending.
 *
 * Returns the number of bytes in the first message on success, 0 on failure. */
static rt_size_t _master_xfer(struct rt_i2c_bus_device *bus,
                              struct rt_i2c_msg msgs[],
                              rt_uint32_t num)
{
    nrfx_twim_t const * p_instance = &((drv_i2c_cfg_t *)bus->priv)->twi_instance;
    nrfx_err_t ret = NRFX_ERROR_INTERNAL;
    uint32_t no_stop_flag = 0;
    /* Descriptor defaults to TX; the direction is fixed up below. */
    nrfx_twim_xfer_desc_t xfer = NRFX_TWIM_XFER_DESC_TX(msgs->addr,msgs->buf, msgs->len);
    if((msgs->flags & 0x01) == RT_I2C_WR)
    {
        xfer.type = NRFX_TWIM_XFER_TX;
        if((msgs->flags & 0x40) == RT_I2C_NO_READ_ACK)
        {
            /* Suppress the STOP condition after the write. */
            no_stop_flag = NRFX_TWIM_FLAG_TX_NO_STOP;
        }
    }
    else if((msgs->flags & 0x01) == RT_I2C_RD)
    {
        xfer.type = NRFX_TWIM_XFER_RX;
    }
    ret = nrfx_twim_xfer(p_instance,&xfer,no_stop_flag);
    return (ret == NRFX_SUCCESS) ? msgs->len : 0;
}
/* Only master transfer is implemented; slave transfer and bus control
 * callbacks are intentionally left NULL. */
static const struct rt_i2c_bus_device_ops _i2c_ops =
{
    _master_xfer,
    NULL,
    NULL,
};
/* Initialize and register each compiled-in TWIM bus ("i2c0"/"i2c1") with
 * the RT-Thread I2C framework. Always returns 0. */
int rt_hw_i2c_init(void)
{
#ifdef BSP_USING_I2C0
    i2c0_bus.ops= &_i2c_ops;
    i2c0_bus.timeout = 0;
    i2c0_bus.priv = (void *)&drv_i2c_0;
    twi_master_init(&i2c0_bus);
    rt_i2c_bus_device_register(&i2c0_bus, "i2c0");
#endif
#ifdef BSP_USING_I2C1
    i2c1_bus.ops= &_i2c_ops;
    i2c1_bus.timeout = 0;
    i2c1_bus.priv = (void *)&drv_i2c_1;
    twi_master_init(&i2c1_bus);
    rt_i2c_bus_device_register(&i2c1_bus, "i2c1");
#endif
    return 0;
}
/* Runs automatically during board initialization. */
INIT_BOARD_EXPORT(rt_hw_i2c_init);
#endif /* defined(BSP_USING_I2C0) || defined(BSP_USING_I2C1) */
|
souzabrizolara/py-home-shell | src/entities/hsuser.py | __author__ = 'alisonbento'
import src.base.arrayparsableentity as parsable
class HomeShellUser(parsable.ArrayParsableEntity):
    """A HomeShell user account that can be serialised to a plain dict."""

    def __init__(self):
        # Identity and profile fields; populated after construction.
        self.id = 0
        self.name = None
        self.gender = None
        self.email = None
        self.username = None
        self.password = None
        self.locale = None
        self.created = None
        self.modified = None

    def to_array(self):
        """Return the public fields as a dict.

        The username and password are deliberately not included.
        """
        exported = ("id", "name", "gender", "email", "locale", "created", "modified")
        return {field: getattr(self, field) for field in exported}
|
viewserver/viewserver | viewserver-messages/viewserver-messages-common/src/main/java/io/viewserver/messages/tableevent/ITableMetadata.java | <filename>viewserver-messages/viewserver-messages-common/src/main/java/io/viewserver/messages/tableevent/ITableMetadata.java
/*
* Copyright 2016 Claymore Minds Limited and Niche Solutions (UK) Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.viewserver.messages.tableevent;
import io.viewserver.messages.IPoolableMessage;
import io.viewserver.messages.IRecyclableMessage;
import io.viewserver.messages.common.ColumnType;
import java.util.List;
/**
* Created by nick on 02/12/15.
*/
public interface ITableMetadata<T> extends IPoolableMessage<T> {

    /** Metadata entries attached to a table event, as name/value pairs. */
    List<IMetadataValue> getMetadataValues();

    /** A single named metadata entry. Setters return {@code this} for chaining. */
    interface IMetadataValue<T> extends IRecyclableMessage<T> {
        String getName();
        IMetadataValue setName(String name);
        IValue getValue();
        IMetadataValue setValue(IValue value);
    }

    /**
     * A typed metadata value. NOTE(review): presumably {@code getValueType()}
     * indicates which of the typed accessors below holds the value -- confirm
     * against the concrete implementations.
     */
    interface IValue<T> extends IPoolableMessage<T> {
        ColumnType getValueType();
        boolean getBooleanValue();
        IValue setBooleanValue(boolean value);
        int getIntegerValue();
        IValue setIntegerValue(int value);
        long getLongValue();
        IValue setLongValue(long value);
        float getFloatValue();
        IValue setFloatValue(float value);
        double getDoubleValue();
        IValue setDoubleValue(double value);
        String getStringValue();
        IValue setStringValue(String value);
    }
}
|
ScalablyTyped/SlinkyTyped | d/dojo/src/main/scala/typingsSlinky/dojo/dijit/package.scala | package typingsSlinky.dojo
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation._
package object dijit {
  // NOTE: generated ScalablyTyped facade -- type aliases mirroring the Dojo
  // "dijit" JavaScript API; edit the generator inputs rather than this file.

  /**
    * Permalink: http://dojotoolkit.org/api/1.9/dijit/BackgroundIframe.html
    *
    * For IE/FF z-index shenanigans. id attribute is required.
    * new dijit.BackgroundIframe(node).
    *
    * Makes a background iframe as a child of node, that fills
    * area (and position) of node
    *
    * @param node
    */
  type BackgroundIframe = js.Function1[/* node */ org.scalajs.dom.raw.HTMLElement, scala.Unit]

  /**
    * Permalink: http://dojotoolkit.org/api/1.9/dijit/_BidiSupport.html
    *
    * Deprecated module for enabling textdir support in the dijit widgets. New code should just define
    * has("dojo-bidi") to return true, rather than manually requiring this module.
    *
    */
  type BidiSupport = js.Function0[scala.Unit]
  type Calendar = typingsSlinky.dojo.dijit.Calendar_
  type CalendarLite = typingsSlinky.dojo.dijit.CalendarLite_
  type ColorPalette = typingsSlinky.dojo.dijit.ColorPalette_
  type Dialog = typingsSlinky.dojo.dijit.Dialog_
  type InlineEditBox = typingsSlinky.dojo.dijit.InlineEditBox_
  type MenuBarItem = typingsSlinky.dojo.dijit.MenuBarItem_
  type OnDijitClickMixin = typingsSlinky.dojo.dijit.OnDijitClickMixin_
  type PaletteMixin = typingsSlinky.dojo.dijit.PaletteMixin_
  type TemplatedMixin = typingsSlinky.dojo.dijit.TemplatedMixin_
  type TimePicker = typingsSlinky.dojo.dijit.TimePicker_
  type Tooltip = typingsSlinky.dojo.dijit.Tooltip_
  type Tree_ = typingsSlinky.dojo.dijit.Tree__
  type base = typingsSlinky.dojo.dijit.base_

  /**
    * Permalink: http://dojotoolkit.org/api/1.9/dijit/hccss.html
    *
    * Test if computer is in high contrast mode, and sets dijit_a11y flag on <body> if it is.
    * Deprecated, use dojo/hccss instead.
    *
    */
  type hccss = js.Function0[scala.Unit]
  type main = typingsSlinky.dojo.dijit.main_
  type place = typingsSlinky.dojo.dijit.place_
  type registry = typingsSlinky.dojo.dijit.registry_
}
|
fakewen/Monitoring-branch | src/lib/wkpf/c/common/native_wuclasses/GENERATEDwuclass_light_sensor.h |
#include "native_wuclasses.h"
#include "native_wuclasses_privatedatatypes.h"
#ifndef WUCLASS_LIGHT_SENSORH
#define WUCLASS_LIGHT_SENSORH

/* Globally visible descriptor for the light-sensor wuclass (defined elsewhere). */
extern wuclass_t wuclass_light_sensor;

#endif
|
pnarvor/nephelae_paparazzi | tests/mission_launch_basic_01.py | <reponame>pnarvor/nephelae_paparazzi<filename>tests/mission_launch_basic_01.py<gh_stars>0
#! /usr/bin/python3
import os
import sys
import argparse
import time
import threading
from ivy.std_api import *
import logging
PPRZ_HOME = os.getenv("PAPARAZZI_HOME", os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../../')))
sys.path.append(PPRZ_HOME + "/var/lib/python")
from pprzlink.ivy import IvyMessagesInterface
from pprzlink.message import PprzMessage
from pprzlink import messages_xml_map
ac_id = 24
ivyInterface = IvyMessagesInterface()
time.sleep(0.5)
def append_lace(index=1):
    """Queue a LACE exploration pattern on aircraft `ac_id` via Ivy."""
    msg = PprzMessage('datalink', 'MISSION_CUSTOM')
    msg['ac_id'] = ac_id
    msg['insert'] = 0
    msg['index'] = index
    msg['type'] = 'LACE'
    msg['duration'] = -1  # -1: no time limit
    # msg['params'] = [-10500.0,1500.0,1100.0, 0, 100.0, 0]
    # msg['params'] = [-1500.0,1500.0,2600.0, 0, 100.0, -7.5,-0.5,0]
    # Pattern parameters; presumably [x, y, z, ?, radius, wind_x, wind_y, ?]
    # -- TODO confirm against the LACE mission handler.
    msg['params'] = [1500.0,900.0,700.0, 0, 50.0, -7.5,-0.5,0]
    # msg['params'] = [-1500.0,1500.0,2600.0, 0, 100.0, 0.0,0.0,0]
    ivyInterface.send(msg)
def append_rosette(index=1):
    """Queue a RSTT (rosette) pattern on aircraft `ac_id` via Ivy."""
    msg = PprzMessage('datalink', 'MISSION_CUSTOM')
    msg['ac_id'] = ac_id
    msg['insert'] = 0
    msg['index'] = index
    msg['type'] = 'RSTT'
    msg['duration'] = -1  # -1: no time limit
    # msg['params'] = [-10500.0,1500.0,1100.0]
    # Same parameter layout as append_lace -- TODO confirm.
    msg['params'] = [1500.0,900.0,700.0, 0, 50.0, -7.5,-0.5,0]
    # msg['params'] = [-1500.0,1500.0,2600.0, 0, 100.0, -7.5,-0.5,0]
    ivyInterface.send(msg)
def next_mission():
    """Tell the aircraft to advance to its next queued mission element."""
    msg = PprzMessage('datalink', 'NEXT_MISSION')
    msg['ac_id'] = ac_id
    ivyInterface.send(msg)
def goto_mission(index):
    """Tell the aircraft to jump to the mission element with id `index`."""
    msg = PprzMessage('datalink', 'GOTO_MISSION')
    msg['ac_id'] = ac_id
    msg['mission_id'] = index
    ivyInterface.send(msg)
def end_mission():
    """Tell the aircraft to terminate its current mission."""
    msg = PprzMessage('datalink', 'END_MISSION')
    msg['ac_id'] = ac_id
    ivyInterface.send(msg)
def start_block(index):
    """Jump the aircraft's flight plan to block `index` (ground-side message)."""
    msg = PprzMessage('ground', 'JUMP_TO_BLOCK')
    msg['block_id'] = index
    msg['ac_id'] = ac_id
    ivyInterface.send(msg)
lwc = 0  # NOTE(review): appears unused in this script -- confirm before removing

def lwc_value(value):
    """Send a payload command carrying a single value to the aircraft."""
    msg = PprzMessage('datalink', 'PAYLOAD_COMMAND')
    msg['ac_id'] = ac_id
    msg['command'] = [value]
    ivyInterface.send(msg)
def start_lace():
    """Queue a LACE pattern, then jump the flight plan to block 6 to start it."""
    append_lace()
    start_block(6)
|
AldoAbdn/aldobot.js | commands/pause.js | exports.run = (client, message, args) => {
const voiceChannel = message.member.voice.channel;
const dispatcher = message.guild.dispatcher;
//If user is not in the voice channel, return
if (!client.voice.connections.find(voiceConnection => voiceConnection.channel.id == voiceChannel.id)){
return;
} else if (dispatcher){
//If there is a dispatcher, pause it
dispatcher.pause();
}
};
// Command metadata consumed by the command loader.
exports.conf = {
    enabled: true,
    guildOnly: false,
    aliases: [],
    category: "Music",
    permLevel: 0
};

// Text shown by the help command.
exports.help = {
    name: 'pause',
    description: 'Pauses audio',
    usage: 'pause'
};
|
LS4GAN/uvcgan | uvcgan/config/model_config.py | <filename>uvcgan/config/model_config.py
class ModelConfig:
    """Bundle of settings describing one model: its name, constructor
    arguments, optimizer configuration and weight initialization."""

    __slots__ = [
        'model',
        'model_args',
        'optimizer',
        'weight_init',
    ]

    def __init__(self, model, optimizer=None, model_args=None, weight_init=None):
        self.model = model
        # A falsy model_args (None or {}) is replaced by a fresh empty dict,
        # matching the previous `model_args or {}` semantics.
        self.model_args = model_args if model_args else {}
        # Default optimizer: AdamW with mild weight decay.
        self.optimizer = optimizer if optimizer else {
            'name' : 'AdamW', 'betas' : (0.5, 0.999), 'weight_decay' : 1e-5,
        }
        self.weight_init = weight_init

    def to_dict(self):
        """Return the configuration as a plain dict keyed by slot name."""
        result = {}
        for attribute in self.__slots__:
            result[attribute] = getattr(self, attribute)
        return result
|
ujway/viron | src/store/mutations/application.js | import reject from 'mout/array/reject';
import ObjectAssign from 'object-assign';
import exporter from './exporter';
export default exporter('application', {
  /**
   * Set the launch status.
   * @param {Object} state
   * @param {Boolean} bool
   * @return {Array}
   */
  launch: (state, bool) => {
    state.application.isLaunched = bool;
    return ['application'];
  },

  /**
   * Set the in-progress page-navigation status.
   * @param {Object} state
   * @param {Boolean} bool
   * @return {Array}
   */
  navigation: (state, bool) => {
    state.application.isNavigating = bool;
    return ['application'];
  },

  /**
   * Register an in-flight API request.
   * @param {Object} state
   * @param {Object} info
   * @return {Array}
   */
  addNetworking: (state, info) => {
    state.application.networkings.push(ObjectAssign({
      id: `networking_${Date.now()}`
    }, info));
    state.application.isNetworking = true;
    return ['application'];
  },

  /**
   * Remove an in-flight API request.
   * @param {Object} state
   * @param {String} networkingId
   * @param {riotx.Context} context
   * @return {Array}
   */
  removeNetworking: (state, networkingId, context) => {
    state.application.networkings = reject(state.application.networkings, networking => {
      return (networking.id === networkingId);
    });
    if (!!context) {
      // Deliberately delay the networking-state check.
      setTimeout(() => {
        context.commit('application.isNetworking');
      }, 500);
    } else if (!state.application.networkings.length) {
      state.application.isNetworking = false;
    }
    return ['application'];
  },

  /**
   * Refresh the networking flag from the list of in-flight requests.
   * @param {Object} state
   * @return {Array}
   */
  isNetworking: state => {
    state.application.isNetworking = !!state.application.networkings.length;
    return ['application'];
  },

  /**
   * Set the dragging status.
   * @param {Object} state
   * @param {Boolean} bool
   * @return {Array}
   */
  drag: (state, bool) => {
    state.application.isDragging = bool;
    return ['application'];
  },

  /**
   * Toggle the menu open/closed state.
   * @param {Object} state
   * @return {Array}
   */
  menuToggle: state => {
    state.application.isMenuOpened = !state.application.isMenuOpened;
    return ['application'];
  },

  /**
   * Update the endpoint filter text.
   * @param {Object} state
   * @param {String} newFilterText
   * @return {Array}
   */
  endpointFilterText: (state, newFilterText) => {
    state.application.endpointFilterText = newFilterText;
    return ['application'];
  },

  /**
   * Update the temporary endpoint filter text.
   * @param {Object} state
   * @param {String} newTempFilterText
   * @return {Array}
   */
  endpointTempFilterText: (state, newTempFilterText) => {
    state.application.endpointTempFilterText = newTempFilterText;
    return ['application'];
  }
});
|
rorik/UBU-MetProg | PO2/tests/juego/util/SentidoTest.java | package juego.util;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
/**
* [juego.util.SentidoTest] Created by <NAME>. on 2017/11/16.
*
* @author <NAME>.
* rorik.me
* github.com/rorik
*/
class SentidoTest {

    // East moves +1 column, west -1; north/south have no horizontal component.
    @Test
    void obtenerDesplazamientoHorizontal() {
        assertEquals(0, Sentido.NORTE.obtenerDesplazamientoHorizontal());
        assertEquals(0, Sentido.SUR.obtenerDesplazamientoHorizontal());
        assertEquals(1, Sentido.ESTE.obtenerDesplazamientoHorizontal());
        assertEquals(-1, Sentido.OESTE.obtenerDesplazamientoHorizontal());
    }

    // North moves -1 row (up), south +1; east/west have no vertical component.
    @Test
    void obtenerDesplazamientoVertical() {
        assertEquals(-1, Sentido.NORTE.obtenerDesplazamientoVertical());
        assertEquals(1, Sentido.SUR.obtenerDesplazamientoVertical());
        assertEquals(0, Sentido.ESTE.obtenerDesplazamientoVertical());
        assertEquals(0, Sentido.OESTE.obtenerDesplazamientoVertical());
    }
}
GreyNoise-Intelligence/insightconnect-plugins | cymon_v2/komand_cymon_v2/connection/connection.py | <filename>cymon_v2/komand_cymon_v2/connection/connection.py<gh_stars>0
import komand
from .schema import ConnectionSchema
# Custom imports below
from komand_cymon_v2.util.api import CymonV2
import maya
class Connection(komand.Connection):
    """Connection for the discontinued Cymon v2 threat-intelligence service."""

    def __init__(self):
        super(self.__class__, self).__init__(input=ConnectionSchema())

    def connect(self, params):
        # The service was shut down on April 30, 2019: refuse to connect
        # after that date, and only warn before it.
        now = maya.now()
        end = maya.when(string="April 30, 2019")
        if now >= end:
            raise Exception(
                "Error: The Cymon service has been discontinued. " "Please transition off of using this plugin."
            )
        else:
            self.logger.warning(
                "Warning: The Cymon service will be discontinued on April 30, 2019. "
                "Please plan to transition off this plugin before then."
            )
        self.logger.info("Connecting")
        credentials = params.get("api_credentials")
        if credentials:
            username = credentials.get("username")
            password = credentials.get("password")
            # NOTE(review): the "<PASSWORD>" placeholders below are redaction
            # residue from the published source -- the assignment line is not
            # valid Python. Restore the original anonymous-credential handling
            # (presumably username = None; password = None) before use.
            # NOTE(review): if `credentials` is falsy, `username`/`password`
            # are never bound and the CymonV2 call below raises NameError.
            if username == "anonymous" and password == "<PASSWORD>":  # noqa: B105
                username = None
                password = <PASSWORD>
        self.api = CymonV2(username, password, self.logger)
        self.logger.info("Connected")
|
airbornemint/PieCrust2 | piecrust/formatting/base.py |
# Relative ordering of formatters in the rendering chain.
PRIORITY_FIRST = -1
PRIORITY_NORMAL = 0
PRIORITY_LAST = 1


class Formatter(object):
    """Base class for text formatters.

    Subclasses set FORMAT_NAMES / OUTPUT_FORMAT and implement `render`.
    """

    # Names of the source formats this formatter accepts (set by subclasses).
    FORMAT_NAMES = None
    # Name of the format this formatter produces (set by subclasses).
    OUTPUT_FORMAT = None

    def __init__(self):
        self.priority = PRIORITY_NORMAL
        self.enabled = True

    def initialize(self, app):
        # Called with the application instance before any rendering.
        self.app = app

    def render(self, format_name, txt):
        """Convert `txt` (in format `format_name`); must be overridden."""
        raise NotImplementedError()
|
gorolykmaxim/time-trakr | app/src/main/java/com/example/timetrakr/viewmodel/activities/durations/ActivitiesDurationViewModel.java | package com.example.timetrakr.viewmodel.activities.durations;
import android.app.Application;
import com.example.timetrakr.TimeTrakrApplication;
import com.example.timetrakr.model.activity.duration.ActivityDuration;
import com.example.timetrakr.model.activity.duration.ActivityDurationCalculator;
import com.example.timetrakr.model.activity.duration.ActivityDurationSelection;
import com.example.timetrakr.model.activity.events.ActivityStartEvent;
import com.example.timetrakr.model.activity.events.ActivityStartEventRepository;
import com.example.timetrakr.model.messages.MessageRepository;
import java.time.Duration;
import java.util.List;
import java.util.Set;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.lifecycle.AndroidViewModel;
import androidx.lifecycle.LiveData;
import androidx.lifecycle.MediatorLiveData;
/**
* View model of the view that shows durations of activities, done today.
*/
public class ActivitiesDurationViewModel extends AndroidViewModel {

    // Durations derived from today's start events; recalculated whenever the
    // underlying events change.
    private MediatorLiveData<List<ActivityDuration>> observableActivityDurations;
    // Live list of today's activity start events from the repository.
    private LiveData<List<ActivityStartEvent>> activityStartEvents;
    // Converts a list of start events into per-activity durations.
    private ActivityDurationCalculator durationCalculator;
    // Tracks which durations the user selected and their total duration.
    private ActivityDurationSelection activityDurationSelection;
    // Title message matching the currently displayed durations.
    private MediatorLiveData<String> observableMessage;
    // Source of title messages keyed by lists of durations.
    private MessageRepository<List<ActivityDuration>> messageRepository;

    /**
     * Construct the view model.
     *
     * @param application reference to the application instance
     */
    public ActivitiesDurationViewModel(@NonNull Application application) {
        super(application);
        TimeTrakrApplication timeTrakrApplication = (TimeTrakrApplication)application;
        ActivityStartEventRepository repository = timeTrakrApplication.getActivityStartEventRepository();
        durationCalculator = timeTrakrApplication.getActivityDurationCalculator();
        messageRepository = timeTrakrApplication.getDurationMessagesRepository();
        activityStartEvents = repository.getObservableForAllForToday();
        observableActivityDurations = new MediatorLiveData<>();
        observableActivityDurations.addSource(activityStartEvents, this::triggerActivityDurationCalculationFor);
        activityDurationSelection = new ActivityDurationSelection(observableActivityDurations);
        observableMessage = new MediatorLiveData<>();
        observableMessage.addSource(observableActivityDurations, this::triggerMessageLookupFor);
    }

    /**
     * Specify activity duration selection of this view model. The view model will use this
     * selection to store selected activity durations and to display total duration of
     * stored activities.
     *
     * @param activityDurationSelection selection to use by view model
     */
    public void setActivityDurationSelection(ActivityDurationSelection activityDurationSelection) {
        this.activityDurationSelection = activityDurationSelection;
    }

    /**
     * Add specified activity duration to the current selection. In case activity already belongs
     * to the selection, the method call will be ignored.
     *
     * @param activityDuration activity duration to add to the selection
     */
    public void selectActivityDuration(ActivityDuration activityDuration) {
        activityDurationSelection.add(activityDuration);
    }

    /**
     * Remove specified activity duration from the current selection. In case activity does not
     * belong to the selection the method call will be ignored.
     *
     * @param activityDuration activity duration to remove from the selection
     */
    public void deselectActivityDuration(ActivityDuration activityDuration) {
        activityDurationSelection.remove(activityDuration);
    }

    /**
     * Clear current selection of activity durations.
     */
    public void clearSelectedActivityDurations() {
        activityDurationSelection.clear();
    }

    /**
     * Get observable of total duration of selected activities.
     *
     * @return observable of total duration
     */
    public LiveData<Duration> getObservableTotalDuration() {
        return activityDurationSelection.getObservableTotalDuration();
    }

    /**
     * Get observable of a set of selected activities.
     *
     * @return observable of selected activities
     */
    public LiveData<Set<String>> getObservableSelectedActivities() {
        return activityDurationSelection.getObservableSelectedActivities();
    }

    /**
     * Get observable for list of durations of today's activities.
     *
     * @return observable for list of today's activity durations
     */
    public LiveData<List<ActivityDuration>> getActivityDurations() {
        return observableActivityDurations;
    }

    /**
     * Get observable of a message to display in the title of the durations fragment.
     *
     * @return observable of a message to display
     */
    public LiveData<String> getObservableMessage() {
        return observableMessage;
    }

    /**
     * Force recalculation of durations of today's activities.
     */
    public void recalculateActivityDurations() {
        triggerActivityDurationCalculationFor(activityStartEvents.getValue());
    }

    /**
     * Calculate durations for activities from the specified list of activity start events and
     * notify observers of activity durations observable about the results.
     *
     * @param activityStartEvents list of activity start events to calculate durations for.
     *                            In case null is passed - the call will be ignored.
     */
    private void triggerActivityDurationCalculationFor(@Nullable List<ActivityStartEvent> activityStartEvents) {
        if (activityStartEvents != null) {
            observableActivityDurations.setValue(durationCalculator.calculateDurationsFromEvents(activityStartEvents));
        }
    }

    /**
     * Find a message in the message repository, that matches the specified list of activity
     * durations.
     *
     * @param activityDurations list of activity durations to find a message for.
     *                          In case null is passed - the call will be ignored.
     */
    private void triggerMessageLookupFor(@Nullable List<ActivityDuration> activityDurations) {
        if (activityDurations != null) {
            observableMessage.setValue(messageRepository.findOneThatAppliesTo(activityDurations));
        }
    }
}
|
lemonJun/Emmet | src/test/java/algo1/common/NineTable.java | <reponame>lemonJun/Emmet
package algo1.common;
public class NineTable {

    public static void main(String[] args) {
        table(9);
    }

    /**
     * Print the lower-triangular n-by-n multiplication table, one row per
     * line. Columns are separated with the tab character ("\t"), which jumps
     * to the next 8-column tab stop so the entries line up.
     *
     * @param n highest factor to print (9 gives the classic table)
     */
    public static void table(int n) {
        for (int row = 1; row <= n; row++) {
            StringBuilder line = new StringBuilder();
            for (int col = 1; col <= row; col++) {
                line.append(col).append("*").append(row).append("=").append(col * row).append("\t");
            }
            System.out.println(line);
        }
    }
}
|
legendzhouqiang/-uncode-cache | src/main/java/cn/uncode/cache/store/redis/JedisExecutor.java | <filename>src/main/java/cn/uncode/cache/store/redis/JedisExecutor.java<gh_stars>0
package cn.uncode.cache.store.redis;
import cn.uncode.cache.store.redis.cluster.JedisClusterCustom;
/**
 * Template for operations executed against a Redis cluster client; concrete
 * callers implement {@link #doInJedis} with the actual Redis calls.
 *
 * @param <T> result type of the operation
 */
public abstract class JedisExecutor<T> {
    abstract T doInJedis(JedisClusterCustom jedisClusterCustom);
}
|
SemyonSinchenko/jungrapht-visualization | jungrapht-visualization/src/main/java/org/jungrapht/visualization/layout/algorithms/repulsion/BarnesHutRepulsion.java | package org.jungrapht.visualization.layout.algorithms.repulsion;
import org.jungrapht.visualization.layout.model.LayoutModel;
/**
* @author <NAME>
* @param <V> the vertex type
* @param <R> the Repulsion type
* @param <B> the Repulsion Builder type
*/
public interface BarnesHutRepulsion<
        V, R extends BarnesHutRepulsion<V, R, B>, B extends BarnesHutRepulsion.Builder<V, R, B>>
    extends StandardRepulsion<V, R, B> {

  /** Builder adding Barnes-Hut-specific settings on top of the standard repulsion builder. */
  interface Builder<V, R extends BarnesHutRepulsion<V, R, B>, B extends Builder<V, R, B>>
      extends StandardRepulsion.Builder<V, R, B> {

    B layoutModel(LayoutModel<V> layoutModel);

    // theta: accuracy/speed trade-off parameter of the Barnes-Hut approximation.
    B theta(double theta);

    R build();
  }
}
|
kniefliu/WindowsSamples | third_party/skia_m76/third_party/externals/angle2/src/compiler/translator/tree_ops/ClampPointSize.h | <reponame>kniefliu/WindowsSamples
//
// Copyright (c) 2017 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// ClampPointSize.h: Limit the value that is written to gl_PointSize.
//
#ifndef COMPILER_TRANSLATOR_TREEOPS_CLAMPPOINTSIZE_H_
#define COMPILER_TRANSLATOR_TREEOPS_CLAMPPOINTSIZE_H_

namespace sh
{

class TIntermBlock;
class TSymbolTable;

// Transform the AST rooted at |root| so values written to gl_PointSize are
// limited to |maxPointSize| (see file header).
void ClampPointSize(TIntermBlock *root, float maxPointSize, TSymbolTable *symbolTable);

}  // namespace sh

#endif  // COMPILER_TRANSLATOR_TREEOPS_CLAMPPOINTSIZE_H_
BlackYps/faf-java-api | src/main/java/com/faforever/api/event/UpdatedEventResponse.java | package com.faforever.api.event;
/**
 * Response payload describing the result of an event update.
 *
 * @param id           internal id of the updated record
 * @param eventId      identifier of the event that was updated
 * @param currentCount current counter value, if any (boxed, may be null)
 */
record UpdatedEventResponse(
    int id,
    String eventId,
    Integer currentCount
) {
}
|
DanielGMesquita/StudyPath | PythonIntro/Semana4/DigitoAdjacente.py | <reponame>DanielGMesquita/StudyPath
# Reads an integer from input and checks whether it contains at least one
# digit equal to an adjacent digit. Prints "sim" (yes) if so, otherwise "não".
n = input('Digite um número: ')
adj = False  # becomes True as soon as two equal adjacent digits are found
c = 1  # index of the digit currently compared with its left neighbour
d = n[0]  # left neighbour of the current digit
while c < len(n) and not adj:
    if d == n[c]:
        adj = True
    d = n[c]
    c = c + 1
# The original kept a redundant `aux` flag mirroring `adj`; `adj` alone
# carries the answer.
if adj:
    print('sim')
else:
    print('não')
Keneral/asystem | connectivity/shill/async_connection.h | //
// Copyright (C) 2011 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#ifndef SHILL_ASYNC_CONNECTION_H_
#define SHILL_ASYNC_CONNECTION_H_
#include <memory>
#include <string>
#include <base/callback.h>
#include "shill/refptr_types.h"
namespace shill {
class EventDispatcher;
class IOHandler;
class IPAddress;
class Sockets;
// The AsyncConnection class implements an asynchronous
// outgoing TCP connection. When passed an IPAddress and
// port, and it will notify the caller when the connection
// is made. It can also be passed an interface name to
// bind the local side of the connection.
class AsyncConnection {
 public:
  // If non-empty |interface_name| specifies an local interface from which
  // to originate the connection.
  AsyncConnection(const std::string& interface_name,
                  EventDispatcher* dispatcher,
                  Sockets* sockets,
                  const base::Callback<void(bool, int)>& callback);
  virtual ~AsyncConnection();

  // Open a connection given an IP address and port (in host order).
  // When the connection completes, |callback| will be called with the
  // a boolean (indicating success if true) and an fd of the opened socket
  // (in the success case). If successful, ownership of this open fd is
  // passed to the caller on execution of the callback.
  //
  // This function (Start) returns true if the connection is in progress,
  // or if the connection has immediately succeeded (the callback will be
  // called in this case). On success the callback may be called before
  // Start() returns to its caller. On failure to start the connection,
  // this function returns false, but does not execute the callback.
  //
  // Calling Start() on an AsyncConnection that is already Start()ed is
  // an error.
  virtual bool Start(const IPAddress& address, int port);

  // Stop the open connection, closing any fds that are still owned.
  // Calling Stop() on an unstarted or Stop()ped AsyncConnection is
  // a no-op.
  virtual void Stop();

  // Description of the most recent failure, empty if none.
  std::string error() const { return error_; }

 private:
  friend class AsyncConnectionTest;

  // Invoked via |connect_completion_callback_| once the socket is ready.
  void OnConnectCompletion(int fd);
  // Initiate a socket connection to given IP address and port (in host order).
  int ConnectTo(const IPAddress& address, int port);

  // Local interface to bind, or empty for any.
  std::string interface_name_;
  EventDispatcher* dispatcher_;
  Sockets* sockets_;
  // Caller-supplied completion callback (success flag, fd).
  base::Callback<void(bool, int)> callback_;
  std::string error_;
  // Socket fd owned while a connection attempt is in flight; released to the
  // caller on success.
  int fd_;
  base::Callback<void(int)> connect_completion_callback_;
  std::unique_ptr<IOHandler> connect_completion_handler_;

  DISALLOW_COPY_AND_ASSIGN(AsyncConnection);
};
} // namespace shill
#endif // SHILL_ASYNC_CONNECTION_H_
|
mwrightE38/Event38-Dronekit-Android | ServiceApp/src/org/droidplanner/services/android/utils/apps/AppsUpdateReceiver.java | package org.droidplanner.services.android.utils.apps;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.support.v4.content.LocalBroadcastManager;
import org.droidplanner.services.android.ui.fragment.RecommendedAppsFragment;
/**
* Created by <NAME> on 2/5/15.
*/
public class AppsUpdateReceiver extends BroadcastReceiver {

    /**
     * Relays any received broadcast as a local
     * {@code ACTION_REFRESH_RECOMMENDED_APPS} intent so the
     * RecommendedAppsFragment refreshes its list.
     */
    @Override
    public void onReceive(Context context, Intent intent) {
        LocalBroadcastManager.getInstance(context)
                .sendBroadcast(new Intent(RecommendedAppsFragment.ACTION_REFRESH_RECOMMENDED_APPS));
    }
}
|
quameleon/ruby-nxt | lib/telegrams/commands/direct/keep_alive.rb | require_relative '../direct_command'
# Direct command that resets the NXT brick's sleep timer, keeping it awake.
class KeepAlive < DirectCommand
  # Opcode of the KEEPALIVE direct command.
  KEEP_ALIVE_OPCODE = 0x0D

  def initialize
    super(true)
    @command = KEEP_ALIVE_OPCODE
  end
end
|
AppSecAI-TEST/qalingo-engine | apis/api-core/api-core-common/src/main/java/org/hoteia/qalingo/core/domain/CmsContent.java | <filename>apis/api-core/api-core-common/src/main/java/org/hoteia/qalingo/core/domain/CmsContent.java
package org.hoteia.qalingo.core.domain;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.Lob;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.Version;
import org.apache.commons.lang.StringUtils;
import org.hibernate.Hibernate;
import org.hoteia.qalingo.core.annotation.CacheEntityInformation;
import org.hoteia.qalingo.core.comparator.CmsContentAssetComparator;
import org.hoteia.qalingo.core.comparator.CmsContentBlockComparator;
import org.hoteia.qalingo.core.domain.impl.DomainEntity;
@Entity
@Table(name="TCMS_CONTENT")
@CacheEntityInformation(cacheName="web_cache_cms_content")
public class CmsContent extends AbstractCmsEntity<CmsContent, CmsContentAttribute> implements DomainEntity {
/**
* Generated UID
*/
private static final long serialVersionUID = 2371548827810234869L;
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
@Column(name = "ID", nullable = false)
private Long id;
@Version
@Column(name = "VERSION", nullable = false) // , columnDefinition = "int(11) default 1"
private int version;
@Column(name = "CODE")
private String code;
@Column(name = "APP")
private String app;
@Column(name = "TYPE")
private String type;
@Column(name = "TITLE")
private String title;
@Column(name = "LINK_TITLE")
private String linkTitle;
@Column(name = "SEO_SEGMENT")
private String seoSegment;
@Column(name = "SEO_KEY")
private String seoKey;
@Column(name = "SUMMARY")
@Lob
private String summary;
@Column(name = "MASTER", nullable = false) // , columnDefinition = "tinyint(1) default 1"
private boolean master = false;
@Column(name = "ACTIVE", nullable = false) // , columnDefinition = "tinyint(1) default 0"
private boolean active = false;
@OneToMany(fetch = FetchType.LAZY, cascade = CascadeType.ALL, targetEntity = org.hoteia.qalingo.core.domain.CmsContentAttribute.class)
@JoinColumn(name = "CMS_CONTENT_ID")
private Set<CmsContentAttribute> attributes = new HashSet<CmsContentAttribute>();
@OneToMany(fetch = FetchType.LAZY, cascade = CascadeType.ALL, targetEntity = org.hoteia.qalingo.core.domain.CmsContentAsset.class)
@JoinColumn(name = "CMS_CONTENT_ID")
private Set<CmsContentAsset> assets = new HashSet<CmsContentAsset>();
@ManyToOne(fetch = FetchType.LAZY, targetEntity = org.hoteia.qalingo.core.domain.User.class)
@JoinColumn(name = "USER_ID", insertable = true, updatable = true)
private User user;
@ManyToOne(fetch = FetchType.LAZY, targetEntity = org.hoteia.qalingo.core.domain.MarketArea.class)
@JoinColumn(name = "MARKET_AREA_ID", insertable = true, updatable = true)
private MarketArea marketArea;
@ManyToOne(fetch = FetchType.LAZY, targetEntity = org.hoteia.qalingo.core.domain.Localization.class)
@JoinColumn(name = "LOCALIZATION_ID", insertable = true, updatable = true)
private Localization localization;
@ManyToOne(fetch = FetchType.LAZY, targetEntity = org.hoteia.qalingo.core.domain.CmsContent.class)
@JoinColumn(name = "CMS_CONTENT_ID", insertable = true, updatable = true)
private CmsContent masterCmsContent;
@OneToMany(fetch = FetchType.LAZY, cascade = CascadeType.ALL, targetEntity = org.hoteia.qalingo.core.domain.CmsContentBlock.class)
@JoinColumn(name = "CMS_CONTENT_ID")
private Set<CmsContentBlock> blocks = new HashSet<CmsContentBlock>();
@ManyToMany(fetch = FetchType.LAZY, cascade = { CascadeType.MERGE }, targetEntity = org.hoteia.qalingo.core.domain.ProductSku.class)
@JoinTable(name = "TCMS_CONTENT_PRODUCT_SKU_REL", joinColumns = @JoinColumn(name = "CMS_CONTENT_ID"), inverseJoinColumns = @JoinColumn(name = "PRODUCT_SKU_ID"))
private Set<ProductSku> productSkus = new HashSet<ProductSku>();
@ManyToMany(fetch = FetchType.LAZY, cascade = { CascadeType.MERGE }, targetEntity = org.hoteia.qalingo.core.domain.ProductBrand.class)
@JoinTable(name = "TCMS_CONTENT_PRODUCT_BRAND_REL", joinColumns = @JoinColumn(name = "CMS_CONTENT_ID"), inverseJoinColumns = @JoinColumn(name = "PRODUCT_BRAND_ID"))
private Set<ProductBrand> productBrands = new HashSet<ProductBrand>();
@Temporal(TemporalType.TIMESTAMP)
@Column(name="DATE_PUBLISH")
private Date datePublish;
@Temporal(TemporalType.TIMESTAMP)
@Column(name="DATE_CREATE")
private Date dateCreate;
@Temporal(TemporalType.TIMESTAMP)
@Column(name="DATE_UPDATE")
private Date dateUpdate;
public CmsContent() {
this.dateCreate = new Date();
this.dateUpdate = new Date();
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public int getVersion() {
return version;
}
public void setVersion(int version) {
this.version = version;
}
public String getCode() {
return code;
}
public void setCode(String code) {
this.code = code;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getApp() {
return app;
}
public void setApp(String app) {
this.app = app;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getLinkTitle() {
return linkTitle;
}
public void setLinkTitle(String linkTitle) {
this.linkTitle = linkTitle;
}
public String getSeoSegment() {
return seoSegment;
}
public void setSeoSegment(String seoSegment) {
this.seoSegment = seoSegment;
}
public String getSeoKey() {
return seoKey;
}
public void setSeoKey(String seoKey) {
this.seoKey = seoKey;
}
public String getSummary() {
return summary;
}
public void setSummary(String summary) {
this.summary = summary;
}
public boolean isMaster() {
return master;
}
public void setMaster(boolean master) {
this.master = master;
}
public boolean isActive() {
return active;
}
public void setActive(boolean active) {
this.active = active;
}
public Set<CmsContentAttribute> getAttributes() {
return attributes;
}
public void setAttributes(Set<CmsContentAttribute> attributes) {
this.attributes = attributes;
}
public Set<CmsContentAsset> getAssets() {
return assets;
}
public List<CmsContentAsset> getSortedAssets() {
List<CmsContentAsset> sortedCmsContentAssets = null;
if (assets != null
&& Hibernate.isInitialized(assets)) {
sortedCmsContentAssets = new LinkedList<CmsContentAsset>(assets);
Collections.sort(sortedCmsContentAssets, new CmsContentAssetComparator());
}
return sortedCmsContentAssets;
}
public void setAssets(Set<CmsContentAsset> assets) {
this.assets = assets;
}
public User getUser() {
return user;
}
public void setUser(User user) {
this.user = user;
}
public MarketArea getMarketArea() {
return marketArea;
}
public void setMarketArea(MarketArea marketArea) {
this.marketArea = marketArea;
}
public Localization getLocalization() {
return localization;
}
public void setLocalization(Localization localization) {
this.localization = localization;
}
public CmsContent getMasterCmsContent() {
return masterCmsContent;
}
public void setMasterCmsContent(CmsContent masterCmsContent) {
this.masterCmsContent = masterCmsContent;
}
public Set<CmsContentBlock> getBlocks() {
return blocks;
}
public List<CmsContentBlock> getSortedCmsContentBlocks() {
List<CmsContentBlock> sortedCmsContentBlocks = null;
if (blocks != null
&& Hibernate.isInitialized(blocks)) {
sortedCmsContentBlocks = new LinkedList<CmsContentBlock>(blocks);
Collections.sort(sortedCmsContentBlocks, new CmsContentBlockComparator());
}
return sortedCmsContentBlocks;
}
public CmsContentBlock getBlockByType(String type) {
if (blocks != null
&& Hibernate.isInitialized(blocks)
&& StringUtils.isNotEmpty(type)) {
for (CmsContentBlock cmsContentBlock : blocks) {
if(cmsContentBlock.getType().equals(type)){
return cmsContentBlock;
}
}
}
return null;
}
public void setBlocks(Set<CmsContentBlock> blocks) {
this.blocks = blocks;
}
public Set<ProductSku> getProductSkus() {
return productSkus;
}
public List<ProductSku> getSortedProductSkus() {
List<ProductSku> sortedProductSkus = null;
if (Hibernate.isInitialized(productSkus)
&& productSkus != null) {
sortedProductSkus = new LinkedList<ProductSku>(productSkus);
// TODO : sort
}
return sortedProductSkus;
}
public void setProductSkus(Set<ProductSku> productSkus) {
this.productSkus = productSkus;
}
public Set<ProductBrand> getProductBrands() {
return productBrands;
}
public ProductBrand getDefaultProductBrand() {
if (Hibernate.isInitialized(productBrands)
&& productBrands != null) {
for (ProductBrand productBrand : productBrands) {
return productBrand;
}
}
return null;
}
public void setProductBrands(Set<ProductBrand> productBrands) {
this.productBrands = productBrands;
}
public Date getDatePublish() {
return datePublish;
}
public void setDatePublish(Date datePublish) {
this.datePublish = datePublish;
}
public Date getDateCreate() {
return dateCreate;
}
public void setDateCreate(Date dateCreate) {
this.dateCreate = dateCreate;
}
public Date getDateUpdate() {
return dateUpdate;
}
public void setDateUpdate(Date dateUpdate) {
this.dateUpdate = dateUpdate;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((app == null) ? 0 : app.hashCode());
result = prime * result + ((code == null) ? 0 : code.hashCode());
result = prime * result + ((dateCreate == null) ? 0 : dateCreate.hashCode());
result = prime * result + ((datePublish == null) ? 0 : datePublish.hashCode());
result = prime * result + ((id == null) ? 0 : id.hashCode());
result = prime * result + ((seoKey == null) ? 0 : seoKey.hashCode());
result = prime * result + ((type == null) ? 0 : type.hashCode());
result = prime * result + version;
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
CmsContent other = (CmsContent) obj;
if (app == null) {
if (other.app != null)
return false;
} else if (!app.equals(other.app))
return false;
if (code == null) {
if (other.code != null)
return false;
} else if (!code.equals(other.code))
return false;
if (dateCreate == null) {
if (other.dateCreate != null)
return false;
} else if (!dateCreate.equals(other.dateCreate))
return false;
if (datePublish == null) {
if (other.datePublish != null)
return false;
} else if (!datePublish.equals(other.datePublish))
return false;
if (id == null) {
if (other.id != null)
return false;
} else if (!id.equals(other.id))
return false;
if (seoKey == null) {
if (other.seoKey != null)
return false;
} else if (!seoKey.equals(other.seoKey))
return false;
if (type == null) {
if (other.type != null)
return false;
} else if (!type.equals(other.type))
return false;
if (version != other.version)
return false;
return true;
}
@Override
public String toString() {
return "CmsContent [id=" + id + ", version=" + version + ", code="
+ code + ", app=" + app + ", type=" + type + ", title=" + title
+ ", linkTitle=" + linkTitle + ", seoSegment=" + seoSegment
+ ", seoKey=" + seoKey + ", summary=" + summary + ", active="
+ active + ", dateCreate=" + dateCreate + ", dateUpdate="
+ dateUpdate + "]";
}
} |
csamak/asterixdb | asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/transaction/DatasetIdFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.transaction.management.service.transaction;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
/**
 * Process-wide generator of monotonically increasing dataset ids.
 *
 * All access to the shared counter is serialized with class-level
 * synchronization.
 */
public class DatasetIdFactory {

    /** Most recently handed-out dataset id. */
    private static int lastId = 0;
    /** Whether {@link #initialize(int)} has been called. */
    private static boolean initializedFlag = false;

    public static synchronized boolean isInitialized() {
        return initializedFlag;
    }

    public static synchronized void initialize(int initialId) {
        lastId = initialId;
        initializedFlag = true;
    }

    /**
     * Returns the next dataset id.
     *
     * @throws AsterixException when the id space is exhausted
     */
    public static synchronized int generateDatasetId() throws AlgebricksException {
        if (lastId == Integer.MAX_VALUE) {
            throw new AsterixException(ErrorCode.DATASET_ID_EXHAUSTED);
        }
        lastId = lastId + 1;
        return lastId;
    }

    /**
     * Derives the "alternating" twin of a dataset id by flipping the sign
     * bit.  Pure function; no shared state touched.
     */
    public static int generateAlternatingDatasetId(int originalId) {
        return originalId ^ 0x80000000;
    }

    public static synchronized int getMostRecentDatasetId() {
        return lastId;
    }
}
|
dbrad/AdventOfCode2021 | src/Day10/Part02/tests.js | <filename>src/Day10/Part02/tests.js
import {assertEquals, describe, it} from "../../lib/testing.js";
import fn from "./main.js";
/**
 * Test suite for Day 10 part 2: scoring the autocomplete sequences of
 * incomplete navigation lines.
 */
export default async function ()
{
    // Sample navigation subsystem from the puzzle description.
    const sampleInput = [
        "[({(<(())[]>[[{[]{<()<>>",
        "[(()[<>])]({[<{<<[]>>(",
        "{([(<{}[<>[]}>{[]{[(<()>",
        "(((({<>}<{<{<>}{[]{[]{}",
        "[[<[([]))<([[{}[[()]]]",
        "[{[{({}]{}}([{[{{{}}([]",
        "{<[[]]>}<{[{[{[]{()[[[]",
        "[<(<(<(<{}))><([]([]()",
        "<{([([[(<>()){}]>(<<{{",
        "<{([{{}}[<[[[<>{}]]]>[]]",
    ];

    describe("When calculating the autocomplete score of the navigation subsystem", async () =>
    {
        it("should return 288957 as the score for the test input", async () =>
        {
            assertEquals(await fn(sampleInput), 288957);
        });
    });
}
naaman10/mcci | src/components/DonateItems.js | import React from 'react';
import PreviewCompatibleImage from './PreviewCompatibleImage';
const DonateItems = ({ title, text, list, cta }) => (
<div className="donate-items">
<div className="donate-items__wrapper">
<div className="donate-items__row_1">
<h2 className="donate-items-title">{title}</h2>
<p className="donate-items-subtitle">{text}</p>
</div>
<div className="donate-items__row_2">
{
list && list.length > 0 && list.map((item, i) => (
<div key={i} className="donate-items__column">
<PreviewCompatibleImage cName="donate-item__image" imageInfo={item.image} />
<p className="donate-items-label">{item.label}</p>
<a className="btn btn--amazon donate-items-button" href={item.cta.url}><em>{item.cta.label}</em></a>
</div>
))
}
</div>
<div className="donate-items__row_3">
<a href={cta.url} className="btn btn--amazon donate-items-wishlist-button" ><em>{cta.label}</em></a>
</div>
</div>
</div>
);
export default DonateItems; |
PurpleGuitar/tools | uwb/dwchunking.py | #!/usr/bin/env python2
# -*- coding: utf8 -*-
#
# Copyright (c) 2014 unfoldingWord
# http://creativecommons.org/licenses/MIT/
# See LICENSE file for details.
#
# Contributors:
# <NAME> <<EMAIL>>
"""
"""
import os
import sys
import codecs
## Import the bookKeys mapping from USFM-Tools
# Make the USFM-Tools support modules importable; bail out with a helpful
# message if the checkout is not present at the expected location.
USFMTools='/var/www/vhosts/door43.org/USFM-Tools/support'
sys.path.append(USFMTools)
try:
    from books import bookKeys
except ImportError:
    print "Please ensure that {0}/books.py exists.".format(USFMTools)
    sys.exit(1)
# Wiki page template for one chunk.  format() fields:
#   {0} = USFM text       {1} = reference heading   {2} = previous page path
#   {3} = next page path  {4} = chapter number      {5} = book name
#   {6} = USFM book key (the source file name without .txt)
TMPL = u'''====== {1} ======
===== TFT: =====
<usfm>
{0}
</usfm>
===== UTB: =====
<usfm>
{0}
</usfm>
===== Important Terms: =====
* **[[:en:uwb:notes:key-terms:example|example]]**
* **[[:en:uwb:notes:key-terms:example|example]]**
===== Translation Notes: =====
* **bold words** - explanation
* **bold words** - explanation
===== Links: =====
* **[[en/bible-training/notes:{6}/questions/comprehension/{4}|Luke Chapter {4} Checking Questions]]**
* **[[en/bible-training/notes:{6}/questions/checking/{5}-checking|{5} Checking Questions]]**
**[[en/bible-training/notes:{2}|<<]] | [[en/bible-training/notes:{3}|>>]]**'''
def splice(s):
    '''
    Split a wiki page dump into per-verse chunks.

    Sections are delimited by "===== <ref> =====" headings.  Returns a list
    of [filepath, text, reference] entries sorted by filepath; sections
    whose heading cannot be parsed as a scripture reference are dropped.
    '''
    chunks = []
    for section in s.split('\n===== '):
        ref, txt = section.split('=====\n', 1)
        ref = ref.strip()
        filepath = getpath(ref.lower())
        if not filepath:
            # Heading was not a "<book> <chapter>:<verses>" reference.
            continue
        chunks.append([filepath, txt.strip(), ref])
    chunks.sort(key=lambda chunk: chunk[0])
    return chunks
def getpath(r):
    '''
    Convert a reference like ``'luke 1:1-5'`` into ``'luke/01/01.txt'``.

    Psalms chapters/verses are zero-padded to three digits, everything
    else to two.  Returns False when ``r`` is not of the exact form
    ``'<book> <chapter>:<verses>'``.
    '''
    fill = 2
    try:
        book, ref = r.split(' ')
        c, vv = ref.split(':')
        v = vv.split('-')[0]
        if 'psa' in book.lower():
            fill = 3
        return '{0}/{1}/{2}.txt'.format(book, c.zfill(fill), v.zfill(fill))
    except (ValueError, AttributeError):
        # Malformed reference (wrong number of fields / not a string).
        # The original bare "except:" also swallowed SystemExit and
        # KeyboardInterrupt; only parse failures should map to False.
        return False
def writeFile(f, content):
    '''
    Write ``content`` to path ``f`` as UTF-8, creating the parent
    directory first if needed.
    '''
    makeDir(f.rpartition('/')[0])
    # Context manager guarantees the handle is closed even if write()
    # raises (the original leaked the handle on error).
    with codecs.open(f, encoding='utf-8', mode='w') as out:
        out.write(content)
def makeDir(d):
    '''
    Create directory ``d`` (including parents) with mode 0755 if it does
    not already exist.
    '''
    if not os.path.exists(d):
        try:
            # 0o755 is the modern octal literal, valid on both Python 2.6+
            # and Python 3 (the original 0755 form is Python-2-only).
            os.makedirs(d, 0o755)
        except OSError:
            # Another process may have created the directory between the
            # exists() check and makedirs(); only re-raise if it is still
            # missing.
            if not os.path.isdir(d):
                raise
def genNav(chunked, usfmbk):
    '''
    Write one wiki page per chunk, wiring previous/next navigation links.

    ``chunked`` is the output of splice(): a sorted list of
    [filepath, text, reference] entries.  ``usfmbk`` is the book key used
    in the checking-question links.
    '''
    # enumerate() replaces the original chunked.index(e) lookup, which was
    # O(n) per iteration and returned the index of the FIRST matching
    # entry, i.e. the wrong index when duplicate chunks exist.
    for idx, e in enumerate(chunked):
        # e[0] is filepath, e[1] is text, e[2] is ref
        prv = getNav(chunked, idx - 1)
        nxt = getNav(chunked, idx + 1)
        chp = e[2].split()[1].split(':')[0]
        bk = e[2].split()[0]
        writeFile(e[0], TMPL.format(e[1], e[2], prv, nxt, chp, bk, usfmbk))
def getNav(chunked, i):
    '''
    Return the filepath of chunk ``i``, or '' when ``i`` is just outside
    the list (callers only probe one position before/after a valid index).
    '''
    if i == -1 or i >= len(chunked):
        return ''
    return chunked[i][0]
if __name__ == '__main__':
    # Expect exactly one argument: the wiki dump file to chunk.
    if len(sys.argv) > 1:
        filetochunk = str(sys.argv[1]).strip()
        if not os.path.exists(filetochunk):
            print 'Directory not found: {0}'.format(filetochunk)
            sys.exit(1)
    else:
        print 'Please specify the file to chunk.'
        sys.exit(1)
    # Read the source page, split it into per-verse chunks, and write the
    # navigable wiki pages (.txt is stripped to form the book key).
    src = codecs.open(filetochunk, encoding='utf-8').read()
    chunked = splice(src)
    genNav(chunked, filetochunk.replace('.txt', ''))
xiaohalo/LeetCode | Python/spiral-matrix.py | <reponame>xiaohalo/LeetCode
from __future__ import print_function
# Time: O(m * n)
# Space: O(1)
#
# Given a matrix of m x n elements (m rows, n columns), return all elements of the matrix in spiral order.
#
# For example,
# Given the following matrix:
#
# [
# [ 1, 2, 3 ],
# [ 4, 5, 6 ],
# [ 7, 8, 9 ]
# ]
# You should return [1,2,3,6,9,8,7,4,5].
#
class Solution:
    # @param matrix, a list of lists of integers
    # @return a list of integers
    def spiralOrder(self, matrix):
        """Return the elements of *matrix* in clockwise spiral order.

        Walks an inward-shrinking window [top..bottom] x [left..right],
        emitting the top row, right column, bottom row and left column of
        the current window on each pass.  O(m*n) time, O(1) extra space.
        """
        result = []
        if not matrix:
            return result
        # xrange is Python-2-only and raises NameError under Python 3,
        # which this file otherwise targets (print_function import);
        # range behaves identically for iteration on both versions.
        left, right = 0, len(matrix[0]) - 1
        top, bottom = 0, len(matrix) - 1
        while left <= right and top <= bottom:
            # Top row, left -> right.
            for j in range(left, right + 1):
                result.append(matrix[top][j])
            # Right column, top+1 -> bottom-1.
            for i in range(top + 1, bottom):
                result.append(matrix[i][right])
            # Bottom row, right -> left; skipped for a single-row window.
            for j in reversed(range(left, right + 1)):
                if top < bottom:
                    result.append(matrix[bottom][j])
            # Left column, bottom-1 -> top+1; skipped for a single-column window.
            for i in reversed(range(top + 1, bottom)):
                if left < right:
                    result.append(matrix[i][left])
            left, right, top, bottom = left + 1, right - 1, top + 1, bottom - 1
        return result
if __name__ == "__main__":
    # Quick manual checks: a 3x3 spiral and a single-row matrix.
    print(Solution().spiralOrder([[ 1, 2, 3 ],
                                  [ 4, 5, 6 ],
                                  [ 7, 8, 9 ]]))
    print(Solution().spiralOrder([[2,3]]))
natebragg/java-sketch | test/new_ast/FieldMethod.java | class A {
void m() { }
}
// Holds an A instance created at construction time.
class B {
    A a;
    B() { a = new A(); }
}
// Sketch harness: exercises calling a method through a field reference
// (b.a.m()); "harness" is a Sketch-language entry-point keyword.
class FieldMethod {
    harness void main() {
        B b = new B();
        b.a.m();
    }
}
|
stavanmehta/leetcode | python/406.queue-reconstruction-by-height.py | <reponame>stavanmehta/leetcode<gh_stars>0
class Solution:
    def reconstructQueue(self, people: List[List[int]]) -> List[List[int]]:
        """Reconstruct a queue from (height, k) pairs.

        Each person is ``[h, k]`` where ``k`` is the number of people in
        front of them with height >= h.  Greedy: place people tallest
        first (ties by ascending k); inserting a shorter person at index
        ``k`` cannot disturb the k-counts of the taller people already
        placed.  The original source shipped with an empty method body.

        O(n^2) overall because of list insertion; the input list is not
        mutated.
        """
        ordered = sorted(people, key=lambda p: (-p[0], p[1]))
        queue = []
        for person in ordered:
            queue.insert(person[1], person)
        return queue
rkamath3/chocosolver | src/test/java/org/clafer/ir/IrPrefixTest.java | <filename>src/test/java/org/clafer/ir/IrPrefixTest.java
package org.clafer.ir;
import org.clafer.choco.constraint.Constraints;
import org.clafer.ir.IrQuickTest.Solution;
import static org.clafer.ir.Irs.*;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.chocosolver.solver.constraints.Constraint;
import org.chocosolver.solver.variables.CStringVar;
/**
 * Quick-check test for the IR "prefix" constraint.
 *
 * @author jimmy
 */
@RunWith(IrQuickTest.class)
public class IrPrefixTest {

    /**
     * Property under test: {@code prefix} is a string prefix of
     * {@code word}.  The IrQuickTest runner generates variable instances
     * for the parameters.
     */
    @Test(timeout = 60000)
    public IrBoolExpr setup(IrStringVar prefix, IrStringVar word) {
        return prefix(prefix, word);
    }

    /**
     * Choco-level oracle: the same prefix constraint posted directly on
     * solver variables — presumably cross-checked against the IR
     * expression by the runner (see IrQuickTest; confirm).
     */
    @Solution
    public Constraint setup(CStringVar prefix, CStringVar word) {
        return Constraints.prefix(
                prefix.getChars(), prefix.getLength(),
                word.getChars(), word.getLength());
    }
}
|
jpadilla/denali | commands/build.js | import dedent from 'dedent-js';
import Command from '../lib/cli/command';
import Project from '../lib/cli/project';
import ui from '../lib/cli/ui';
/**
 * `denali build` — compiles the app's ES201X source into optimized,
 * sourcemapped ES5 output, optionally rebuilding on file changes.
 */
export default class BuildCommand extends Command {

  static commandName = 'build';
  static description = 'Compile your app into optimized ES5 code';
  static longDescription = dedent`
    Takes your app's ES201X source code and produces compiled, sourcemapped, and
    optimized output compatible with Node 6.`;

  params = [];

  flags = {
    environment: {
      description: 'The target environment to build for.',
      defaultValue: 'development',
      type: String
    },
    output: {
      description: 'The directory to build into',
      defaultValue: 'dist',
      type: String
    },
    watch: {
      description: 'Continuously watch the source files and rebuild on changes',
      defaultValue: false,
      type: Boolean
    },
    'print-slow-trees': {
      description: 'Print out an analysis of the build process, showing the slowest nodes.',
      defaultValue: false,
      type: Boolean
    }
  };

  runsInApp = true;

  run({ flags }) {
    // Linting is skipped for production builds.
    const isProduction = flags.environment === 'production';
    const project = new Project({
      environment: flags.environment,
      printSlowTrees: flags['print-slow-trees'],
      lint: !isProduction
    });

    if (!flags.watch) {
      // One-shot build.
      project.build(flags.output);
      return;
    }

    // Watch mode: rebuild whenever the source changes.
    project.watch({
      outputDir: flags.output,
      onBuild() {
        ui.info('Build complete');
      }
    });
  }

}
|
AlexanderFadeev/ood | lab1/duck/decoy.go | package duck
import (
"github.com/AlexanderFadeev/ood/lab1/duck/dance_strategy"
"github.com/AlexanderFadeev/ood/lab1/duck/fly_strategy"
"github.com/AlexanderFadeev/ood/lab1/duck/quack_strategy"
)
// Decoy is a duck decoy built on ConfigurableDuck.
type Decoy struct {
	ConfigurableDuck
}
// NewDecoyDuck builds a Decoy configured with a muted quack and the
// "no way" fly and dance strategies.
func NewDecoyDuck() *Decoy {
	return &Decoy{
		newDuck("decoy", quack_strategy.Muted, fly_strategy.NoWay, dance_strategy.NoWay),
	}
}
|
zhouhp007/AndroidAll | language-java/java-core/src/com/chiclaim/reflection/InstanceOf.java | package com.chiclaim.reflection;
/**
 * Demonstrates the difference between the {@code instanceof} operator and
 * the reflective {@code Class.isInstance} / {@code Class.isAssignableFrom}
 * checks, in particular around autoboxing of primitive literals.
 *
 * @author chiclaim
 */
public class InstanceOf {

    public static void main(String[] args) {
        System.out.println(Integer.valueOf(1) instanceof Number); // true
        System.out.println(int.class.isInstance(11)); // false [int is not Integer: 11 autoboxes to Integer]
        System.out.println(Number.class.isInstance(11)); // true: the boxed Integer is a Number
        System.out.println(Number.class.isAssignableFrom(Integer.class)); // true: Integer extends Number
    }
}
|
rafael-radkowski/TrackingExpert- | include/loader/SamplingTypes.h | <filename>include/loader/SamplingTypes.h
/*
class SamplingTypes
<NAME>
Iowa State University
<EMAIL>
MIT License
---------------------------------------------------------------
Last edits:
Aug 9, 2020, RR:
- Added a method to validate the correctness of the value and to correct them if required.
*/
#ifndef __SAMPLING_TIMES__
#define __SAMPLING_TIMES__
#include <algorithm>
/*
The different dataset sampling types for the camera data and the loaded model.
RAW: all points are used
UNIFORM: the points are uniformly distributed, using a voxel raster or a grid on the image.
RANDOM: random points are selected, without any distribution.
*/
typedef enum _SamplingMethod
{
	RAW = 0,
	UNIFORM = 1,
	RANDOM = 2,
}SamplingMethod;
// Parameters controlling the sampling methods above.  Defaults are set in
// the constructor; validate() clamps all values into their legal ranges.
typedef struct _SamplingParam
{
	// voxel grid size
	// for uniform sampling
	float grid_x;
	float grid_y;
	float grid_z;

	// step size for uniform sampling
	int uniform_step;

	// Upper bound on the number of points kept by RANDOM sampling.
	int random_max_points;
	// NOTE: keeps the historical misspelling ("ramdom"); renaming the
	// field would break existing callers.  Currently unused (see ctor).
	float ramdom_percentage;

	_SamplingParam()
	{
		// Unit of the grid is the model unit.
		grid_x = 0.01f;
		grid_y = 0.01f;
		grid_z = 0.01f;
		uniform_step = 1;
		random_max_points = 5000;
		ramdom_percentage = 25; // currently not in use. Use the max random points number.
	}

	// Clamp all parameters into their legal ranges (step/point counts >= 1,
	// percentage in [1, 100], grid cells strictly positive).
	void validate(void) {
		uniform_step = std::max(1, uniform_step);
		random_max_points = std::max(1, random_max_points);
		ramdom_percentage = std::max(1.0f, std::min(100.0f, ramdom_percentage));
		grid_x = std::max(0.0001f, grid_x);
		grid_y = std::max(0.0001f, grid_y);
		grid_z = std::max(0.0001f, grid_z);
	}
}SamplingParam;
#endif |
aldrinbaroi/simple-messaging-server | src/main/java/net/baroi/messaging/server/simple/encryption/EncryptDecryptUtil.java | <filename>src/main/java/net/baroi/messaging/server/simple/encryption/EncryptDecryptUtil.java<gh_stars>0
package net.baroi.messaging.server.simple.encryption;
import org.jasypt.encryption.pbe.StandardPBEStringEncryptor;
/**
 * Command-line helper that encrypts or decrypts a string with Jasypt's
 * {@code StandardPBEStringEncryptor}.
 *
 * Usage: encryptor [encrypt|decrypt] PASSWORD TEXT
 *
 * @author <NAME>
 */
public class EncryptDecryptUtil {

    private static final String ENCRYPT = "encrypt";
    private static final String DECRYPT = "decrypt";

    public static void main(String[] args) {
        if ((args.length == 3) && (args[0].equalsIgnoreCase(ENCRYPT) || args[0].equalsIgnoreCase(DECRYPT))) {
            String command = args[0];
            String password = args[1];
            String text = args[2];
            StandardPBEStringEncryptor enc = new StandardPBEStringEncryptor();
            enc.setPassword(password);
            // Dispatch case-insensitively to match the validation above.
            // The original compared with equals(), so a command such as
            // "Encrypt" passed validation but matched neither branch and
            // silently did nothing.
            if (command.equalsIgnoreCase(ENCRYPT)) {
                System.out.println();
                System.out.println("Command : " + command);
                System.out.println("Password : " + password);
                System.out.println("Provided plain text: " + text);
                try {
                    System.out.println("Encrypted text : " + enc.encrypt(text));
                } catch (Exception e) {
                    System.out.println("ERROR: " + e.getMessage());
                }
                System.out.println();
            } else {
                // Validation guarantees the command is "decrypt" here.
                System.out.println();
                System.out.println("Command : " + command);
                System.out.println("Password : " + password);
                System.out.println("Provided encrypted text: " + text);
                try {
                    System.out.println("Decrypted plain text : " + enc.decrypt(text));
                } catch (Exception e) {
                    System.out.println("ERROR: " + e.getMessage());
                }
                System.out.println();
            }
        } else {
            System.out.println();
            System.out.println("Usage: encryptor COMMAND PASSWORD TEXT");
            System.out.println("");
            System.out.println(" COMMAND : [encrypt|decrypt]");
            System.out.println(" PASSWORD: Password to be used for encryption & decryption");
            System.out.println(" TEXT    : Text to be encrypted or decrypted");
            System.out.println();
        }
    }
}
|
acmd/GIT-TCC-LIBQUIC-ACMD | src/crypto/openssl_util.h | // Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CRYPTO_OPENSSL_UTIL_H_
#define CRYPTO_OPENSSL_UTIL_H_
#include <stddef.h>
#include <string.h>

#include "base/location.h"
#include "base/macros.h"

#include "crypto/crypto_export.h"
namespace crypto {
// Provides a buffer of at least MIN_SIZE bytes, for use when calling OpenSSL's
// SHA256, HMAC, etc functions, adapting the buffer sizing rules to meet those
// of the our base wrapper APIs.
// This allows the library to write directly to the caller's buffer if it is of
// sufficient size, but if not it will write to temporary |min_sized_buffer_|
// of required size and then its content is automatically copied out on
// destruction, with truncation as appropriate.
// NOTE(review): uses memcpy, which requires <string.h>; this header only
// includes <stddef.h> directly, relying on a transitive include.
template<int MIN_SIZE>
class ScopedOpenSSLSafeSizeBuffer {
 public:
  ScopedOpenSSLSafeSizeBuffer(unsigned char* output, size_t output_len)
      : output_(output),
        output_len_(output_len) {
  }

  ~ScopedOpenSSLSafeSizeBuffer() {
    if (output_len_ < MIN_SIZE) {
      // Copy the temporary buffer out, truncating as needed.
      memcpy(output_, min_sized_buffer_, output_len_);
    }
    // else... any writing already happened directly into |output_|.
  }

  // Buffer that callees may safely write MIN_SIZE bytes into: the caller's
  // buffer when large enough, otherwise the internal temporary.
  unsigned char* safe_buffer() {
    return output_len_ < MIN_SIZE ? min_sized_buffer_ : output_;
  }

 private:
  // Pointer to the caller's data area and its associated size, where data
  // written via safe_buffer() will [eventually] end up.
  unsigned char* output_;
  size_t output_len_;

  // Temporary buffer writen into in the case where the caller's
  // buffer is not of sufficient size.
  unsigned char min_sized_buffer_[MIN_SIZE];

  DISALLOW_COPY_AND_ASSIGN(ScopedOpenSSLSafeSizeBuffer);
};
// Initialize OpenSSL if it isn't already initialized. This must be called
// before any other OpenSSL functions though it is safe and cheap to call this
// multiple times.
// This function is thread-safe, and OpenSSL will only ever be initialized once.
// OpenSSL will be properly shut down on program exit.
CRYPTO_EXPORT void EnsureOpenSSLInit();
// Drains the OpenSSL ERR_get_error stack. On a debug build the error codes
// are send to VLOG(1), on a release build they are disregarded. In most
// cases you should pass FROM_HERE as the |location|.
CRYPTO_EXPORT void ClearOpenSSLERRStack(
const tracked_objects::Location& location);
// Place an instance of this class on the call stack to automatically clear
// the OpenSSL error stack on function exit.  Also ensures OpenSSL is
// initialized on construction.
class OpenSSLErrStackTracer {
 public:
  // Pass FROM_HERE as |location|, to help track the source of OpenSSL error
  // messages. Note any diagnostic emitted will be tagged with the location of
  // the constructor call as it's not possible to trace a destructor's callsite.
  explicit OpenSSLErrStackTracer(const tracked_objects::Location& location)
      : location_(location) {
    EnsureOpenSSLInit();
  }
  ~OpenSSLErrStackTracer() {
    ClearOpenSSLERRStack(location_);
  }

 private:
  const tracked_objects::Location location_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(OpenSSLErrStackTracer);
};
} // namespace crypto
#endif // CRYPTO_OPENSSL_UTIL_H_
|
stark4n6/cLeapp | scripts/artifacts/customDict.py | <filename>scripts/artifacts/customDict.py
import os
import textwrap
from scripts.artifact_report import ArtifactHtmlReport
from scripts.cleapfuncs import logfunc, logdevinfo, tsv, timeline, is_platform_windows, get_next_unused_name, open_sqlite_db_readonly, get_browser_name
def get_customDict(files_found, report_folder, seeker, wrap_text):
    """Parse custom-dictionary files and report every user-added entry.

    Lines containing 'checksum_v1 =' are metadata and are skipped; every
    other line is reported verbatim as a one-element tuple.
    """
    data_list = []
    for file_found in files_found:
        file_found = str(file_found)
        with open(file_found, "r") as filefrom:
            for line in filefrom:
                # Skip the embedded checksum metadata line.
                if 'checksum_v1 =' in line:
                    continue
                data_list.append((line,))

    if not data_list:
        logfunc('No Custom Dictionary Data available')
        return

    report = ArtifactHtmlReport('Custom Dictionary')
    report.start_artifact_report(report_folder, 'Custom Dictionary')
    report.add_script()
    data_headers = ('Value',)
    report.write_artifact_data_table(data_headers, data_list, file_found, html_escape=False)
    report.end_artifact_report()

    tsvname = 'Custom Dictionary'
    tsv(report_folder, data_headers, data_list, tsvname)
ankitaagar/felix-dev | framework/src/main/java/org/apache/felix/framework/FilterImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.felix.framework;
import java.util.Collection;
import java.util.Collections;
import java.util.Dictionary;
import java.util.Enumeration;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.felix.framework.ServiceRegistrationImpl.ServiceReferenceImpl;
import org.apache.felix.framework.capabilityset.CapabilitySet;
import org.apache.felix.framework.capabilityset.SimpleFilter;
import org.apache.felix.framework.util.StringMap;
import org.apache.felix.framework.wiring.BundleCapabilityImpl;
import org.osgi.framework.Filter;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.ServiceReference;
import org.osgi.framework.wiring.BundleRevision;
public class FilterImpl implements Filter
{
private final SimpleFilter m_filter;
public FilterImpl(String filterStr) throws InvalidSyntaxException
{
try
{
m_filter = SimpleFilter.parse(filterStr);
}
catch (Throwable th)
{
throw new InvalidSyntaxException(th.getMessage(), filterStr);
}
}
public boolean match(ServiceReference sr)
{
if (sr instanceof ServiceReferenceImpl)
{
return CapabilitySet.matches((ServiceReferenceImpl) sr, m_filter);
}
else
{
return CapabilitySet.matches(new WrapperCapability(sr), m_filter);
}
}
public boolean match(Dictionary<String, ? > dctnr)
{
return CapabilitySet.matches(new WrapperCapability(dctnr, false), m_filter);
}
public boolean matchCase(Dictionary<String, ? > dctnr)
{
return CapabilitySet.matches(new WrapperCapability(dctnr, true), m_filter);
}
public boolean matches(Map<String, ?> map)
{
return CapabilitySet.matches(new WrapperCapability(map), m_filter);
}
public boolean equals(Object o)
{
return toString().equals(o.toString());
}
public int hashCode()
{
return toString().hashCode();
}
public String toString()
{
return m_filter.toString();
}
static class WrapperCapability extends BundleCapabilityImpl
{
private final Map m_map;
public WrapperCapability(Map map)
{
super(null, null, Collections.EMPTY_MAP, Collections.EMPTY_MAP);
m_map = (map == null) ? Collections.EMPTY_MAP : map;
}
public WrapperCapability(Dictionary dict, boolean caseSensitive)
{
super(null, null, Collections.EMPTY_MAP, Collections.EMPTY_MAP);
m_map = new DictionaryToMap(dict, caseSensitive);
}
public WrapperCapability(ServiceReference sr)
{
super(null, null, Collections.EMPTY_MAP, Collections.EMPTY_MAP);
m_map = new StringMap();
for (String key : sr.getPropertyKeys())
{
m_map.put(key, sr.getProperty(key));
}
}
@Override
public BundleRevision getRevision()
{
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public String getNamespace()
{
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public Map<String, String> getDirectives()
{
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public Map<String, Object> getAttributes()
{
return m_map;
}
@Override
public List<String> getUses()
{
throw new UnsupportedOperationException("Not supported yet.");
}
}
private static class DictionaryToMap implements Map
{
private final Map m_map;
private final Dictionary m_dict;
public DictionaryToMap(Dictionary dict, boolean caseSensitive)
{
if (!caseSensitive)
{
m_dict = null;
m_map = new StringMap();
if (dict != null)
{
Enumeration keys = dict.keys();
while (keys.hasMoreElements())
{
Object key = keys.nextElement();
if (m_map.get(key) == null)
{
m_map.put(key, dict.get(key));
}
else
{
throw new IllegalArgumentException(
"Duplicate attribute: " + key.toString());
}
}
}
}
else
{
m_dict = dict;
m_map = null;
}
}
public int size()
{
throw new UnsupportedOperationException("Not supported yet.");
}
public boolean isEmpty()
{
throw new UnsupportedOperationException("Not supported yet.");
}
public boolean containsKey(Object o)
{
throw new UnsupportedOperationException("Not supported yet.");
}
public boolean containsValue(Object o)
{
throw new UnsupportedOperationException("Not supported yet.");
}
public Object get(Object o)
{
if (m_dict != null)
{
return m_dict.get(o);
}
else if (m_map != null)
{
return m_map.get(o);
}
return null;
}
public Object put(Object k, Object v)
{
throw new UnsupportedOperationException("Not supported yet.");
}
public Object remove(Object o)
{
throw new UnsupportedOperationException("Not supported yet.");
}
public void putAll(Map map)
{
throw new UnsupportedOperationException("Not supported yet.");
}
public void clear()
{
throw new UnsupportedOperationException("Not supported yet.");
}
public Set<Object> keySet()
{
throw new UnsupportedOperationException("Not supported yet.");
}
public Collection<Object> values()
{
throw new UnsupportedOperationException("Not supported yet.");
}
public Set<Entry<Object, Object>> entrySet()
{
return Collections.EMPTY_SET;
}
}
} |
tanmeimei11/iui | packages/iVideoScreen/index.js | <reponame>tanmeimei11/iui
import iVideoScreen from './src/index.vue'

// Plugin install hook: registers the component globally under its own name
// so consumers can enable it via Vue.use(iVideoScreen).
iVideoScreen.install = (Vue) => {
  Vue.component(iVideoScreen.name, iVideoScreen)
}

export default iVideoScreen
|
moutainhigh/yeecook | platform-shop/src/main/java/com/platform/service/MenuService.java | package com.platform.service;
import com.platform.entity.MenuEntity;
import java.util.List;
import java.util.Map;
/**
 * Menu service interface.
 *
 * @author zoubin
 * @email <EMAIL>
 * @date 2019-05-07 23:34:58
 */
public interface MenuService {

    /**
     * Look up a single entity by primary key.
     *
     * @param id primary key
     * @return the matching entity
     */
    MenuEntity queryObject(Integer id);

    /**
     * Paged query.
     *
     * @param map query parameters
     * @return result list
     */
    List<MenuEntity> queryList(Map<String, Object> map);

    /**
     * Total row count for a paged query.
     *
     * @param map query parameters
     * @return total count
     */
    int queryTotal(Map<String, Object> map);

    /**
     * Persist a new entity.
     *
     * @param menu entity to save
     * @return number of rows inserted
     */
    int save(MenuEntity menu);

    /**
     * Update an entity identified by its primary key.
     *
     * @param menu entity carrying the new values
     * @return number of rows updated
     */
    int update(MenuEntity menu);

    /**
     * Delete by primary key.
     *
     * @param id primary key
     * @return number of rows deleted
     */
    int delete(Integer id);

    /**
     * Batch delete by primary keys.
     *
     * @param ids primary keys
     * @return number of rows deleted
     */
    int deleteBatch(Integer[] ids);
}
|
Saljack/mapstruct | processor/src/main/java/org/mapstruct/ap/internal/version/package-info.java | <reponame>Saljack/mapstruct
/*
* Copyright MapStruct Authors.
*
* Licensed under the Apache License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
/**
* <p>
* Provides various version information
* </p>
*/
package org.mapstruct.ap.internal.version;
|
JKerney-HunterIndustries/battledeck | app/moduleWrappers/logger.js | 'use strict';
// Logger factory: call sites obtain the global console through this wrapper
// so the logging backend can later be swapped in a single place.
const logger = () => console;

module.exports = logger;
prorevizor/noc | ui/web/core/StateModel.js | <gh_stars>10-100
//---------------------------------------------------------------------
// NOC.core.State
//---------------------------------------------------------------------
// Copyright (C) 2007-2017 The NOC Project
// See LICENSE for details
//---------------------------------------------------------------------
console.debug("Defining NOC.core.StateModel");
// Ext.data.Model describing a workflow state:
//  - label / description: display metadata for the state,
//  - to_state / to_state__label: optional transition target and its label.
Ext.define("NOC.core.StateModel", {
    extend: "Ext.data.Model",
    fields: [
        {name: "id", type: "string"},
        {name: "label", type: "string"},
        {name: "description", type: "string"},
        {name: "to_state", type: "string"},
        {name: "to_state__label", type: "string"}
    ]
});
|
zfoo-project/zapp-web | src/util/websocketUtils.js | <filename>src/util/websocketUtils.js<gh_stars>1-10
import ByteBuffer from '@/jsProtocol/buffer/ByteBuffer';
import ProtocolManager from '@/jsProtocol/ProtocolManager.js';
import WebsocketSignInRequest from '@/jsProtocol/user/WebsocketSignInRequest.js';
import Error from '@/jsProtocol/common/Error.js';
import Message from '@/jsProtocol/common/Message.js';
import Ping from '@/jsProtocol/common/Ping';
import Pong from '@/jsProtocol/common/Pong';
import i18n from '@/i18n/i18n.js';
import { getXToken } from '@/util/authUtils.js';
import { simpleSuccess, simpleError } from '@/util/noticeUtils.js';
import { isBlank } from '@/util/stringUtils.js';
// Current websocket connection (null when not connected).
let websocket = null;
// URL of the last connection attempt; also used by the reconnect logic.
let wsUrl = null;
// Last server timestamp (ms) learned from a Pong; used for staleness checks.
let serverTime = new Date().getTime();

// Heartbeat: send a ping every 30 seconds.
setInterval(function() {
    // Not signed in -- no heartbeat needed.
    if (isBlank(getXToken())) {
        return;
    }
    if (isBlank(wsUrl)) {
        return;
    }
    // No response from the server for 3 minutes -- reconnect.
    if (new Date().getTime() - serverTime >= 3 * 60 * 1000) {
        connect(wsUrl, '服务器长时间没有相应,进行重连尝试');
        return;
    }
    sendPacket(new Ping());
}, 30000);
// WebSocket readyState codes:
// 0 (CONNECTING) -- connection in progress
// 1 (OPEN)       -- connected, ready to communicate
// 2 (CLOSING)    -- connection closing
// 3 (CLOSED)     -- connection closed or never established
/**
 * (Re)connect the websocket.  Any existing connection is torn down first.
 * On open: ping to sync the server time, then send the sign-in request.
 * Incoming frames: 4-byte length prefix, then a ProtocolManager packet.
 */
export function connect(url, desc) {
    console.log('start connect websocket: ' + desc);
    closeWebsocket();
    // const ws = new WebSocket('ws://127.0.0.1:9000/websocket');
    const ws = new WebSocket(url);
    websocket = ws;
    wsUrl = url;
    ws.binaryType = 'arraybuffer';
    ws.onopen = function() {
        console.log('websocket open success');
        // After the connection succeeds, first ping to sync the server time,
        // then send the sign-in request.
        sendPacket(new Ping());
        const request = new WebsocketSignInRequest(getXToken());
        sendPacket(request);
        simpleSuccess(i18n.t('notice.websocketConnectSuccess'));
        serverTime = new Date().getTime();
    };
    ws.onmessage = function(event) {
        const data = event.data;
        const byteBuffer = new ByteBuffer();
        byteBuffer.writeBytes(data);
        // Skip the 4-byte length prefix before decoding the packet.
        byteBuffer.setReadOffset(4);
        const packet = ProtocolManager.read(byteBuffer);
        byteBuffer.readBoolean();
        console.log('Websocket收到:', packet);
        if (packet.protocolId() === Pong.prototype.protocolId()) {
            serverTime = _.toNumber(packet.time);
        } else if (packet.protocolId() === Message.prototype.protocolId()) {
            if (packet.code === 1) {
                simpleSuccess(i18n.t('code_enum_' + packet.code));
            } else if (packet.code === 2) {
                // do nothing
            } else {
                simpleError(i18n.t('code_enum_' + packet.code));
            }
        } else if (packet.protocolId() === Error.prototype.protocolId()) {
            simpleError(i18n.t('code_enum_' + packet.errorCode));
        } else {
            // Dispatch to the per-protocol receiver registered via packetReceiver().
            ProtocolManager.getProtocol(packet.protocolId()).receiver(packet);
        }
    };
    ws.onerror = function(event) {
        console.log('websocket error');
        console.log(event);
    };
    ws.onclose = function(event) {
        console.log('websocket close');
        console.log(event);
    };
}
/**
 * Tear down the current websocket connection, if any.
 * A socket that is CONNECTING (0) or OPEN (1) is closed and the reference
 * kept (so its close handler can still run); otherwise the stale reference
 * is dropped.
 */
export function closeWebsocket() {
    const ws = websocket;
    if (_.isNil(ws)) {
        return;
    }
    const isActive = ws.readyState === 0 || ws.readyState === 1;
    if (isActive) {
        ws.close();
    } else {
        websocket = null;
    }
}
/**
 * Serialize `packet` as a length-prefixed frame and send it over the current
 * websocket, reconnecting when the socket is missing or closed.
 */
export function sendPacket(packet) {
    console.log('Websocket发送:', packet);
    // Not signed in: there is no session to send on.
    if (isBlank(getXToken())) {
        simpleError(i18n.t('notice.notSignInError'));
        return;
    }
    if (isBlank(wsUrl)) {
        simpleError(i18n.t('notice.websocketConnectError'));
        return;
    }
    // Socket reference lost -- reconnect before sending.
    if (_.isNil(websocket)) {
        simpleError(i18n.t('notice.websocketReconnect'));
        connect(wsUrl, '发送消息的时候ws是空的,重连ws');
        return;
    }
    switch (websocket.readyState) {
        case 0:
            simpleSuccess(i18n.t('notice.websocketConnecting'));
            break;
        case 1:
            // eslint-disable-next-line no-case-declarations
            const byteBuffer = new ByteBuffer();
            // Reserve 4 bytes for the length prefix, then write body + trailer flag.
            byteBuffer.setWriteOffset(4);
            ProtocolManager.write(byteBuffer, packet);
            byteBuffer.writeBoolean(false);
            // eslint-disable-next-line no-case-declarations
            const writeOffset = byteBuffer.writeOffset;
            // Back-fill the length prefix (payload size excluding the prefix itself).
            byteBuffer.setWriteOffset(0);
            byteBuffer.writeRawInt(writeOffset - 4);
            byteBuffer.setWriteOffset(writeOffset);
            websocket.send(byteBuffer.buffer);
            break;
        case 2:
            simpleError(i18n.t('notice.websocketConnecting'));
            connect(wsUrl, '发送消息的时候ws正在关闭,进行重连尝试');
            break;
        case 3:
            simpleError(i18n.t('notice.websocketConnecting'));
            connect(wsUrl, '发送消息的时候ws连接关闭,进行重连尝试');
            break;
        default:
            simpleError(i18n.t('notice.websocketStateError'));
    }
}
/**
 * Bind a receiver callback to a protocol exactly once; later calls for the
 * same protocol are ignored.
 */
export function packetReceiver(protocol, receiverCallback) {
    if (!_.isNil(protocol.receiver)) {
        return;
    }
    protocol.receiver = receiverCallback;
}
// Last known server timestamp (ms), updated on connect and on each Pong.
export function getServerTime() {
    return serverTime;
}
|
nota-ja/cli | src/cf/commands/application/events_test.go | package application_test
import (
. "cf/commands/application"
"cf/errors"
"cf/models"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
testapi "testhelpers/api"
testassert "testhelpers/assert"
testcmd "testhelpers/commands"
testconfig "testhelpers/configuration"
testreq "testhelpers/requirements"
testterm "testhelpers/terminal"
"time"
)
// Ginkgo suite for the `cf events` command: usage validation, listing recent
// app events, surfacing repository errors, and the no-events case.
var _ = Describe("events command", func() {
	var (
		requirementsFactory *testreq.FakeReqFactory
		eventsRepo          *testapi.FakeAppEventsRepo
		ui                  *testterm.FakeUI
	)

	const TIMESTAMP_FORMAT = "2006-01-02T15:04:05.00-0700"

	BeforeEach(func() {
		eventsRepo = &testapi.FakeAppEventsRepo{}
		requirementsFactory = &testreq.FakeReqFactory{LoginSuccess: true, TargetedSpaceSuccess: true}
		ui = new(testterm.FakeUI)
	})

	// Builds the command with a default config repo and runs it through the
	// fake requirements factory.
	runCommand := func(args ...string) {
		configRepo := testconfig.NewRepositoryWithDefaults()
		cmd := NewEvents(ui, configRepo, eventsRepo)
		testcmd.RunCommand(cmd, testcmd.NewContext("events", args), requirementsFactory)
	}

	It("fails with usage when called without an app name", func() {
		runCommand()
		Expect(ui.FailedWithUsage).To(BeTrue())
		Expect(testcmd.CommandDidPassRequirements).To(BeFalse())
	})

	It("lists events given an app name", func() {
		earlierTimestamp, err := time.Parse(TIMESTAMP_FORMAT, "1999-12-31T23:59:11.00-0000")
		Expect(err).NotTo(HaveOccurred())
		timestamp, err := time.Parse(TIMESTAMP_FORMAT, "2000-01-01T00:01:11.00-0000")
		Expect(err).NotTo(HaveOccurred())
		app := models.Application{}
		app.Name = "my-app"
		app.Guid = "my-app-guid"
		requirementsFactory.Application = app
		eventsRepo.RecentEventsReturns.Events = []models.EventFields{
			{
				Guid:        "event-guid-1",
				Name:        "app crashed",
				Timestamp:   earlierTimestamp,
				Description: "reason: app instance exited, exit_status: 78",
				ActorName:   "<NAME>",
			},
			{
				Guid:        "event-guid-2",
				Name:        "app crashed",
				Timestamp:   timestamp,
				Description: "reason: app instance was stopped, exit_status: 77",
				ActorName:   "<NAME>",
			},
		}
		runCommand("my-app")
		// The command should request the 50 most recent events for the app.
		Expect(eventsRepo.RecentEventsArgs.Limit).To(Equal(uint64(50)))
		Expect(eventsRepo.RecentEventsArgs.AppGuid).To(Equal("my-app-guid"))
		testassert.SliceContains(ui.Outputs, testassert.Lines{
			{"Getting events for app", "my-app", "my-org", "my-space", "my-user"},
			{"time", "event", "actor", "description"},
			{earlierTimestamp.Local().Format(TIMESTAMP_FORMAT), "app crashed", "<NAME>", "app instance exited", "78"},
			{timestamp.Local().Format(TIMESTAMP_FORMAT), "app crashed", "<NAME>", "app instance was stopped", "77"},
		})
	})

	It("tells the user when an error occurs", func() {
		eventsRepo.RecentEventsReturns.Error = errors.New("welp")
		app := models.Application{}
		app.Name = "my-app"
		requirementsFactory.Application = app
		runCommand("my-app")
		testassert.SliceContains(ui.Outputs, testassert.Lines{
			{"events", "my-app"},
			{"FAILED"},
			{"welp"},
		})
	})

	It("tells the user when no events exist for that app", func() {
		app := models.Application{}
		app.Name = "my-app"
		requirementsFactory.Application = app
		runCommand("my-app")
		testassert.SliceContains(ui.Outputs, testassert.Lines{
			{"events", "my-app"},
			{"No events", "my-app"},
		})
	})
})
|
casualcore/casual | middleware/common/include/common/message/coordinate.h | //!
//! Copyright (c) 2018, The casual project
//!
//! This software is licensed under the MIT license, https://opensource.org/licenses/MIT
//!
#pragma once
#include "common/functional.h"
#include "common/uuid.h"
#include "common/log.h"
#include "common/algorithm.h"
#include "common/algorithm/container.h"
#include "common/serialize/macro.h"
#include <vector>
#include <deque>
namespace casual
{
namespace common::message::coordinate
{
namespace fan
{
         //! Coordinates a message "fan out": pending correlations are
         //! registered together with a callback, replies are piped back in,
         //! and the callback fires once no reply is still in state `pending`
         //! (each one either received or failed).
         //!   M  - reply message type; must expose a `correlation` member.
         //!   ID - destination identifier type.
         template< typename M, typename ID>
         struct Out
         {
            using message_type = M;
            using id_type = ID;

            //! type to correlate the pending fan out request with the upcoming replies
            struct Pending
            {
               enum struct State : short
               {
                  pending,
                  received,
                  failed,
               };

               inline friend std::ostream& operator << ( std::ostream& out, State state)
               {
                  switch( state)
                  {
                     case State::pending: return out << "pending";
                     case State::received: return out << "received";
                     case State::failed: return out << "failed";
                  }
                  return out << "<unknown>";
               }

               Pending() = default;
               inline Pending( strong::correlation::id correlation, id_type id)
                  : id{ id}, correlation{ std::move( correlation)}{}

               State state = State::pending;
               id_type id;
               strong::correlation::id correlation;

               // comparable with a correlation id, a destination id, or a state
               inline friend bool operator == ( const Pending& lhs, const strong::correlation::id& rhs) { return lhs.correlation == rhs;}
               template< typename I>
               friend auto operator == ( const Pending& lhs, I&& rhs) -> decltype( std::declval< const id_type&>() == rhs) { return lhs.id == rhs;}
               inline friend bool operator == ( const Pending& lhs, State rhs) { return lhs.state == rhs;}

               CASUAL_LOG_SERIALIZE(
                  CASUAL_SERIALIZE( state);
                  CASUAL_SERIALIZE( id);
                  CASUAL_SERIALIZE( correlation);
               )
            };

            //! register pending 'fan outs' and a callback which is invoked when all pending
            //! has been 'received'.
            template< typename C>
            auto operator () ( std::vector< Pending> pending, C&& callback)
               -> decltype( void( callback( std::vector< message_type>{}, std::vector< Pending>{})))
            {
               auto& entry = m_entries.emplace_back( std::move( pending), std::forward< C>( callback));
               // to make it symmetrical and 'impossible' to add 'dead letters'.
               // (an empty/already-complete pending set fires the callback immediately)
               if( entry.done())
                  m_entries.pop_back();
            }

            //! 'pipe' the `message` to the 'fan-out-coordination'. Will invoke callback if `message` is
            //! the last pending message for an entry.
            inline void operator () ( message_type message)
            {
               if( auto found = algorithm::find( m_entries, message.correlation))
                  if( found->coordinate( std::move( message)))
                     m_entries.erase( std::begin( found));
            }

            //! mark every still-pending reply expected from destination `id` as
            //! failed; entries that thereby complete invoke their callback and
            //! are removed.
            template< typename I>
            inline auto failed( I&& id) -> decltype( void( std::declval< const id_type&>() == id))
            {
               algorithm::container::trim( m_entries, algorithm::remove_if( m_entries, [id]( auto& entry)
               {
                  return entry.failed( id);
               }));
            }

            inline auto empty() const noexcept { return m_entries.empty();}

            //! @returns an empty 'pending_type' vector
            //! convince function to get 'the right type'
            inline auto empty_pendings() const noexcept { return std::vector< Pending>{};}

            CASUAL_LOG_SERIALIZE(
               CASUAL_SERIALIZE_NAME( m_entries, "entries");
            )
         private:

            //! one registered fan-out: its pending set, the replies gathered
            //! so far, and the completion callback.
            struct Entry
            {
               using callback_t = common::function< void( std::vector< message_type> received, std::vector< Pending> outcome)>;

               inline Entry( std::vector< Pending> pending, callback_t callback)
                  : m_pending{ std::move( pending)}, m_callback{ std::move( callback)} {}

               inline bool coordinate( message_type message)
               {
                  auto found = algorithm::find( m_pending, message.correlation);
                  assert( found);
                  found->state = decltype( found->state)::received;
                  m_received.push_back( std::move( message));
                  return done();
               }

               template< typename I>
               auto failed( I&& id) -> decltype( Pending{}.id == id)
               {
                  for( auto& pending: m_pending)
                     if( pending.id == id && pending.state == Pending::State::pending)
                        pending.state = Pending::State::failed;
                  return done();
               }

               inline friend bool operator == ( const Entry & lhs, const strong::correlation::id& rhs)
               {
                  return predicate::boolean( algorithm::find( lhs.m_pending, rhs));
               }

               CASUAL_LOG_SERIALIZE(
                  CASUAL_SERIALIZE_NAME( m_pending, "pending");
                  CASUAL_SERIALIZE_NAME( m_received, "received");
               )

               //! true when no reply is still pending; invokes the callback
               //! with the received messages and the final outcome.
               bool done()
               {
                  if( algorithm::any_of( m_pending, predicate::value::equal( Pending::State::pending)))
                     return false;

                  log::line( verbose::log, "entry: ", *this);
                  m_callback( std::move( m_received), std::move( m_pending));
                  return true;
               }
            private:
               std::vector< Pending> m_pending;
               std::vector< message_type> m_received;
               callback_t m_callback;
            };

            std::deque< Entry> m_entries;
         };
} // fan
} //common::message::coordinate
} // casual
|
webdev188/tytus | parser/team27/G-27/execution/main.py | import sys
from execution.symbol.environment import Environment
from prettytable import PrettyTable
class Main(object):
    """Run a parsed list of SQL statements and gather printable output.

    Each item in ``queryArray`` is an AST node exposing ``execute(env)``;
    string results are collected as-is, dict results are rendered either as
    an error line or as a pretty-printed table, list results are stringified.
    """

    def __init__(self, queryArray):
        # List of executable statement nodes (anything else yields no output).
        self.queryArray = queryArray

    def execute(self, environment):
        """Execute every statement and return ``[outputs, errors]``.

        :param environment: parent symbol-table environment; each statement
            runs in its own child :class:`Environment`.
        :return: two-element list -- rendered outputs and error strings.
        """
        outputs = []
        errors = []
        # Non-list input produces no work.  (The original duplicated this
        # same return in an else-branch, which was redundant.)
        if not isinstance(self.queryArray, list):
            return [outputs, errors]
        for item in self.queryArray:
            env = Environment(environment)
            res = item.execute(env)
            if isinstance(res, str):
                outputs.append(res)
            elif isinstance(res, dict):
                if 'Error' in res:
                    # NOTE(review): 'Linea' is filled from res['Columna'] and
                    # 'Columna' from res['Fila'] -- the labels look swapped;
                    # confirm the producer's key semantics before changing.
                    errors.append('Tipo: SEMÁNTICO, Error: ' + res['Error'] + ' Linea: ' + str(res['Columna']) + ' Columna: ' + str(res['Fila']))
                else:
                    # Render a SELECT-style result set as an ASCII table.
                    table = PrettyTable()
                    headers = [value['column'].name for value in res['table'].columns]
                    table.field_names = headers
                    for row in res['data']:
                        table.add_row(row)
                    outputs.append('\n' + table.get_string() + '\n')
            elif isinstance(res, list):
                outputs.append(str(res))
        return [outputs, errors]
|
tcmike/condo | apps/condo/migrations/20210820182823-0046_auto_20210820_1328.js | // auto generated by kmigrator
// KMIGRATOR:0046_auto_20210820_1328:IyBHZW5lcmF0ZWQgYnkgRGphbmdvIDMuMi41IG9uIDIwMjEtMDgtMjAgMTM6MjgKCmZyb20gZGphbmdvLmRiIGltcG9ydCBtaWdyYXRpb25zLCBtb2RlbHMKCgpjbGFzcyBNaWdyYXRpb24obWlncmF0aW9ucy5NaWdyYXRpb24pOgoKICAgIGRlcGVuZGVuY2llcyA9IFsKICAgICAgICAoJ19kamFuZ29fc2NoZW1hJywgJzAwNDVfb3JnYW5pemF0aW9uZW1wbG95ZWVfc3BlY2lhbGl6YXRpb25zX21hbnknKSwKICAgIF0KCiAgICBvcGVyYXRpb25zID0gWwogICAgICAgIG1pZ3JhdGlvbnMuQWx0ZXJGaWVsZCgKICAgICAgICAgICAgbW9kZWxfbmFtZT0nYmlsbGluZ2ludGVncmF0aW9uYWNjZXNzcmlnaHQnLAogICAgICAgICAgICBuYW1lPSdpZCcsCiAgICAgICAgICAgIGZpZWxkPW1vZGVscy5VVUlERmllbGQocHJpbWFyeV9rZXk9VHJ1ZSwgc2VyaWFsaXplPUZhbHNlKSwKICAgICAgICApLAogICAgICAgIG1pZ3JhdGlvbnMuQWx0ZXJGaWVsZCgKICAgICAgICAgICAgbW9kZWxfbmFtZT0nYmlsbGluZ2ludGVncmF0aW9uYWNjZXNzcmlnaHRoaXN0b3J5cmVjb3JkJywKICAgICAgICAgICAgbmFtZT0naGlzdG9yeV9pZCcsCiAgICAgICAgICAgIGZpZWxkPW1vZGVscy5VVUlERmllbGQoZGJfaW5kZXg9VHJ1ZSksCiAgICAgICAgKSwKICAgIF0K
exports.up = async (knex) => {
    // Switch BillingIntegrationAccessRight's primary key (and the history
    // table's history_id) from integer to UUID, backfilling history rows and
    // tolerating orphans left behind by hard-deleted records.
    await knex.raw(`
    BEGIN;
    ALTER TABLE "BillingIntegrationAccessRight" RENAME COLUMN "id" TO "old_id";
    ALTER TABLE "BillingIntegrationAccessRight" DROP CONSTRAINT "BillingIntegrationAccessRight_pkey";
    ALTER TABLE "BillingIntegrationAccessRight" ADD COLUMN "id" UUID NULL;
    UPDATE "BillingIntegrationAccessRight" SET "id" = uuid_generate_v4();
    ALTER TABLE "BillingIntegrationAccessRight" ADD PRIMARY KEY ("id");
    ALTER TABLE "BillingIntegrationAccessRightHistoryRecord" RENAME COLUMN "history_id" TO "old_history_id";
    ALTER TABLE "BillingIntegrationAccessRightHistoryRecord" ADD COLUMN "history_id" UUID NULL;
    -- Set "history_id" of history records to "id" of corresponding records in new format [1]
    UPDATE "BillingIntegrationAccessRightHistoryRecord" hr
    SET "history_id" = ar."id"
    FROM "BillingIntegrationAccessRight" as ar
    WHERE(
        ar."old_id" = hr."old_history_id"
    );
    -- We can have hard-deleted rows in BillingIntegrationAccessRight table and orphaned rows in BillingIntegrationAccessRightHistoryRecord,
    -- that was correspond to them before hard-delete. Therefore after [1] a table BillingIntegrationAccessRightHistoryRecord
    -- can have NULL in "history_id" column in some rows. Since we don't have original rows in BillingIntegrationAccessRight
    -- table, we can set them to some value
    UPDATE "BillingIntegrationAccessRightHistoryRecord" hr
    SET "history_id" = '00000000-0000-0000-0000-000000000000'
    WHERE (
        hr.history_id IS NULL
    );
    ALTER TABLE "BillingIntegrationAccessRightHistoryRecord" ALTER COLUMN "history_id" SET NOT NULL;
    -- All future history record will not have old id anymore, because plugin does not saves them
    ALTER TABLE "BillingIntegrationAccessRightHistoryRecord" ALTER COLUMN "old_history_id" DROP NOT NULL;
    COMMIT;
    END;
    `)
}
exports.down = async (knex) => {
    // Revert the PK migration: restore the integer id columns and drop the
    // temporary UUID columns added by the up-migration.
    await knex.raw(`
    BEGIN;
    --
    -- Change type of BillingIntegrationAccessRight.id column back to integer
    --
    ALTER TABLE "BillingIntegrationAccessRight" RENAME COLUMN "id" TO "_old_id";
    ALTER TABLE "BillingIntegrationAccessRight" RENAME COLUMN "old_id" TO "id";
    ALTER TABLE "BillingIntegrationAccessRightHistoryRecord" RENAME COLUMN "history_id" TO "_old_history_id";
    ALTER TABLE "BillingIntegrationAccessRightHistoryRecord" RENAME COLUMN "old_history_id" TO "history_id";
    -- Set old format values of history_id, that was not saved after up-migration for further inserted rows
    UPDATE "BillingIntegrationAccessRightHistoryRecord" hr
    SET "history_id" = ar."id"
    FROM "BillingIntegrationAccessRight" as ar
    WHERE(
        ar."_old_id" = hr."_old_history_id"
    );
    ALTER TABLE "BillingIntegrationAccessRight" DROP CONSTRAINT "BillingIntegrationAccessRight_pkey";
    ALTER TABLE "BillingIntegrationAccessRight" ADD PRIMARY KEY ("id");
    ALTER TABLE "BillingIntegrationAccessRightHistoryRecord" ALTER COLUMN "history_id" SET NOT NULL;
    ALTER TABLE "BillingIntegrationAccessRight" DROP COLUMN "_old_id";
    ALTER TABLE "BillingIntegrationAccessRightHistoryRecord" DROP COLUMN "_old_history_id";
    COMMIT;
    END;
    `)
}
|
tqrg-bot/rubinius | stdlib/ext/win32ole/tests/oleserver.rb | <gh_stars>10-100
require 'win32ole'
# Returns the name of the first registered OLE type library matching +pat+,
# or nil when none matches.
# Fix: the previous version used +each+ with an early +return+, so on no
# match the method fell through and returned the whole typelibs array.
def oletypelib_name(pat)
  WIN32OLE_TYPE.typelibs.find { |lib| pat =~ lib }
end
# Names of the concrete type libraries used by the test suite.
# NOTE(review): resolved once at load time; behavior on machines without
# these libraries installed depends on oletypelib_name's no-match result --
# confirm before relying on these constants.
module OLESERVER
  MS_EXCEL_TYPELIB = oletypelib_name(/^Microsoft Excel .* Object Library$/)
  MS_XML_TYPELIB = oletypelib_name(/^Microsoft XML/)
end
|
ulfyyang/ulfy-android-master | app/src/main/java/com/ulfy/master/ui/cell/ContentSearchCell.java | <reponame>ulfyyang/ulfy-android-master
package com.ulfy.master.ui.cell;
import android.content.Context;
import android.util.AttributeSet;
import android.widget.ImageView;
import android.widget.TextView;
import com.ulfy.android.image.ImageUtils;
import com.ulfy.android.mvvm.IViewModel;
import com.ulfy.android.ui_injection.Layout;
import com.ulfy.android.ui_injection.ViewById;
import com.ulfy.master.R;
import com.ulfy.master.application.cm.ContentSearchCM;
import com.ulfy.master.ui.base.BaseCell;
/**
 * List cell rendering one content-search result: cover image, name, and a
 * view-count label.
 */
@Layout(id = R.layout.cell_content_search)
public class ContentSearchCell extends BaseCell {
    @ViewById(id = R.id.coverIV) private ImageView coverIV;
    @ViewById(id = R.id.nameTV) private TextView nameTV;
    @ViewById(id = R.id.timeTV) private TextView timeTV;
    // View model bound to this cell (set in bind()).
    private ContentSearchCM cm;

    public ContentSearchCell(Context context) {
        super(context);
        init(context, null);
    }

    public ContentSearchCell(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context, attrs);
    }

    private void init(Context context, AttributeSet attrs) {
        // No extra initialization needed; views are injected via @ViewById.
    }

    @Override public void bind(IViewModel model) {
        cm = (ContentSearchCM) model;
        // Load the cover with placeholder/error drawables, then fill the labels.
        ImageUtils.loadImage(cm.contentSearch.cover, R.drawable.drawable_loading, R.drawable.drawable_loading_false, coverIV);
        nameTV.setText(cm.contentSearch.name);
        timeTV.setText(String.format("%d次观看", cm.contentSearch.time));
    }
}
imslinn/AndroidCommon | common/src/main/java/tk/beason/common/widget/pickview/DateTimePicker.java | <reponame>imslinn/AndroidCommon
/*
* Copyright (C) 2016 The beasontk Android Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tk.beason.common.widget.pickview;
import android.content.Context;
import android.content.res.TypedArray;
import androidx.annotation.IntDef;
import androidx.annotation.IntRange;
import androidx.annotation.NonNull;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.LinearLayout;
import tk.beason.common.R;
import tk.beason.common.widget.pickview.adapter.DateTimeAdapter;
import tk.beason.common.widget.pickview.listener.OnItemSelectedListener;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.Calendar;
import java.util.Date;
/**
* Created by beasontk on 2017/4/25.
* 时间选择器
*/
public class DateTimePicker extends LinearLayout {
private static final String TAG = "DateTimePicker";
/**
* 默认的开始的年
*/
private static final int DEFAULT_START_YEAR = 1900;
/**
* 默认的结束年份
*/
private static final int DEFAULT_END_YEAR = 2600;
private PickerView mYearPicker;
private DateTimeAdapter mYearAdapter;
private PickerView mMonthPicker;
private DateTimeAdapter mMonthAdapter;
private PickerView mDayPicker;
private DateTimeAdapter mDayAdapter;
private PickerView mHourPicker;
private DateTimeAdapter mHourAdapter;
private PickerView mMinPicker;
private DateTimeAdapter mMinAdapter;
private Calendar mStartDate;
private Calendar mEndDate;
private int mStyle;
@IntDef({Style.DATE_TIME, Style.DATE, Style.TIME})
@Retention(RetentionPolicy.SOURCE)
public @interface Style {
/**
* 日期+时间
*/
int DATE_TIME = 1;
/**
* 仅日期
*/
int DATE = 2;
/**
* 仅时间
*/
int TIME = 3;
}
    /** Programmatic construction; delegates to the attribute constructor. */
    public DateTimePicker(Context context) {
        this(context, null);
    }
    /** XML inflation constructor; delegates to the full constructor. */
    public DateTimePicker(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }
    /**
     * Full constructor: reads the picker style (date+time / date / time)
     * from XML attributes, builds the wheel views, and seeds them with the
     * current date/time.
     */
    public DateTimePicker(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.DateTimePicker);
        int style = a.getInt(R.styleable.DateTimePicker_datePickerStyle, Style.DATE_TIME);
        a.recycle();
        setOrientation(HORIZONTAL);
        setGravity(Gravity.CENTER);
        initViews();
        initDatas();
        //noinspection WrongConstant
        setStyle(style);
    }
    /**
     * Inflate the five wheel pickers (year, month, day, hour, minute), attach
     * one adapter each, and wire a selection listener per Calendar field.
     */
    private void initViews() {
        LayoutInflater inflater = LayoutInflater.from(getContext());
        View root = inflater.inflate(R.layout.widget_date_time_picker, this, true);

        mYearAdapter = new DateTimeAdapter();
        mYearPicker = (PickerView) root.findViewById(R.id.year);
        mYearPicker.setAdapter(mYearAdapter);
        mYearPicker.setOnItemSelectedListener(new ItemSelectedListener(Calendar.YEAR));

        mMonthAdapter = new DateTimeAdapter();
        mMonthPicker = (PickerView) root.findViewById(R.id.month);
        mMonthPicker.setAdapter(mMonthAdapter);
        mMonthPicker.setOnItemSelectedListener(new ItemSelectedListener(Calendar.MONTH));

        mDayAdapter = new DateTimeAdapter();
        mDayPicker = (PickerView) root.findViewById(R.id.date);
        mDayPicker.setAdapter(mDayAdapter);
        mDayPicker.setOnItemSelectedListener(new ItemSelectedListener(Calendar.DAY_OF_MONTH));

        mHourAdapter = new DateTimeAdapter();
        mHourPicker = (PickerView) root.findViewById(R.id.hour);
        mHourPicker.setAdapter(mHourAdapter);
        mHourPicker.setOnItemSelectedListener(new ItemSelectedListener(Calendar.HOUR_OF_DAY));

        mMinAdapter = new DateTimeAdapter();
        mMinPicker = (PickerView) root.findViewById(R.id.min);
        mMinPicker.setAdapter(mMinAdapter);
        mMinPicker.setOnItemSelectedListener(new ItemSelectedListener(Calendar.MINUTE));
    }
    /**
     * Initialize the selectable ranges (years 1900..2600, months 1..12,
     * days per current month, hours 1..24, minutes 1..60) and position every
     * wheel on the current date/time.
     */
    private void initDatas() {
        mStartDate = Calendar.getInstance();
        mStartDate.set(Calendar.YEAR, DEFAULT_START_YEAR);
        mStartDate.set(Calendar.MONTH, 1);
        mStartDate.set(Calendar.DAY_OF_MONTH, 1);
        mStartDate.set(Calendar.HOUR_OF_DAY, 0);
        mStartDate.set(Calendar.MINUTE, 0);
        mStartDate.set(Calendar.SECOND, 0);
        mStartDate.set(Calendar.MILLISECOND, 0);

        mEndDate = Calendar.getInstance();
        mEndDate.set(Calendar.YEAR, DEFAULT_END_YEAR);
        mEndDate.set(Calendar.MONTH, 1);
        mEndDate.set(Calendar.DAY_OF_MONTH, 1);
        mEndDate.set(Calendar.HOUR_OF_DAY, 0);
        mEndDate.set(Calendar.MINUTE, 0);
        mEndDate.set(Calendar.SECOND, 0);
        mEndDate.set(Calendar.MILLISECOND, 0);

        final Calendar calendar = Calendar.getInstance();
        final int nowYear = calendar.get(Calendar.YEAR);
        final int nowMonth = calendar.get(Calendar.MONTH) + 1;
        int now = nowYear;
        mYearAdapter.setStart(DEFAULT_START_YEAR);
        mYearAdapter.setEnd(DEFAULT_END_YEAR);
        mYearPicker.setCurrentPosition(now - DEFAULT_START_YEAR);

        now = nowMonth;
        mMonthAdapter.setStart(1);
        mMonthAdapter.setEnd(12);
        mMonthPicker.setCurrentPosition(now - 1);

        now = calendar.get(Calendar.DAY_OF_MONTH);
        mDayAdapter.setStart(1);
        mDayAdapter.setEnd(getDay(nowYear, nowMonth));
        mDayPicker.setCurrentPosition(now - 1);

        // NOTE(review): hours are configured 1..24 and minutes 1..60, but
        // Calendar.HOUR_OF_DAY/MINUTE are 0-based, so at hour or minute 0 the
        // expression "now - 1" yields position -1 -- confirm PickerView
        // tolerates negative positions.
        now = calendar.get(Calendar.HOUR_OF_DAY);
        mHourAdapter.setStart(1);
        mHourAdapter.setEnd(24);
        mHourPicker.setCurrentPosition(now - 1);

        now = calendar.get(Calendar.MINUTE);
        mMinAdapter.setStart(1);
        mMinAdapter.setEnd(60);
        mMinPicker.setCurrentPosition(now - 1);
    }
    /**
     * Set the picker style: show/hide the date and time wheels according to
     * {@link Style}.  A no-op when the style is unchanged.
     */
    public void setStyle(@Style int style) {
        if (mStyle == style) {
            Log.i(TAG, "setStyle: Style is same");
            return;
        }
        mStyle = style;
        switch (mStyle) {
            case Style.DATE_TIME:
                mYearPicker.setVisibility(VISIBLE);
                mMonthPicker.setVisibility(VISIBLE);
                mDayPicker.setVisibility(VISIBLE);
                mHourPicker.setVisibility(VISIBLE);
                mMinPicker.setVisibility(VISIBLE);
                break;
            case Style.DATE:
                mYearPicker.setVisibility(VISIBLE);
                mMonthPicker.setVisibility(VISIBLE);
                mDayPicker.setVisibility(VISIBLE);
                mHourPicker.setVisibility(GONE);
                mMinPicker.setVisibility(GONE);
                break;
            case Style.TIME:
                mYearPicker.setVisibility(GONE);
                mMonthPicker.setVisibility(GONE);
                mDayPicker.setVisibility(GONE);
                mHourPicker.setVisibility(VISIBLE);
                mMinPicker.setVisibility(VISIBLE);
                break;
        }
    }
    /**
     * Build a {@link Date} from the current wheel selections (seconds and
     * millis zeroed).
     * NOTE(review): getCurrentHour() spans 1..24 per the adapter setup while
     * HOUR_OF_DAY expects 0..23, so a selection of 24 lenient-rolls into the
     * next day -- confirm intended behavior.
     */
    public Date getCurrentTime() {
        Calendar calendar = Calendar.getInstance();
        calendar.set(Calendar.YEAR, getCurrentYear());
        calendar.set(Calendar.MONTH, getCurrentMonth() - 1);
        calendar.set(Calendar.DAY_OF_MONTH, getCurrentDay());
        calendar.set(Calendar.HOUR_OF_DAY, getCurrentHour());
        calendar.set(Calendar.MINUTE, getCurrentMin());
        calendar.set(Calendar.SECOND, 0);
        calendar.set(Calendar.MILLISECOND, 0);
        return calendar.getTime();
    }
/**
 * Returns the currently selected date/time as epoch milliseconds,
 * with seconds and milliseconds zeroed.
 */
public long getCurrentTimeInMillis() {
    Calendar selected = Calendar.getInstance();
    // clear() zeroes every field (including MILLISECOND) before we set
    // year..second in one call, matching the original field-by-field sets.
    selected.clear();
    selected.set(getCurrentYear(), getCurrentMonth() - 1, getCurrentDay(),
            getCurrentHour(), getCurrentMin(), 0);
    return selected.getTimeInMillis();
}
/**
 * Returns the currently selected year (adapter start + picker position).
 */
public int getCurrentYear() {
    return mYearAdapter.getStart() + mYearPicker.getCurrentPosition();
}
/**
 * Returns the currently selected month, 1-based (adapter start is 1).
 */
public int getCurrentMonth() {
    return mMonthAdapter.getStart() + mMonthPicker.getCurrentPosition();
}
/**
 * Returns the currently selected day of month, 1-based.
 */
public int getCurrentDay() {
    return mDayAdapter.getStart() + mDayPicker.getCurrentPosition();
}
/**
 * Returns the currently selected hour (adapter range is 1..24 — see init).
 */
public int getCurrentHour() {
    return mHourAdapter.getStart() + mHourPicker.getCurrentPosition();
}
/**
 * Returns the currently selected minute (adapter range is 1..60 — see init).
 */
public int getCurrentMin() {
    return mMinAdapter.getStart() + mMinPicker.getCurrentPosition();
}
@SuppressWarnings("unused")
public void setCurrentDate(@NonNull Date date) {
    Calendar calendar = Calendar.getInstance();
    calendar.setTime(date);
    syncPickersWithCalendar(calendar);
}

@SuppressWarnings("unused")
public void setCurrentDate(int year, int month, int day, int hour, int min) {
    // Route through a lenient Calendar (as the original code did) so
    // out-of-range fields, e.g. month 13, are normalized before use.
    Calendar calendar = Calendar.getInstance();
    calendar.set(Calendar.YEAR, year);
    calendar.set(Calendar.MONTH, month - 1);
    calendar.set(Calendar.DAY_OF_MONTH, day);
    calendar.set(Calendar.HOUR_OF_DAY, hour);
    calendar.set(Calendar.MINUTE, min);
    calendar.set(Calendar.SECOND, 0);
    calendar.set(Calendar.MILLISECOND, 0);
    syncPickersWithCalendar(calendar);
}

public void setCurrentDate(long time) {
    Calendar calendar = Calendar.getInstance();
    calendar.setTimeInMillis(time);
    syncPickersWithCalendar(calendar);
}

/**
 * Moves every picker column to the position matching the given calendar.
 * Shared by all three setCurrentDate overloads (previously triplicated).
 *
 * NOTE(review): the hour/minute adapters start at 1 (see init), so
 * HOUR_OF_DAY == 0 or MINUTE == 0 yields position -1 here — this mirrors
 * the original behavior; confirm the picker tolerates a negative position.
 */
private void syncPickersWithCalendar(Calendar calendar) {
    mYearPicker.setCurrentPosition(calendar.get(Calendar.YEAR) - DEFAULT_START_YEAR);
    // Calendar.MONTH is already 0-based, which is exactly the picker position.
    mMonthPicker.setCurrentPosition(calendar.get(Calendar.MONTH));
    mDayPicker.setCurrentPosition(calendar.get(Calendar.DAY_OF_MONTH) - 1);
    mHourPicker.setCurrentPosition(calendar.get(Calendar.HOUR_OF_DAY) - 1);
    mMinPicker.setCurrentPosition(calendar.get(Calendar.MINUTE) - 1);
}
/**
 * Sets the range start date. Currently unused.
 */
@SuppressWarnings("unused")
private void setStartDate(@NonNull Date date) {
    mStartDate.setTime(date);
}
/**
 * Sets the range end date. Currently unused.
 */
@SuppressWarnings("unused")
private void setEndDate(@NonNull Date date) {
    mEndDate.setTime(date);
}
/**
 * Returns the number of days in the given month of the given year.
 * Unknown month values fall back to 30, as before.
 */
private int getDay(final @IntRange(from = 0) int year, final @IntRange(from = 0) int month) {
    // Only February's length depends on the year.
    if (month == 2) {
        return isLeayYear(year) ? 29 : 28;
    }
    if (month == 1 || month == 3 || month == 5 || month == 7
            || month == 8 || month == 10 || month == 12) {
        return 31;
    }
    return 30;
}
/**
 * Returns whether {@code year} is a leap year: divisible by 400, or
 * divisible by 4 but not by 100.
 */
private boolean isLeayYear(final @IntRange(from = 0) int year) {
    return year % 400 == 0 || (year % 100 != 0 && year % 4 == 0);
}
/**
 * Selection listener that keeps the day column's range consistent with
 * the currently selected year and month.
 */
private class ItemSelectedListener implements OnItemSelectedListener {

    // Which Calendar field this listener watches (Calendar.YEAR or Calendar.MONTH).
    private int mType;

    private ItemSelectedListener(int type) {
        mType = type;
    }

    @Override
    public void onItemSelected(int position) {
        switch (mType) {
            case Calendar.YEAR:
                final int month = getCurrentMonth();
                // Only February's length changes with the year (leap years).
                if (month == 2) {
                    updateDay();
                }
                break;
            case Calendar.MONTH:
                updateDay();
                break;
        }
    }

    // Re-derive the day adapter's upper bound from the selected year/month.
    void updateDay() {
        final int year = getCurrentYear();
        final int month = getCurrentMonth();
        mDayAdapter.setEnd(getDay(year, month));
    }
}
}
|
atulsm/Test_Projects | src/functional/programming/BasicLambdas.java | <gh_stars>1-10
package functional.programming;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import java.util.function.BinaryOperator;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.function.UnaryOperator;
import java.util.stream.Collectors;
/**
 * Small demos of the core java.util.function interfaces.
 */
public class BasicLambdas {

    public static void main(String[] args) {
        predicates(); // Predicate: one argument in, boolean out
        functions();  // Function: one argument in, one value out
        unary();      // UnaryOperator: same type in and out
        binary();     // BinaryOperator: two operands, result of the same type
        supplier();   // Supplier: no input, produces a value (factory-like)
        consumer();   // Consumer: accepts a value, returns nothing
    }

    private static void predicates() {
        Predicate<String> isShort = s -> s.length() < 10;
        System.out.println(isShort.test("atul"));

        List<Integer> numbers = Arrays.asList(1, 3, 6, 10, 30, 100);
        List<Integer> greaterThanFive = numbers.stream()
                .filter(n -> n > 5)
                .collect(Collectors.toList());
        System.out.println(greaterThanFive);
    }

    private static void functions() {
        // Same function written twice: anonymous class vs. lambda.
        Function<Integer, Integer> squareAnonymous = new Function<Integer, Integer>() {
            @Override
            public Integer apply(Integer value) {
                return value * value;
            }
        };
        Function<Integer, Integer> squareLambda = value -> value * value;
        System.out.println(squareAnonymous.apply(5));
        System.out.println(squareLambda.apply(5));
    }

    private static void unary() {
        UnaryOperator<Integer> increment = value -> value + 1;
        System.out.println(increment.apply(3));
    }

    private static void binary() {
        BinaryOperator<Integer> sum = (left, right) -> left + right;
        System.out.println(sum.apply(3, 4));
    }

    private static void supplier() {
        Supplier<Integer> randomInt = () -> new Random().nextInt();
        System.out.println(randomInt.get());
    }

    private static void consumer() {
        Consumer<Integer> printer = System.out::println;
        printer.accept(3);
    }
}
|
kasipavankumar/rustic-medico | admin-client/source/services/loginService.js | import axios from 'axios';
import TokenService from './tokenService';
import { API_URL } from '../config';
/**
 * Posts admin credentials to the API and stores the session token cookie.
 */
class LoginService {
    constructor(credentials) {
        this.credentials = credentials;
    }

    // Arrow field keeps `this` bound even when the method is detached.
    // Resolves to true on success, false on any failure (never throws).
    login = async () => {
        const request = {
            method: 'POST',
            url: `${API_URL}/api/admin/login`,
            headers: {
                Accept: 'application/json',
                'Content-Type': 'application/json',
            },
            data: this.credentials,
        };
        try {
            const { data } = await axios(request);
            new TokenService('_SID_', data.token).setCookie();
            return true;
        } catch (err) {
            return false;
        }
    };
}
export default LoginService;
|
ValerieKenyon/cliff-effects | src/test/setupJSDOM.js | <filename>src/test/setupJSDOM.js
// Provide virtual browser object for enzyme testing
// https://airbnb.io/enzyme/docs/guides/jsdom.html
const { jsdom } = require('jsdom');

global.document = jsdom('');
global.window = document.defaultView;
global.navigator = { userAgent: 'node.js' };

// Copy every window property that node's global does not already define,
// preserving the original property descriptors.
function copyProps(src, target) {
  for (const prop of Object.getOwnPropertyNames(src)) {
    if (typeof target[prop] === 'undefined') {
      Object.defineProperty(target, prop, Object.getOwnPropertyDescriptor(src, prop));
    }
  }
}

copyProps(document.defaultView, global);
|
xiaonanln/myleetcode-python | src/979. Distribute Coins in Binary Tree.py | # Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    def distributeCoins(self, root):
        """Return the minimum number of moves so every node has one coin.

        Each edge contributes ``abs(excess)`` moves, where the excess of a
        subtree is (coins in subtree) - (nodes in subtree). Single DFS,
        O(n) time, O(h) stack; unlike the original this does not mutate
        the input tree (the old code stored ``_sum``/``_size`` on nodes).

        :type root: TreeNode
        :rtype: int
        """
        if not root:
            return 0
        self._moves = 0
        self._excess(root)
        return self._moves

    def _excess(self, node):
        """Post-order helper: coins minus nodes for the subtree at ``node``.

        Accumulates abs(child excess) for each child edge into
        ``self._moves`` as a side effect.
        """
        if node is None:
            return 0
        left = self._excess(node.left)
        right = self._excess(node.right)
        self._moves += abs(left) + abs(right)
        return node.val + left + right - 1

    def solve(self, root):
        """Legacy entry point kept for backward compatibility.

        Returns the same move count as :meth:`distributeCoins`; no longer
        requires the pre-annotated ``_sum``/``_size`` attributes.
        """
        if root is None:
            return 0
        return self.distributeCoins(root)
import utils

# Smoke test: tree [3, 0, 0] needs 2 moves.
# print() call form works under both Python 2 and 3 for a single argument
# (the original `print X` statement was Python-2-only).
print(Solution().distributeCoins(utils.maketree([3, 0, 0])))
turp1twin/cql-query-engine | src/lib/fhir/appointment.js | <gh_stars>1-10
// Copyright (c) 2014 The MITRE Corporation
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// * Neither the name of HL7 nor the names of its contributors may be used to
// endorse or promote products derived from this software without specific
// prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
// IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
// NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
import * as DT from '../cql/cqlDatatypes'
import { Identifier, CodeableConcept, Reference, BackboneElement, DomainResource } from './core'
/**
 * Embedded class
 * @class AppointmentParticipantComponent
 */
class AppointmentParticipantComponent extends BackboneElement {
  /**
   * Role of the participant in the appointment.
   * @returns {Array|undefined} an array of {@link CodeableConcept} objects
   */
  type () {
    const raw = this.json['type']
    return raw ? raw.map((item) => new CodeableConcept(item)) : undefined
  }

  /**
   * The person or device taking part in the appointment (usually
   * Practitioner, Patient, RelatedPerson or Device).
   * @returns {Reference|undefined}
   */
  actor () {
    return this.json['actor'] ? new Reference(this.json['actor']) : undefined
  }

  /**
   * Whether this participant must be present (e.g. two doctors discussing
   * a patient's results do not require the patient).
   * @returns {Array|undefined} an array of {@link String} objects
   */
  required () {
    return this.json['required']
  }

  /**
   * Participation status of the participant.
   * @returns {Array|undefined} an array of {@link String} objects
   */
  status () {
    return this.json['status']
  }
}
/**
 * A scheduled healthcare event for a patient and/or practitioner(s) where a
 * service may take place at a specific date/time. Every accessor returns
 * undefined when the underlying JSON field is absent.
 * @class Appointment
 */
export class Appointment extends DomainResource {
  /**
   * Business identifiers for this appointment (used when a direct URL
   * reference to the resource is not appropriate).
   * @returns {Array|undefined} an array of {@link Identifier} objects
   */
  identifier () {
    const raw = this.json['identifier']
    return raw ? raw.map((item) => new Identifier(item)) : undefined
  }

  /**
   * Priority of the appointment (iCal convention: 0 undefined, 1 highest,
   * 9 lowest).
   * @returns {Array|undefined} an array of {@link Number} objects
   */
  priority () {
    return this.json['priority']
  }

  /**
   * Shared status of the appointment (each participant also has its own).
   * @returns {Array|undefined} an array of {@link String} objects
   */
  status () {
    return this.json['status']
  }

  /**
   * The type of appointment being booked.
   * @returns {CodeableConcept|undefined}
   */
  type () {
    return this.json['type'] ? new CodeableConcept(this.json['type']) : undefined
  }

  /**
   * Clinical reason this appointment is being scheduled.
   * @returns {CodeableConcept|undefined}
   */
  reason () {
    return this.json['reason'] ? new CodeableConcept(this.json['reason']) : undefined
  }

  /**
   * Brief subject-line style description; details belong in comment().
   * @returns {Array|undefined} an array of {@link String} objects
   */
  description () {
    return this.json['description']
  }

  /**
   * Date/time the appointment is to take place.
   * @returns {Date|undefined}
   */
  start () {
    return this.json['start'] ? DT.DateTime.parse(this.json['start']) : undefined
  }

  /**
   * Date/time the appointment is to conclude.
   * @returns {Date|undefined}
   */
  end () {
    return this.json['end'] ? DT.DateTime.parse(this.json['end']) : undefined
  }

  /**
   * The slot(s) this appointment fills; start/end must match the slot.
   * @returns {Array|undefined} an array of {@link Reference} objects
   */
  slot () {
    const raw = this.json['slot']
    return raw ? raw.map((item) => new Reference(item)) : undefined
  }

  /**
   * Primary location where the appointment takes place.
   * @returns {Reference|undefined}
   */
  location () {
    return this.json['location'] ? new Reference(this.json['location']) : undefined
  }

  /**
   * Additional comments about the appointment.
   * @returns {Array|undefined} an array of {@link String} objects
   */
  comment () {
    return this.json['comment']
  }

  /**
   * An Order that led to the creation of this appointment.
   * @returns {Reference|undefined}
   */
  order () {
    return this.json['order'] ? new Reference(this.json['order']) : undefined
  }

  /**
   * Participants involved in the appointment.
   * @returns {Array|undefined} an array of {@link AppointmentParticipantComponent} objects
   */
  participant () {
    const raw = this.json['participant']
    return raw ? raw.map((item) => new AppointmentParticipantComponent(item)) : undefined
  }

  /**
   * Who recorded the appointment.
   * @returns {Reference|undefined}
   */
  lastModifiedBy () {
    return this.json['lastModifiedBy'] ? new Reference(this.json['lastModifiedBy']) : undefined
  }

  /**
   * When the appointment was recorded.
   * @returns {Date|undefined}
   */
  lastModified () {
    return this.json['lastModified'] ? DT.DateTime.parse(this.json['lastModified']) : undefined
  }
}
|
mindv0rtex/DearPyGui | DearPyGui/src/core/AppItems/basic/mvSelectable.cpp | <reponame>mindv0rtex/DearPyGui
#include <utility>
#include "mvSelectable.h"
#include "mvApp.h"
#include "mvItemRegistry.h"
#include "mvImGuiThemeScope.h"
#include "mvFontScope.h"
namespace Marvel {

    // Registers the Python signature/help text for the add_selectable command.
    void mvSelectable::InsertParser(std::map<std::string, mvPythonParser>* parsers)
    {
        parsers->insert({ s_command, mvPythonParser({
            {mvPythonDataType::Optional},
            {mvPythonDataType::String, "name"},
            {mvPythonDataType::KeywordOnly},
            {mvPythonDataType::Bool, "default_value", "", "False"},
            {mvPythonDataType::Callable, "callback", "Registers a callback", "None"},
            {mvPythonDataType::Object, "callback_data", "Callback data", "None"},
            {mvPythonDataType::Integer, "width","", "0"},
            {mvPythonDataType::Integer, "height", "", "0"},
            {mvPythonDataType::String, "parent", "Parent this item will be added to. (runtime adding)", "''"},
            {mvPythonDataType::String, "before", "This item will be displayed before the specified item in the parent. (runtime adding)", "''"},
            {mvPythonDataType::String, "source", "", "''"},
            {mvPythonDataType::Bool, "enabled", "Display grayed out text so selectable cannot be selected", "True"},
            {mvPythonDataType::String, "label", "", "''"},
            {mvPythonDataType::Bool, "show", "Attempt to render", "True"},
            {mvPythonDataType::Bool, "span_columns", "span all columns", "False"},
        }, "Adds a selectable.", "None", "Adding Widgets") });
    }

    mvSelectable::mvSelectable(const std::string& name)
        : mvBoolPtrBase(name)
    {
    }

    // Toggles the ImGui disabled flag; no-op when the state is unchanged.
    void mvSelectable::setEnabled(bool value)
    {
        if (value == m_enabled)
            return;

        if (value)
            m_flags &= ~ImGuiSelectableFlags_Disabled;
        else
            m_flags |= ImGuiSelectableFlags_Disabled;

        m_enabled = value;
    }

    // Renders the selectable and queues the user callback when it is clicked.
    void mvSelectable::draw(ImDrawList* drawlist, float x, float y)
    {
        ScopedID id;
        mvImGuiThemeScope scope(this);
        mvFontScope fscope(this);

        if (ImGui::Selectable(m_label.c_str(), m_value.get(), m_flags, ImVec2((float)m_width, (float)m_height)))
            mvApp::GetApp()->getCallbackRegistry().addCallback(m_callback, m_name, m_callback_data);
    }

    // Reads keyword options from a Python dict into the flag bits
    // (currently only "span_columns").
    void mvSelectable::setExtraConfigDict(PyObject* dict)
    {
        if (dict == nullptr)
            return;

        // helper for bit flipping
        auto flagop = [dict](const char* keyword, int flag, int& flags, bool flip)
        {
            if (PyObject* item = PyDict_GetItemString(dict, keyword)) ToBool(item) ? flags |= flag : flags &= ~flag;
        };

        // window flags
        flagop("span_columns", ImGuiSelectableFlags_SpanAllColumns, m_flags, false);
    }

    // Writes the flag bits back out to a Python dict as bools.
    void mvSelectable::getExtraConfigDict(PyObject* dict)
    {
        if (dict == nullptr)
            return;

        // helper to check and set bit
        auto checkbitset = [dict](const char* keyword, int flag, const int& flags, bool flip)
        {
            PyDict_SetItemString(dict, keyword, ToPyBool(flags & flag));
        };

        // window flags
        checkbitset("span_columns", ImGuiSelectableFlags_SpanAllColumns, m_flags, false);
    }

}
Samuelote/spyfall | app/components/LocationsCount/LocationsCount.js | <filename>app/components/LocationsCount/LocationsCount.js<gh_stars>1-10
import React from 'react';
import { connect } from 'react-redux';
import selectedLocationsCountSelector from 'selectors/selectedLocationsCount';
import totalLocationsCountSelector from 'selectors/totalLocationsCount';
export const LocationsCount = ({className, style, selectedLocationsCount, totalLocationsCount}) => (
<span className={className} style={style}>
{selectedLocationsCount}/{totalLocationsCount}
</span>
);
const mapStateToProps = (state) => ({
selectedLocationsCount: selectedLocationsCountSelector(state),
totalLocationsCount: totalLocationsCountSelector(state),
});
export default connect(mapStateToProps)(LocationsCount);
|
HeNine/scrumkin | scrumkin-api/src/main/java/com/scrumkin/api/exceptions/UserStoryTitleNotUniqueException.java | package com.scrumkin.api.exceptions;
import javax.ejb.ApplicationException;
/**
 * Thrown when a user story's title collides with an existing one.
 * Marked as an application exception so the EJB container rolls back
 * the current transaction when it is raised.
 */
@ApplicationException(rollback = true)
public class UserStoryTitleNotUniqueException extends Exception {

    private static final long serialVersionUID = 1L;

    public UserStoryTitleNotUniqueException() {
        super();
    }

    public UserStoryTitleNotUniqueException(String description) {
        super(description);
    }
}
|
coheigea/tdi-studio-se | main/plugins/org.talend.designer.components.libs/libs_src/talend-proxy/src/main/java/org/talend/proxy/ProxyHolder.java | package org.talend.proxy;
import java.net.Proxy;
import java.util.HashMap;
import java.util.Map;
/**
* Use only inside of ThreadLocal
*/
public class ProxyHolder {
private Map<String, Proxy> proxyMap;
public ProxyHolder() {
proxyMap = new HashMap<>();
}
/**
*
* @param proxy HTTP or SOCKS proxy instance to use
* @param host without protocol
* @param port -1 to apply proxy for every port
*/
public void putNewHost(Proxy proxy, String host, int port) {
if (port != -1) {
proxyMap.put(host + ":" + port, proxy);
} else {
proxyMap.put(host, proxy);
}
}
public Map<String, Proxy> getProxyMap() {
return proxyMap;
}
}
|
isac322/BOJ | 10931/10931.py3.py | <gh_stars>10-100
# BOJ 10931: read one line from stdin and print its SHA-384 hex digest.
import hashlib
print(hashlib.sha384(input().encode()).hexdigest())
Zoxc/mirb | src/classes/proc.cpp | #include "proc.hpp"
#include "symbol.hpp"
#include "class.hpp"
#include "../runtime.hpp"
namespace Mirb
{
    // Invokes the stored block with the proc's captured self/name/scope chain.
    value_t Proc::call(Proc *self, value_t block, size_t argc, value_t argv[])
    {
        return call_code(self->block, self->self, self->name, self->scope, self->scopes, block, argc, argv);
    }

    // Proc.new: wraps the given block in a Proc object.
    value_t Proc::rb_new(value_t block)
    {
        return get_proc(block);
    }

    // Defines the Proc class and its methods: new, call, and the [] alias.
    void Proc::initialize()
    {
        context->proc_class = define_class("Proc", context->object_class);

        singleton_method<Arg::Block, &rb_new>(context->proc_class, "new");

        method<Self<Proc>, Arg::Block, Arg::Count, Arg::Values, &call>(context->proc_class, "call");
        method<Self<Proc>, Arg::Block, Arg::Count, Arg::Values, &call>(context->proc_class, "[]");
    }
};
|
thanhtunguet/react-native-telink-ble | android/src/main/java/com/react/telink/ble/model/MeshNetKey.java | package com.react.telink.ble.model;
import java.io.Serializable;
/**
 * Network key used for network-layer encryption in the mesh.
 */
public class MeshNetKey implements MeshKey, Serializable {

    public String name;
    public int index;
    public byte[] key;

    public MeshNetKey(String name, int index, byte[] key) {
        this.name = name;
        this.index = index;
        this.key = key;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public int getIndex() {
        return index;
    }

    @Override
    public byte[] getKey() {
        return key;
    }
}
|
DALDEI/visjs | visjs-4-addon-addon/src/main/java/org/vaadin/visjs/networkDiagram/options/nodes/HeightConstraint.java | <reponame>DALDEI/visjs<filename>visjs-4-addon-addon/src/main/java/org/vaadin/visjs/networkDiagram/options/nodes/HeightConstraint.java
package org.vaadin.visjs.networkDiagram.options.nodes;
/**
 * Minimum-height constraint for a node, with vertical alignment of its label.
 * Defaults to no minimum and middle alignment.
 */
public class HeightConstraint {

    public enum VAlign {
        top,
        middle,
        bottom
    }

    private int minimum;
    private VAlign valign = VAlign.middle;

    public HeightConstraint() {
    }

    public HeightConstraint(int minimum, VAlign valign) {
        this.minimum = minimum;
        this.valign = valign;
    }

    public int getMinimum() {
        return minimum;
    }

    public void setMinimum(int minimum) {
        this.minimum = minimum;
    }

    public VAlign getVAlign() {
        return valign;
    }

    public void setVAlign(VAlign valign) {
        this.valign = valign;
    }
}
|
jardelnovaes/kikaha | kikaha-injection-processor/tests/kikaha/cdi/tests/PostConstructorSingletonService.java | <reponame>jardelnovaes/kikaha
package kikaha.cdi.tests;
import javax.inject.Singleton;
import lombok.Getter;
/**
 * Singleton used by the CDI tests to verify that the standard
 * {@code @javax.annotation.PostConstruct} callback runs after construction.
 */
@Singleton
public class PostConstructorSingletonService {

    // Counters incremented by the lifecycle callbacks; read by the tests.
    @Getter
    final Status status = new Status();

    @javax.annotation.PostConstruct
    public void postConstructorJava() {
        status.calledPostContructJavaAnnotation++;
    }
}
|
deborahtandurella/Progetto-E19 | flappy/src/network/commands/ScrollingElementGeneratedCommand.java | <filename>flappy/src/network/commands/ScrollingElementGeneratedCommand.java
package network.commands;
import flappyEntities.Entity;
import flappyEntities.logic.HasSerializableVersion;
import flappyEntities.logic.SerializableEntity;
import game.multiplayer.OnlineLocalGame;
import game.multiplayer.OnlineRemoteGame;
import network.Command;
/**
 * Notifies the remote game that a horizontally scrolling game element
 * was generated locally, so it can be recreated on the remote side.
 */
public class ScrollingElementGeneratedCommand extends Command {
    private static final long serialVersionUID = -539210512249000006L;
    // Serializable snapshot of the element's logic component.
    private SerializableEntity entity;
    // Entity id, preserved so local and remote games refer to the same element.
    private int ID;

    /**
     * @param scrollingElement the element that was created
     */
    public ScrollingElementGeneratedCommand(Entity scrollingElement) {
        entity= ((HasSerializableVersion) scrollingElement.getLogicComponent()).getSerializableVersion();
        ID= scrollingElement.getID();
    }

    @Override
    public void execute(OnlineRemoteGame remoteGame, OnlineLocalGame localGame) {
        // Rebuild the entity on the remote canvas and reuse the original id.
        Entity newEntity= entity.instantiate(remoteGame.getCanvas());
        newEntity.setID(ID);
        remoteGame.addScrollingElement(newEntity);
    }
}
|
datianshi/ignition | cloudfoundry/space_test.go | <reponame>datianshi/ignition
package cloudfoundry_test
import (
"errors"
"testing"
cfclient "github.com/cloudfoundry-community/go-cfclient"
. "github.com/onsi/gomega"
"github.com/pivotalservices/ignition/cloudfoundry"
"github.com/pivotalservices/ignition/cloudfoundry/cloudfoundryfakes"
"github.com/sclevine/spec"
"github.com/sclevine/spec/report"
)
// TestCreateSpace registers the CreateSpace spec suite with a terminal reporter.
func TestCreateSpace(t *testing.T) {
	spec.Run(t, "CreateSpace", testCreateSpace, spec.Report(report.Terminal{}))
}
// testCreateSpace exercises cloudfoundry.CreateSpace against a fake API:
// an error from the API must surface, and a successful creation must not.
func testCreateSpace(t *testing.T, when spec.G, it spec.S) {
	it.Before(func() {
		RegisterTestingT(t)
	})

	it("returns an error if the creator returns an error", func() {
		a := &cloudfoundryfakes.FakeAPI{}
		a.CreateSpaceReturns(cfclient.Space{}, errors.New("test error"))
		err := cloudfoundry.CreateSpace("test-space", "test-organization-id", "test-user-id", a)
		Expect(err).To(HaveOccurred())
	})

	it("returns the space if it is created successfully", func() {
		a := &cloudfoundryfakes.FakeAPI{}
		a.CreateSpaceReturns(cfclient.Space{
			Guid: "test-space-guid",
			Name: "test-space",
			CreatedAt: "created-at",
			UpdatedAt: "updated-at",
		}, nil)
		err := cloudfoundry.CreateSpace("test-space", "test-organization-id", "test-user-id", a)
		Expect(err).NotTo(HaveOccurred())
	})
}
|
stas-vilchik/bdd-ml | data/9573.js | <filename>data/9573.js
{
strats[hook] = mergeHook;
}
|
PublicInMotionGmbH/ui-kit | packages/navigation/src/Element.js | import React from 'react'
import PropTypes from 'prop-types'
import { buildClassName } from '@talixo/shared'
export const moduleName = 'navigation'
const propTypes = {
/** Active state */
active: PropTypes.bool,
/** Element items */
children: PropTypes.node,
/** Completed state */
completed: PropTypes.bool,
/** Additional class name */
className: PropTypes.string,
/** Disabled state */
disabled: PropTypes.bool,
/** Function passed to element */
onClick: PropTypes.func
}
const defaultProps = {
active: false,
disabled: false
}
/**
* Component which represents Element.
*
* @param {object} props
* @param {boolean} [props.active]
* @param {*} [props.children]
* @param {boolean} [props.completed]
* @param {string} [props.className]
* @param {boolean} [props.disabled]
* @param {function} [props.onClick]
* @returns {React.Element}
*/
function Element (props) {
const { active, children, completed, className, disabled, onClick, ...passedProps } = props
// Build element class name
const classNames = buildClassName([ moduleName, 'element' ], className, { active, completed, disabled })
return (
<li className={classNames} onClick={onClick} {...passedProps}>
{children}
</li>
)
}
Element.displayName = 'Element'
Element.propTypes = propTypes
Element.defaultProps = defaultProps
export default Element
|
phanime/phanime | lib/routes/authentication/resetPassword.js | ResetPasswordController = RouteController.extend({
onBeforeAction: function () {
console.log(this.params.token);
// this check is extremely weak (not really for security purposes)
if (!this.params.token || this.params.token.length < 10) {
// if a token doesn't exist
// we'll just send them to forgotPassword route
Router.go('forgotPassword', {query: ''});
}
SEO.set({
title: siteSettings.getFullTitle("Reset Password"),
og: {
'title' : siteSettings.getFullTitle("Reset Password"),
}
});
this.next();
},
data: function() {
return {
token: this.params.token
};
}
}); |
timrdf/csv2rdf4lod | src/edu/rpi/tw/data/rdf/utils/pipes/starts/Cat.java | <reponame>timrdf/csv2rdf4lod
package edu.rpi.tw.data.rdf.utils.pipes.starts;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.logging.Logger;
import org.openrdf.model.Resource;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.RDFParseException;
import org.openrdf.rio.helpers.BasicParserSettings;
import org.openrdf.sail.memory.MemoryStore;
import edu.rpi.tw.data.rdf.utils.pipes.Constants;
/**
* Accept a series of file pathnames and URLs of RDF files, concatenate them into a single model, and print to sysout.
* Strip all named graph designations that may be specified in trig and trix files.
*
* Inspired by jena.rdfcopy, but it cannot handle trig named graph designations.
* TODO: varargs for file could be used as a cat, which would do part of PlusMinus's job.
*
*
* Cat my.rdf
* reads my.rdf as rdf/xml and outputs as rdf/xml
*
* Cat my.blah
* reads my.blah as rdf/xml and outputs as rdf/xml
*
* Cat my.ttl rdf/xml
* reads my.ttl as turtle and outputs as rdf/xml
*
* Cat my.nt ttl
* reads my.nt as n-triples and outputs as turtle
*
* Cat - rdf/xml ttl
* reads stdin as rdf/xml and outputs as turtle
*
* Cat - rdf/xml nt my.ttl other.ttl
* concatenates stdin (as rdf/xml), my.ttl (as turtle) and other.ttl (as turtle); outputs as n-triples
*/
public class Cat {
private static Logger logger = Logger.getLogger(Cat.class.getName());
public static Resource DEFAULT_CONTEXT = Constants.PIPE_CONTEXT;
public static boolean ENCOUNTERED_PARSE_ERROR = false;
public static final String USAGE = "Cat [filePath | URL]*";
/**
 * Concatenate the RDF at each file path or URL argument (plus stdin, when
 * available) into one in-memory repository, then print the union (context
 * PIPE_CONTEXT) to stdout in the default pipeline format.
 *
 * e.g., Cat foaf/examples/hendler.foaf.rdf http://www.w3.org/People/Berners-Lee/card
 *
 * @param args - [filePath | URL]*
 */
public static void main(String[] args) {
    if( args.length < 1 ) {
        //System.err.println("usage: "+USAGE);
    }
    Repository repository = new SailRepository(new MemoryStore());
    try {
        repository.initialize();
    } catch (RepositoryException e) {
        e.printStackTrace();
    }
    RepositoryConnection conn = null;
    try {
        // If there is content on sysin, include it in the model union.
        if( System.in.available() > 0 ) { // TODO: this does not work.
            conn = repository.getConnection();
            conn.add(System.in, "", Constants.DEFAULT_PIPELINE_FORMAT, Constants.PIPE_CONTEXT);
            conn.commit();
            conn.close();
        }
        // Concatenate all RDF from filepaths and URLs designated as arguments.
        for( int i=0; i < args.length; i++ ) {
            load(args[i], repository, Constants.PIPE_CONTEXT);
        }
        conn = repository.getConnection();
        // Serialize everything in PIPE_CONTEXT to stdout.
        conn.export(Constants.handlerForFileExtension(Constants.DEFAULT_PIPELINE_EXTENSION, System.out),Constants.PIPE_CONTEXT);
    } catch (RepositoryException e) {
        e.printStackTrace();
    } catch (RDFHandlerException e) {
        e.printStackTrace();
    } catch (RDFParseException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }finally {
        if( conn != null ) {
            try {
                conn.close();
            } catch (RepositoryException e) {
                e.printStackTrace();
            }
        }
    }
}
/**
 * Create an initialized in-memory repository and, when 'location' is given,
 * populate the named context with the RDF found there.
 *
 * @param location - the file/URL to load. If null, just return an initialized in-memory Repository.
 * @param context - the context to load 'location' into.
 *
 * @return a new, initialized in-memory Repository.
 */
public static Repository load(String location, Resource context) {
    Repository rep = new SailRepository(new MemoryStore());
    try {
        rep.initialize();
    } catch (RepositoryException e) {
        e.printStackTrace();
    }
    if( location != null ) {
        Cat.load(location, rep, context);
    }
    return rep;
}
/**
 * Create a repository populated with the given location,
 * placed in the context Constants.PIPE_CONTEXT.
 *
 * @param location a file path or URL of an RDF file.
 *
 * @return a new Repository with context Constants.PIPE_CONTEXT containing RDF at file/URL 'location'.
 */
public static Repository load(String location) {
    return load(location, Constants.PIPE_CONTEXT);
}
	/**
	 * Load 'location' into 'repository' under the default context Cat.DEFAULT_CONTEXT.
	 *
	 * @param location a file path or URL of an RDF file.
	 * @param repository an initialized Repository to which to add the RDF.
	 */
	public static void load(String location, Repository repository) {
		load(location, repository, Cat.DEFAULT_CONTEXT);
	}
/**
* Load the location (file path or URL) of an RDF file into the given initialized 'repository'.
*
* @param location - a file path of URL of an RDF file.
* @param repository - an initialized Repository to which to add the RDF file.
*/
public static void load(String location, Repository repository, Resource context) {
// TODO: see http://rdf4j.org/sesame/2.7/docs/users.docbook?view#chapter-rio4
//
// Debug with:
// java \
// -Djava.util.logging.config.file=$CSV2RDF4LOD_HOME/bin/logging/$CSV2RDF4LOD_CONVERT_DEBUG_LEVEL.properties \
// edu.rpi.tw.data.rdf.utils.pipes.starts.Cat
//
logger.info("load(location,repository): "+location+ " --> " +repository);
//System.err.println("load(location,repository): "+repository);
if( location == null || location.length() == 0 ) {
return;
}
/* :-( if( repository == null ) {
repository = load(location); // creates, initializes, and calls load(String location, Repository repository)
}*/
RDFFormat format = Constants.formatForFilename(location);
logger.info("location requested to load: "+ location+"\n"+
"format guess: " + format+" / "+
format.getName()+" / "+
format.getFileExtensions());
RepositoryConnection conn = null;
int attempts = 0;
boolean success = false;
try {
conn = repository.getConnection();
// http://sourceforge.net/mailarchive/message.php?msg_id=31982163
// all of the settings classes are all in the org.openrdf.rio.helpers package right now
conn.getParserConfig().addNonFatalError(BasicParserSettings.VERIFY_DATATYPE_VALUES);
// Attempt as local file.
File file = new File(location);
String nonURIPath = location.replaceAll("^file:", "");
if( !file.exists() ) {
file = new File(nonURIPath);
}
if( file.exists() ) {
try {
attempts++;
logger.fine("attempting to parse File as guessed "+Constants.formatForFilename(location)+"... "+file.getAbsolutePath());
logger.fine("size before: "+conn.size(context) + " in context " + context);
// This was approach before adding conversion:includes:
//primary.add(new File(location), "file:/"+location, Constants.formatForFilename(location), context);
//
conn.add(file, null, Constants.formatForFilename(location), context);
logger.fine("added with guessed format");
logger.fine("after: "+conn.size(context));
conn.commit();
success = true;
} catch (RepositoryException e) {
e.printStackTrace();
} catch (RDFParseException e) {
ENCOUNTERED_PARSE_ERROR = true;
System.err.println(location);
logger.finer(Constants.formatForFilename(location) +" failed");
} catch (IOException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
logger.fine("success after guess: " + success);
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if( !success ) {
try {
attempts++;
logger.fine("attempting to parse File as "+RDFFormat.TURTLE+"... ");
logger.finer("before turtle parse: "+conn.size(context));
conn.add(file, null, RDFFormat.TURTLE, context);
conn.commit();
success = true;
logger.finer("after turtle parse: "+conn.size(context));
} catch (RDFParseException e) {
ENCOUNTERED_PARSE_ERROR = true;
logger.finer("failed.");
e.printStackTrace();
} catch (RepositoryException e) {
logger.finer("failed.");
} catch (MalformedURLException e) {
logger.finer("failed.");
} catch (IOException e) {
logger.finer("failed.");
} catch (Exception e) {
logger.finer("failed.");
}
}else {
logger.finer("We're good with format "+format);
}
logger.fine("success after turtle: " + success);
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if( !success ) {
try {
attempts++;
logger.finer("attempting to parse File as "+RDFFormat.NTRIPLES+"... ");
conn.add(file, null, RDFFormat.NTRIPLES, context);
conn.commit();
success = true;
} catch (RDFParseException e) {
ENCOUNTERED_PARSE_ERROR = true;
logger.finer("failed.");
} catch (RepositoryException e) {
logger.finer("failed.");
} catch (MalformedURLException e) {
logger.finer("failed.");
} catch (IOException e) {
logger.finer("failed.");
} catch (Exception e) {
logger.finer("failed.");
}
}
logger.fine("success after ntriples: " + success);
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if( !success ) {
try {
attempts++;
logger.finer("attempting to parse File as "+RDFFormat.NQUADS+"... ");
conn.add(file, null, RDFFormat.NQUADS, context);
conn.commit();
success = true;
} catch (RDFParseException e) {
ENCOUNTERED_PARSE_ERROR = true;
logger.finer("failed.");
} catch (RepositoryException e) {
logger.finer("failed.");
} catch (MalformedURLException e) {
logger.finer("failed.");
} catch (IOException e) {
logger.finer("failed.");
} catch (Exception e) {
logger.finer("failed.");
}
}
logger.fine("success after nquads: " + success);
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if( !success ) {
try {
attempts++;
logger.finer("attempting to parse File as "+RDFFormat.RDFA+"... ");
conn.add(file, null, RDFFormat.RDFA, context);
conn.commit();
success = true;
} catch (RDFParseException e) {
ENCOUNTERED_PARSE_ERROR = true;
logger.finer("failed.");
} catch (RepositoryException e) {
logger.finer("failed.");
} catch (MalformedURLException e) {
logger.finer("failed.");
} catch (IOException e) {
logger.finer("failed.");
} catch (Exception e) {
logger.finer("failed.");
}
}
logger.fine("success after rdfa: " + success);
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
}else {
// URI does not exist as a local file.
try {
attempts++;
logger.finer("attempting to parse URL as "+Constants.formatForFilename(location)+"... ");
conn.add(new URL(location), location, Constants.formatForFilename(location), context);
conn.commit();
success = true;
} catch (RDFParseException e) {
ENCOUNTERED_PARSE_ERROR = true;
logger.finer(Constants.formatForFilename(location) +" failed"); e.printStackTrace();
} catch (RepositoryException e) {
logger.finer(Constants.formatForFilename(location) +" failed."); e.printStackTrace();
} catch (MalformedURLException e) {
logger.finer(Constants.formatForFilename(location) +" failed."); e.printStackTrace();
} catch (IOException e) {
logger.finer(Constants.formatForFilename(location) +" failed."); e.printStackTrace();
} catch (NullPointerException e ) {
logger.finer(Constants.formatForFilename(location) +" failed."); e.printStackTrace();
} catch (Exception e) {
logger.finer("failed.");
}
if(!success) {
try {
attempts++;
logger.finer("attempting to parse URL as "+RDFFormat.RDFXML+"... ");
conn.add(new URL(location), location, RDFFormat.RDFXML, context);
conn.commit();
success = true;
} catch (RDFParseException e) {
ENCOUNTERED_PARSE_ERROR = true;
logger.finer("failed.");
} catch (RepositoryException e) {
logger.finer("failed.");
} catch (MalformedURLException e) {
logger.finer("failed.");
} catch (IOException e) {
logger.finer("failed.");
} catch (Exception e) {
logger.finer("failed.");
}
}
if(!success) {
try {
attempts++;
logger.finer("attempting to parse URL as "+RDFFormat.TURTLE+"... ");
conn.add(new URL(location), location, RDFFormat.TURTLE, context);
conn.commit();
success = true;
} catch (RDFParseException e) {
ENCOUNTERED_PARSE_ERROR = true;
logger.finer("failed.");
} catch (RepositoryException e) {
logger.finer("failed.");
} catch (MalformedURLException e) {
logger.finer("failed.");
} catch (IOException e) {
logger.finer("failed.");
} catch (Exception e) {
logger.finer("failed.");
}
}
if(!success) {
try {
attempts++;
logger.finer("attempting to parse URL as "+RDFFormat.N3+"... ");
conn.add(new URL(location), location, RDFFormat.N3, context);
conn.commit();
success = true;
} catch (RDFParseException e) {
ENCOUNTERED_PARSE_ERROR = true;
logger.finer("failed.");
} catch (RepositoryException e) {
logger.finer("failed.");
} catch (MalformedURLException e) {
logger.finer("failed.");
} catch (IOException e) {
logger.finer("failed.");
} catch (Exception e) {
logger.finer("failed.");
}
}
if(!success) {
try {
attempts++;
logger.finer("attempting to parse URL as "+RDFFormat.NTRIPLES+"... ");
conn.add(new URL(location), location, RDFFormat.NTRIPLES, context);
conn.commit();
success = true;
} catch (RDFParseException e) {
ENCOUNTERED_PARSE_ERROR = true;
logger.finer("failed.");
} catch (RepositoryException e) {
logger.finer("failed.");
} catch (MalformedURLException e) {
logger.finer("failed.");
} catch (IOException e) {
logger.finer("failed.");
} catch (Exception e) {
logger.finer("failed.");
}
}
if(!success) {
try {
attempts++;
logger.finer("attempting to parse URL as "+RDFFormat.TRIG+"... ");
conn.add(new URL(location), location, RDFFormat.TRIG, context);
conn.commit();
success = true;
} catch (RDFParseException e) {
ENCOUNTERED_PARSE_ERROR = true;
logger.finer("failed.");
} catch (RepositoryException e) {
logger.finer("failed.");
} catch (MalformedURLException e) {
logger.finer("failed.");
} catch (IOException e) {
logger.finer("failed.");
} catch (Exception e) {
logger.finer("failed.");
}
}
if(!success) {
try {
attempts++;
logger.finer("attempting to parse URL as "+RDFFormat.TRIX+"... ");
conn.add(new URL(location), location, RDFFormat.TRIX, context);
conn.commit();
success = true;
} catch (RDFParseException e) {
ENCOUNTERED_PARSE_ERROR = true;
logger.finer("failed.");
} catch (RepositoryException e) {
logger.finer("failed.");
} catch (MalformedURLException e) {
logger.finer("failed.");
} catch (IOException e) {
logger.finer("failed.");
} catch (Exception e) {
logger.finer("failed.");
}
}
}
} catch (RepositoryException e1) {
e1.printStackTrace();
} finally {
logger.finer("load attempts: " + attempts);
if( conn != null ) {
try {
logger.finer("load size " + conn.size(context) + "in context " + context);
conn.close();
} catch (RepositoryException e) {
e.printStackTrace();
}
}
}
}
public static long size(Repository linksViasRep) {
long size = 0;
try {
RepositoryConnection conn = linksViasRep.getConnection();
size = conn.size();
conn.close();
} catch (RepositoryException e) {
e.printStackTrace();
}
return size;
}
} |
r4b3rt/lindb | parallel/executor.go | // Licensed to LinDB under one or more contributor
// license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright
// ownership. LinDB licenses this file to you under
// the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package parallel
//go:generate mockgen -source=./executor.go -destination=./executor_mock.go -package=parallel
// Executor represents a query executor on both the storage and broker side.
// When returning query results, processing happens in the following order:
// 1) filtering
// 2) scanning
// 3) grouping, if needed
// 4) down sampling
// 5) aggregation
// 6) functions
// 7) expressions
type Executor interface {
	// Execute executes the query:
	// 1) plans the query language
	// 2) aggregates data from the time series (memory/file/network)
	Execute()
}
// BrokerExecutor represents the broker query executor, which:
// 1) chooses the storage nodes where the data is relatively complete
// 2) chooses broker nodes for root and intermediate computing from all available broker nodes
// 3) uses storage nodes as leaf computing nodes that do filtering and atomic computation
// 4) optionally uses intermediate computing nodes (only needed for group-by queries) to order grouped results
// 5) root computing node does function and expression computing ???? //TODO need?
// 6) finally returns the result set to the user ???? //TODO need?
//
// NOTICE: there are some scenarios:
// 1) some assignment shards are not in the query replica shards, so some
// expected results may be missing because the data lives in an offline shard.
// Incomplete data can still be queried because the system favors availability.
type BrokerExecutor interface {
	Executor

	// ExecuteContext returns the broker execute context
	ExecuteContext() BrokerExecuteContext
}
// MetadataExecutor represents the metadata query executor, which serves suggestions for:
// 1. metric names
// 2. tag keys of a specified metric name
// 3. tag values of a specified metric name and tag key
// 4. fields of a specified metric name
type MetadataExecutor interface {
	// Execute executes the metadata query logic (both broker and storage implement it).
	Execute() ([]string, error)
}
|
gonovacloud/manageiq | app/models/aliases/miq_provision_cloud.rb | <reponame>gonovacloud/manageiq<filename>app/models/aliases/miq_provision_cloud.rb
# Backward-compatible top-level alias for the cloud-manager provision class.
::MiqProvisionCloud = ::NOVAHawk::Providers::CloudManager::Provision
|
14ms/Minecraft-Disclosed-Source-Modifications | Flux 39/today/flux/utility/MathUtils.java | package today.flux.utility;
import today.flux.gui.clickgui.classic.Rect;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.Random;
public class MathUtils {
	// Shared RNG: a single instance avoids re-seeding a new Random per call
	// (which weakens randomness and wastes allocations).
	private static final Random rng = new Random();

	/** Linearly remaps {@code x} from the range [prev_min, prev_max] into [new_min, new_max]. */
	public static float map(float x, float prev_min, float prev_max, float new_min, float new_max) {
		return (x - prev_min) / (prev_max - prev_min) * (new_max - new_min) + new_min;
	}

	/** Returns true if (x, y) lies strictly inside the axis-aligned box (minX, minY)-(maxX, maxY). */
	public static boolean contains(float x, float y, float minX, float minY, float maxX, float maxY) {
		return x > minX && x < maxX && y > minY && y < maxY;
	}

	/** Returns true if (x, y) lies strictly inside {@code rect}. */
	public static boolean contains(float x, float y, Rect rect) {
		return x > rect.getX() && x < rect.getX() + rect.getWidth()
				&& y > rect.getY() && y < rect.getY() + rect.getHeight();
	}

	/** Returns true if {@code num} parses as an int. */
	public static boolean isInteger(final String num) {
		try {
			Integer.parseInt(num);
			return true;
		} catch (NumberFormatException e) {
			// Expected for non-numeric input; a validity predicate should not
			// print stack traces to stderr (the original did).
			return false;
		}
	}

	/** Returns the midpoint of x1 and x2, computed in long to avoid int overflow. */
	public static int getMid(int x1, int x2) {
		return (int) (((long) x1 + x2) / 2);
	}

	/** Returns true if {@code num} parses as a double. */
	public static boolean isDouble(final String num) {
		try {
			Double.parseDouble(num);
			return true;
		} catch (NumberFormatException e) {
			return false;
		}
	}

	/** Returns true if {@code num} parses as a float. */
	public static boolean isFloat(final String num) {
		try {
			Float.parseFloat(num);
			return true;
		} catch (NumberFormatException e) {
			return false;
		}
	}

	/** Returns true if {@code num} parses as a long. */
	public static boolean isLong(final String num) {
		try {
			Long.parseLong(num);
			return true;
		} catch (NumberFormatException e) {
			return false;
		}
	}

	/** Returns the shared random number generator. */
	public static Random getRng() {
		return rng;
	}

	/**
	 * Rounds {@code value} to {@code places} decimal places using HALF_UP.
	 *
	 * @throws IllegalArgumentException if {@code places} is negative.
	 */
	public static double round(final double value, final int places) {
		if (places < 0) {
			throw new IllegalArgumentException();
		}
		return new BigDecimal(value).setScale(places, RoundingMode.HALF_UP).doubleValue();
	}

	/** Returns a random float in [0, 1). */
	public static float getRandom() {
		return rng.nextFloat();
	}

	/** Returns a random int in [0, cap). */
	public static int getRandom(final int cap) {
		return rng.nextInt(cap);
	}

	/** Returns a random int in [floor, cap], both ends inclusive. */
	public static int getRandom(final int floor, final int cap) {
		return floor + rng.nextInt(cap - floor + 1);
	}

	/** Returns a random int in [min, max], inclusive; uses the shared RNG instead of allocating one per call. */
	public static int randInt(final int min, final int max) {
		return rng.nextInt(max - min + 1) + min;
	}

	/** Clamps {@code value} into [floor, cap]. */
	public static float clampValue(final float value, final float floor, final float cap) {
		if (value < floor) {
			return floor;
		}
		return Math.min(value, cap);
	}

	/** Clamps {@code value} into [floor, cap]. */
	public static int clampValue(final int value, final int floor, final int cap) {
		if (value < floor) {
			return floor;
		}
		return Math.min(value, cap);
	}

	/**
	 * Returns the Jaro similarity of the two strings in [0, 1].
	 * <p>
	 * Fixed: the original used integer division for the three ratio terms,
	 * truncating each to 0 or 1 and producing wrong results for nearly every
	 * input (even identical strings scored 1/3).
	 */
	public static float getSimilarity(final String string1, final String string2) {
		final int minLen = Math.min(string1.length(), string2.length());
		final int halflen = minLen / 2 + minLen % 2;
		final StringBuffer common1 = getCommonCharacters(string1, string2, halflen);
		final StringBuffer common2 = getCommonCharacters(string2, string1, halflen);
		if (common1.length() == 0 || common2.length() == 0) {
			return 0.0f;
		}
		if (common1.length() != common2.length()) {
			return 0.0f;
		}
		// Half the number of positions where the two common-character sequences disagree.
		int transpositions = 0;
		for (int n = common1.length(), i = 0; i < n; ++i) {
			if (common1.charAt(i) != common2.charAt(i)) {
				++transpositions;
			}
		}
		transpositions /= 2;
		return ((float) common1.length() / string1.length()
				+ (float) common2.length() / string2.length()
				+ (float) (common1.length() - transpositions) / common1.length()) / 3.0f;
	}

	/**
	 * Collects the characters of string1 that also appear in string2 within a
	 * sliding window of ±distanceSep positions; matched characters in the copy
	 * are zeroed out so they cannot match twice.
	 * <p>
	 * Fixed: the original window bound {@code j < min(i + distanceSep, m - 1)}
	 * excluded both the window end and the final character of string2; the
	 * window now covers [max(0, i - distanceSep), min(i + distanceSep, m - 1)] inclusive.
	 */
	private static StringBuffer getCommonCharacters(final String string1, final String string2, final int distanceSep) {
		final StringBuffer returnCommons = new StringBuffer();
		final StringBuffer copy = new StringBuffer(string2);
		final int n = string1.length();
		final int m = string2.length();
		for (int i = 0; i < n; ++i) {
			final char ch = string1.charAt(i);
			boolean foundIt = false;
			for (int j = Math.max(0, i - distanceSep); !foundIt && j < Math.min(i + distanceSep + 1, m); ++j) {
				if (copy.charAt(j) == ch) {
					foundIt = true;
					returnCommons.append(ch);
					copy.setCharAt(j, '\0');
				}
			}
		}
		return returnCommons;
	}

	/** Alias of {@link #round(double, int)} kept for backward compatibility. */
	public static double meme(double value, int places) {
		return round(value, places);
	}

	/** Alias of {@link #randInt(int, int)} kept for backward compatibility. */
	public static int customRandInt(int min, int max) {
		return randInt(min, max);
	}

	/** Alias of {@link #round(double, int)} kept for backward compatibility. */
	public static double roundToPlace(final double value, final int places) {
		return round(value, places);
	}

	/** Returns |source - target|; the original sqrt(diff*diff) overflowed to infinity for large diffs. */
	public static double getDistance(final double source, final double target) {
		return Math.abs(source - target);
	}
}
|
daningenthron/rudash | lib/rudash/flip.rb | <filename>lib/rudash/flip.rb
module Rudash
    module Flip
        # Returns a proc that invokes +a_proc+ with its arguments reversed.
        #
        # Raises if +a_proc+ is not a Proc/Method.
        #
        # Fixed: the original assigned the lambda to an unused local variable;
        # the lambda is now returned directly.
        def flip(a_proc)
            raise 'Expected a Proc/Method' if !Rudash::Utils.is_function?(a_proc)

            -> (*args) { a_proc.(*args.reverse) }
        end
    end
end
|
gongshoudao/GdxForAndroid | libgdx/src/main/java/com/badlogic/gdx/math/Intersector.java | /*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.math;
import com.badlogic.gdx.math.Plane.PlaneSide;
import com.badlogic.gdx.math.collision.BoundingBox;
import com.badlogic.gdx.math.collision.Ray;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.FloatArray;
import java.util.Arrays;
import java.util.List;
/** Class offering various static methods for intersection testing between different geometric objects.
*
* @author <EMAIL>
* @author jan.stria
* @author <NAME> */
public final class Intersector {
	// Scratch vectors/arrays reused across calls to avoid per-call allocation.
	// NOTE(review): shared mutable statics make these helpers non-thread-safe —
	// confirm callers stay on a single thread (the render thread, presumably).
	private final static Vector3 v0 = new Vector3();
	private final static Vector3 v1 = new Vector3();
	private final static Vector3 v2 = new Vector3();
	private final static FloatArray floatArray = new FloatArray();
	private final static FloatArray floatArray2 = new FloatArray();
/** Returns whether the given point is inside the triangle. This assumes that the point is on the plane of the triangle. No
* check is performed that this is the case.
*
* @param point the point
* @param t1 the first vertex of the triangle
* @param t2 the second vertex of the triangle
* @param t3 the third vertex of the triangle
* @return whether the point is in the triangle */
public static boolean isPointInTriangle (Vector3 point, Vector3 t1, Vector3 t2, Vector3 t3) {
v0.set(t1).sub(point);
v1.set(t2).sub(point);
v2.set(t3).sub(point);
float ab = v0.dot(v1);
float ac = v0.dot(v2);
float bc = v1.dot(v2);
float cc = v2.dot(v2);
if (bc * ac - cc * ab < 0) return false;
float bb = v1.dot(v1);
if (ab * bc - ac * bb < 0) return false;
return true;
}
/** Returns true if the given point is inside the triangle. */
public static boolean isPointInTriangle (Vector2 p, Vector2 a, Vector2 b, Vector2 c) {
float px1 = p.x - a.x;
float py1 = p.y - a.y;
boolean side12 = (b.x - a.x) * py1 - (b.y - a.y) * px1 > 0;
if ((c.x - a.x) * py1 - (c.y - a.y) * px1 > 0 == side12) return false;
if ((c.x - b.x) * (p.y - b.y) - (c.y - b.y) * (p.x - b.x) > 0 != side12) return false;
return true;
}
/** Returns true if the given point is inside the triangle. */
public static boolean isPointInTriangle (float px, float py, float ax, float ay, float bx, float by, float cx, float cy) {
float px1 = px - ax;
float py1 = py - ay;
boolean side12 = (bx - ax) * py1 - (by - ay) * px1 > 0;
if ((cx - ax) * py1 - (cy - ay) * px1 > 0 == side12) return false;
if ((cx - bx) * (py - by) - (cy - by) * (px - bx) > 0 != side12) return false;
return true;
}
public static boolean intersectSegmentPlane (Vector3 start, Vector3 end, Plane plane, Vector3 intersection) {
Vector3 dir = v0.set(end).sub(start);
float denom = dir.dot(plane.getNormal());
if (denom == 0f) return false;
float t = -(start.dot(plane.getNormal()) + plane.getD()) / denom;
if (t < 0 || t > 1) return false;
intersection.set(start).add(dir.scl(t));
return true;
}
/** Determines on which side of the given line the point is. Returns -1 if the point is on the left side of the line, 0 if the
* point is on the line and 1 if the point is on the right side of the line. Left and right are relative to the lines direction
* which is linePoint1 to linePoint2. */
public static int pointLineSide (Vector2 linePoint1, Vector2 linePoint2, Vector2 point) {
return (int)Math.signum(
(linePoint2.x - linePoint1.x) * (point.y - linePoint1.y) - (linePoint2.y - linePoint1.y) * (point.x - linePoint1.x));
}
public static int pointLineSide (float linePoint1X, float linePoint1Y, float linePoint2X, float linePoint2Y, float pointX,
float pointY) {
return (int)Math
.signum((linePoint2X - linePoint1X) * (pointY - linePoint1Y) - (linePoint2Y - linePoint1Y) * (pointX - linePoint1X));
}
	/** Checks whether the given point is in the polygon.
	 * <p>
	 * Uses the even-odd (crossing-number) rule: a horizontal ray from the point is
	 * tested against every edge, and an odd number of crossings means "inside".
	 * @param polygon The polygon vertices passed as an array
	 * @param point The point
	 * @return true if the point is in the polygon */
	public static boolean isPointInPolygon (Array<Vector2> polygon, Vector2 point) {
		// Start with the closing edge: previous vertex = last element of the array.
		Vector2 last = polygon.peek();
		float x = point.x, y = point.y;
		boolean oddNodes = false;
		for (int i = 0; i < polygon.size; i++) {
			Vector2 vertex = polygon.get(i);
			// Edge (last -> vertex) straddles the horizontal line through y?
			if ((vertex.y < y && last.y >= y) || (last.y < y && vertex.y >= y)) {
				// Flip parity when the crossing lies to the left of x.
				if (vertex.x + (y - vertex.y) / (last.y - vertex.y) * (last.x - vertex.x) < x) oddNodes = !oddNodes;
			}
			last = vertex;
		}
		return oddNodes;
	}
	/** Returns true if the specified point is in the polygon.
	 * <p>
	 * Even-odd (crossing-number) test over a flat x,y vertex array, including the
	 * closing edge from the last vertex back to the first.
	 * @param offset Starting polygon index.
	 * @param count Number of array indices to use after offset. */
	public static boolean isPointInPolygon (float[] polygon, int offset, int count, float x, float y) {
		boolean oddNodes = false;
		// sx/sy: first vertex (kept for the closing edge); y1: previous vertex's y.
		float sx = polygon[offset], sy = polygon[offset + 1], y1 = sy;
		int yi = offset + 3;
		for (int n = offset + count; yi < n; yi += 2) {
			float y2 = polygon[yi];
			// Does edge (previous -> current) straddle the horizontal line through y?
			if ((y2 < y && y1 >= y) || (y1 < y && y2 >= y)) {
				float x2 = polygon[yi - 1];
				// Flip parity when the crossing point lies to the left of x.
				if (x2 + (y - y2) / (y1 - y2) * (polygon[yi - 3] - x2) < x) oddNodes = !oddNodes;
			}
			y1 = y2;
		}
		// Closing edge: last vertex back to the first.
		if ((sy < y && y1 >= y) || (y1 < y && sy >= y)) {
			if (sx + (y - sy) / (y1 - sy) * (polygon[yi - 3] - sx) < x) oddNodes = !oddNodes;
		}
		return oddNodes;
	}
	// Scratch vectors reused by the polygon-clipping code below to avoid
	// per-call allocation (shared statics — not thread-safe).
	private final static Vector2 ip = new Vector2();
	private final static Vector2 ep1 = new Vector2();
	private final static Vector2 ep2 = new Vector2();
	private final static Vector2 s = new Vector2();
	private final static Vector2 e = new Vector2();
	/** Intersects two convex polygons with clockwise vertices and sets the overlap polygon resulting from the intersection.
	 * Follows the Sutherland-Hodgman algorithm.
	 * @param p1 The polygon that is being clipped
	 * @param p2 The clip polygon
	 * @param overlap The intersection of the two polygons (can be null, if an intersection polygon is not needed)
	 * @return Whether the two polygons intersect. */
	public static boolean intersectPolygons (Polygon p1, Polygon p2, Polygon overlap) {
		if (p1.getVertices().length == 0 || p2.getVertices().length == 0) {
			return false;
		}
		// Local aliases of the shared static scratch objects (not thread-safe).
		Vector2 ip = Intersector.ip, ep1 = Intersector.ep1, ep2 = Intersector.ep2, s = Intersector.s, e = Intersector.e;
		FloatArray floatArray = Intersector.floatArray, floatArray2 = Intersector.floatArray2;
		floatArray.clear();
		floatArray2.clear();
		// floatArray2 holds the current (progressively clipped) subject polygon.
		floatArray2.addAll(p1.getTransformedVertices());
		float[] vertices2 = p2.getTransformedVertices();
		// Clip the subject polygon against each edge (ep1 -> ep2) of the clip polygon.
		for (int i = 0, last = vertices2.length - 2; i <= last; i += 2) {
			ep1.set(vertices2[i], vertices2[i + 1]);
			// wrap around to beginning of array if index points to end;
			if (i < last)
				ep2.set(vertices2[i + 2], vertices2[i + 3]);
			else
				ep2.set(vertices2[0], vertices2[1]);
			// Fully clipped away: no intersection.
			if (floatArray2.size == 0) return false;
			s.set(floatArray2.get(floatArray2.size - 2), floatArray2.get(floatArray2.size - 1));
			// Walk each subject edge (s -> e) and keep the portion inside the clip edge.
			for (int j = 0; j < floatArray2.size; j += 2) {
				e.set(floatArray2.get(j), floatArray2.get(j + 1));
				// determine if point is inside clip edge
				boolean side = Intersector.pointLineSide(ep2, ep1, s) > 0;
				if (Intersector.pointLineSide(ep2, ep1, e) > 0) {
					if (!side) {
						// Edge crosses from outside to inside: emit the crossing point
						// (skipping a duplicate of the previously emitted vertex).
						Intersector.intersectLines(s, e, ep1, ep2, ip);
						if (floatArray.size < 2 || floatArray.get(floatArray.size - 2) != ip.x
							|| floatArray.get(floatArray.size - 1) != ip.y) {
							floatArray.add(ip.x);
							floatArray.add(ip.y);
						}
					}
					floatArray.add(e.x);
					floatArray.add(e.y);
				} else if (side) {
					// Edge crosses from inside to outside: emit only the crossing point.
					Intersector.intersectLines(s, e, ep1, ep2, ip);
					floatArray.add(ip.x);
					floatArray.add(ip.y);
				}
				s.set(e.x, e.y);
			}
			// The clipped result becomes the subject for the next clip edge.
			floatArray2.clear();
			floatArray2.addAll(floatArray);
			floatArray.clear();
		}
		if (floatArray2.size != 0) {
			if (overlap != null) {
				// Reuse the caller's vertex array when the size matches; otherwise allocate.
				if (overlap.getVertices().length == floatArray2.size)
					System.arraycopy(floatArray2.items, 0, overlap.getVertices(), 0, floatArray2.size);
				else
					overlap.setVertices(floatArray2.toArray());
			}
			return true;
		}
		return false;
	}
	/** Returns true if the specified polygons intersect. */
	static public boolean intersectPolygons (FloatArray polygon1, FloatArray polygon2) {
		// Containment first: if either polygon's first vertex lies inside the other, they overlap.
		if (Intersector.isPointInPolygon(polygon1.items, 0, polygon1.size, polygon2.items[0], polygon2.items[1])) return true;
		if (Intersector.isPointInPolygon(polygon2.items, 0, polygon2.size, polygon1.items[0], polygon1.items[1])) return true;
		// Otherwise they intersect only if their outlines cross.
		return intersectPolygonEdges(polygon1, polygon2);
	}
	/** Returns true if the lines of the specified polygons intersect. */
	static public boolean intersectPolygonEdges (FloatArray polygon1, FloatArray polygon2) {
		// Test every edge of polygon1 against every edge of polygon2 (O(n*m)),
		// starting each traversal with the closing edge (last vertex -> first vertex).
		int last1 = polygon1.size - 2, last2 = polygon2.size - 2;
		float[] p1 = polygon1.items, p2 = polygon2.items;
		float x1 = p1[last1], y1 = p1[last1 + 1];
		for (int i = 0; i <= last1; i += 2) {
			float x2 = p1[i], y2 = p1[i + 1];
			float x3 = p2[last2], y3 = p2[last2 + 1];
			for (int j = 0; j <= last2; j += 2) {
				float x4 = p2[j], y4 = p2[j + 1];
				// intersectSegments is defined elsewhere in this class; null = no intersection point needed.
				if (intersectSegments(x1, y1, x2, y2, x3, y3, x4, y4, null)) return true;
				x3 = x4;
				y3 = y4;
			}
			x1 = x2;
			y1 = y2;
		}
		return false;
	}
	// Scratch vectors for the distance/nearest-point/circle helpers below
	// (shared statics — not thread-safe).
	static Vector2 v2a = new Vector2();
	static Vector2 v2b = new Vector2();
	static Vector2 v2c = new Vector2();
	static Vector2 v2d = new Vector2();
/** Returns the distance between the given line and point. Note the specified line is not a line segment. */
public static float distanceLinePoint (float startX, float startY, float endX, float endY, float pointX, float pointY) {
float normalLength = (float)Math.sqrt((endX - startX) * (endX - startX) + (endY - startY) * (endY - startY));
return Math.abs((pointX - startX) * (endY - startY) - (pointY - startY) * (endX - startX)) / normalLength;
}
/** Returns the distance between the given segment and point. */
public static float distanceSegmentPoint (float startX, float startY, float endX, float endY, float pointX, float pointY) {
return nearestSegmentPoint(startX, startY, endX, endY, pointX, pointY, v2a).dst(pointX, pointY);
}
/** Returns the distance between the given segment and point. */
public static float distanceSegmentPoint (Vector2 start, Vector2 end, Vector2 point) {
return nearestSegmentPoint(start, end, point, v2a).dst(point);
}
	/** Returns a point on the segment nearest to the specified point. */
	public static Vector2 nearestSegmentPoint (Vector2 start, Vector2 end, Vector2 point, Vector2 nearest) {
		// Squared segment length; zero means a degenerate (single-point) segment.
		float length2 = start.dst2(end);
		if (length2 == 0) return nearest.set(start);
		// t: normalized projection of 'point' onto the segment, clamped to [0, 1].
		float t = ((point.x - start.x) * (end.x - start.x) + (point.y - start.y) * (end.y - start.y)) / length2;
		if (t < 0) return nearest.set(start);
		if (t > 1) return nearest.set(end);
		return nearest.set(start.x + t * (end.x - start.x), start.y + t * (end.y - start.y));
	}
	/** Returns a point on the segment nearest to the specified point. */
	public static Vector2 nearestSegmentPoint (float startX, float startY, float endX, float endY, float pointX, float pointY,
		Vector2 nearest) {
		final float xDiff = endX - startX;
		final float yDiff = endY - startY;
		// Squared segment length; zero means a degenerate (single-point) segment.
		float length2 = xDiff * xDiff + yDiff * yDiff;
		if (length2 == 0) return nearest.set(startX, startY);
		// t: normalized projection of the point onto the segment, clamped to [0, 1].
		float t = ((pointX - startX) * (endX - startX) + (pointY - startY) * (endY - startY)) / length2;
		if (t < 0) return nearest.set(startX, startY);
		if (t > 1) return nearest.set(endX, endY);
		return nearest.set(startX + t * (endX - startX), startY + t * (endY - startY));
	}
	/** Returns whether the given line segment intersects the given circle.
	 * @param start The start point of the line segment
	 * @param end The end point of the line segment
	 * @param center The center of the circle
	 * @param squareRadius The squared radius of the circle
	 * @return Whether the line segment and the circle intersect */
	public static boolean intersectSegmentCircle (Vector2 start, Vector2 end, Vector2 center, float squareRadius) {
		// tmp/tmp1/tmp2/tmp3 are presumably shared static Vector3 scratch objects
		// declared elsewhere in this class (not visible here) — not thread-safe.
		tmp.set(end.x - start.x, end.y - start.y, 0);
		tmp1.set(center.x - start.x, center.y - start.y, 0);
		float l = tmp.len();
		// u: scalar projection of (center - start) onto the segment direction.
		float u = tmp1.dot(tmp.nor());
		if (u <= 0) {
			// Closest point on the segment is the start point.
			tmp2.set(start.x, start.y, 0);
		} else if (u >= l) {
			// Closest point on the segment is the end point.
			tmp2.set(end.x, end.y, 0);
		} else {
			tmp3.set(tmp.scl(u)); // remember tmp is already normalized
			tmp2.set(tmp3.x + start.x, tmp3.y + start.y, 0);
		}
		// Compare the squared distance from the center to the closest point with the squared radius.
		float x = center.x - tmp2.x;
		float y = center.y - tmp2.y;
		return x * x + y * y <= squareRadius;
	}
	/** Returns whether the given line segment intersects the given circle.
	 * @param start The start point of the line segment
	 * @param end The end point of the line segment
	 * @param circle The circle
	 * @param mtv A Minimum Translation Vector to fill in the case of a collision, or null (optional).
	 * @return Whether the line segment and the circle intersect */
	public static boolean intersectSegmentCircle (Vector2 start, Vector2 end, Circle circle, MinimumTranslationVector mtv) {
		v2a.set(end).sub(start);
		v2b.set(circle.x - start.x, circle.y - start.y);
		float len = v2a.len();
		// u: scalar projection of (center - start) onto the segment direction.
		float u = v2b.dot(v2a.nor());
		if (u <= 0) {
			// Closest point on the segment is the start point.
			v2c.set(start);
		} else if (u >= len) {
			// Closest point on the segment is the end point.
			v2c.set(end);
		} else {
			v2d.set(v2a.scl(u)); // remember v2a is already normalized
			v2c.set(v2d).add(start);
		}
		// v2a now holds the vector from the circle center to the closest point.
		v2a.set(v2c.x - circle.x, v2c.y - circle.y);
		if (mtv != null) {
			// Handle special case of segment containing circle center
			if (v2a.equals(Vector2.Zero)) {
				// Push out along the segment's perpendicular by the full radius.
				v2d.set(end.y - start.y, start.x - end.x);
				mtv.normal.set(v2d).nor();
				mtv.depth = circle.radius;
			} else {
				mtv.normal.set(v2a).nor();
				mtv.depth = circle.radius - v2a.len();
			}
		}
		// Intersect iff the closest point lies within the radius.
		return v2a.len2() <= circle.radius * circle.radius;
	}
/** Intersect two 2D Rays and return the scalar parameter of the first ray at the intersection point. You can get the
 * intersection point by: Vector2 point(direction1).scl(scalar).add(start1); For more information, check:
 * http://stackoverflow.com/a/565282/1091440
 * @param start1 Where the first ray start
 * @param direction1 The direction the first ray is pointing
 * @param start2 Where the second ray start
 * @param direction2 The direction the second ray is pointing
 * @return scalar parameter on the first ray describing the point where the intersection happens. May be negative. In case the
 *         rays are collinear, Float.POSITIVE_INFINITY will be returned. */
public static float intersectRayRay (Vector2 start1, Vector2 direction1, Vector2 start2, Vector2 direction2) {
	// 2D cross product of the two direction vectors; zero means parallel/collinear rays.
	float cross = direction1.x * direction2.y - direction1.y * direction2.x;
	if (cross == 0.0f) return Float.POSITIVE_INFINITY;
	// Scale the second direction by the inverse cross product, then project the offset between starts.
	float scaledX = direction2.x / cross;
	float scaledY = direction2.y / cross;
	return (start2.x - start1.x) * scaledY - (start2.y - start1.y) * scaledX;
}
/** Intersects a {@link Ray} and a {@link Plane}. The intersection point is stored in intersection in case an intersection is
 * present.
 *
 * @param ray The ray
 * @param plane The plane
 * @param intersection The vector the intersection point is written to (optional)
 * @return Whether an intersection is present. */
public static boolean intersectRayPlane (Ray ray, Plane plane, Vector3 intersection) {
	float denom = ray.direction.dot(plane.getNormal());
	if (denom != 0) {
		// Ray is not parallel to the plane: solve origin + t * direction lying on the plane.
		float t = -(ray.origin.dot(plane.getNormal()) + plane.getD()) / denom;
		if (t < 0) return false; // intersection lies behind the ray origin
		// NOTE(review): v0 appears to be a shared static scratch vector — not thread-safe.
		if (intersection != null) intersection.set(ray.origin).add(v0.set(ray.direction).scl(t));
		return true;
	} else if (plane.testPoint(ray.origin) == PlaneSide.OnPlane) {
		// Ray is parallel but starts on the plane: the origin itself is reported as the intersection.
		if (intersection != null) intersection.set(ray.origin);
		return true;
	} else
		return false;
}
/** Intersects a line and a plane. The intersection is returned as the distance from the first point to the plane. In case an
 * intersection happened, the return value is in the range [0,1]. The intersection point can be recovered by point1 + t *
 * (point2 - point1) where t is the return value of this method. Returns -1 when the line is parallel to the plane and does
 * not lie on it, and 0 when the line lies on the plane.
 * @param x first point, x component
 * @param y first point, y component
 * @param z first point, z component
 * @param x2 second point, x component
 * @param y2 second point, y component
 * @param z2 second point, z component
 * @param plane the plane to intersect with */
public static float intersectLinePlane (float x, float y, float z, float x2, float y2, float z2, Plane plane,
	Vector3 intersection) {
	// NOTE(review): tmp/tmp2 are shared static scratch vectors — not thread-safe.
	Vector3 direction = tmp.set(x2, y2, z2).sub(x, y, z);
	Vector3 origin = tmp2.set(x, y, z);
	float denom = direction.dot(plane.getNormal());
	if (denom != 0) {
		float t = -(origin.dot(plane.getNormal()) + plane.getD()) / denom;
		// NOTE(review): intersection is written even when t lies outside [0,1] (i.e. the infinite
		// line crosses the plane but the segment itself does not) — confirm callers expect this.
		if (intersection != null) intersection.set(origin).add(direction.scl(t));
		return t;
	} else if (plane.testPoint(origin) == PlaneSide.OnPlane) {
		if (intersection != null) intersection.set(origin);
		return 0;
	}
	return -1;
}
// Shared scratch objects used by intersectRayTriangle below; being static, they make this class not thread-safe.
private static final Plane p = new Plane(new Vector3(), 0);
private static final Vector3 i = new Vector3();
/** Intersect a {@link Ray} and a triangle, returning the intersection point in intersection. Uses the Möller–Trumbore
 * algorithm, falling back to a coplanar point-in-triangle test when the ray is (nearly) parallel to the triangle plane.
 *
 * @param ray The ray
 * @param t1 The first vertex of the triangle
 * @param t2 The second vertex of the triangle
 * @param t3 The third vertex of the triangle
 * @param intersection The intersection point (optional)
 * @return True in case an intersection is present. */
public static boolean intersectRayTriangle (Ray ray, Vector3 t1, Vector3 t2, Vector3 t3, Vector3 intersection) {
	Vector3 edge1 = v0.set(t2).sub(t1);
	Vector3 edge2 = v1.set(t3).sub(t1);
	Vector3 pvec = v2.set(ray.direction).crs(edge2);
	float det = edge1.dot(pvec);
	if (MathUtils.isZero(det)) {
		// Ray is parallel to the triangle plane: it only "hits" if the origin lies on the plane and inside the triangle.
		p.set(t1, t2, t3);
		if (p.testPoint(ray.origin) == PlaneSide.OnPlane && Intersector.isPointInTriangle(ray.origin, t1, t2, t3)) {
			if (intersection != null) intersection.set(ray.origin);
			return true;
		}
		return false;
	}
	det = 1.0f / det;
	// u and v are barycentric coordinates; the hit is inside the triangle iff u >= 0, v >= 0 and u + v <= 1.
	Vector3 tvec = i.set(ray.origin).sub(t1);
	float u = tvec.dot(pvec) * det;
	if (u < 0.0f || u > 1.0f) return false;
	Vector3 qvec = tvec.crs(edge1);
	float v = ray.direction.dot(qvec) * det;
	if (v < 0.0f || u + v > 1.0f) return false;
	// t is the distance along the ray; negative means the triangle is behind the origin.
	float t = edge2.dot(qvec) * det;
	if (t < 0) return false;
	if (intersection != null) {
		// Snap near-zero distances to the origin to avoid tiny numerical offsets.
		if (t <= MathUtils.FLOAT_ROUNDING_ERROR) {
			intersection.set(ray.origin);
		} else {
			ray.getEndPoint(intersection, t);
		}
	}
	return true;
}
// Shared static scratch vectors (presumably used by ray-cast helpers elsewhere in this class — not referenced
// in the methods visible here); being static, they make this class not thread-safe.
private static final Vector3 dir = new Vector3();
private static final Vector3 start = new Vector3();
/** Intersects a {@link Ray} and a sphere, returning the intersection point in intersection.
 *
 * @param ray The ray, the direction component must be normalized before calling this method
 * @param center The center of the sphere
 * @param radius The radius of the sphere
 * @param intersection The intersection point (optional, can be null)
 * @return Whether an intersection is present. */
public static boolean intersectRaySphere (Ray ray, Vector3 center, float radius, Vector3 intersection) {
	// Distance along the (normalized) ray to the point closest to the sphere center.
	final float len = ray.direction.dot(center.x - ray.origin.x, center.y - ray.origin.y, center.z - ray.origin.z);
	// NOTE(review): this also rejects a ray whose origin is inside the sphere while the center lies
	// behind the origin — confirm that is the intended contract.
	if (len < 0.f) // behind the ray
		return false;
	// Squared distance from the sphere center to that closest point.
	final float dst2 = center.dst2(ray.origin.x + ray.direction.x * len, ray.origin.y + ray.direction.y * len,
		ray.origin.z + ray.direction.z * len);
	final float r2 = radius * radius;
	if (dst2 > r2) return false;
	// Step back from the closest point by the half-chord length to obtain the near intersection.
	if (intersection != null) intersection.set(ray.direction).scl(len - (float)Math.sqrt(r2 - dst2)).add(ray.origin);
	return true;
}
/** Intersects a {@link Ray} and a {@link BoundingBox}, returning the intersection point in intersection. This intersection is
 * defined as the point on the ray closest to the origin which is within the specified bounds.
 *
 * <p>
 * The returned intersection (if any) is guaranteed to be within the bounds of the bounding box, but it can occasionally
 * diverge slightly from ray, due to small floating-point errors.
 * </p>
 *
 * <p>
 * If the origin of the ray is inside the box, this method returns true and the intersection point is set to the origin of the
 * ray, accordingly to the definition above.
 * </p>
 *
 * @param ray The ray
 * @param box The box
 * @param intersection The intersection point (optional)
 * @return Whether an intersection is present. */
public static boolean intersectRayBounds (Ray ray, BoundingBox box, Vector3 intersection) {
	if (box.contains(ray.origin)) {
		if (intersection != null) intersection.set(ray.origin);
		return true;
	}
	float lowest = 0, t;
	boolean hit = false;
	// Test each of the six faces: only a face the origin is on the outside of, with the ray pointing
	// toward it, can be the entry face. v2 is a shared static scratch vector (not thread-safe).
	// min x
	if (ray.origin.x <= box.min.x && ray.direction.x > 0) {
		t = (box.min.x - ray.origin.x) / ray.direction.x;
		if (t >= 0) {
			v2.set(ray.direction).scl(t).add(ray.origin);
			if (v2.y >= box.min.y && v2.y <= box.max.y && v2.z >= box.min.z && v2.z <= box.max.z && (!hit || t < lowest)) {
				hit = true;
				lowest = t;
			}
		}
	}
	// max x
	if (ray.origin.x >= box.max.x && ray.direction.x < 0) {
		t = (box.max.x - ray.origin.x) / ray.direction.x;
		if (t >= 0) {
			v2.set(ray.direction).scl(t).add(ray.origin);
			if (v2.y >= box.min.y && v2.y <= box.max.y && v2.z >= box.min.z && v2.z <= box.max.z && (!hit || t < lowest)) {
				hit = true;
				lowest = t;
			}
		}
	}
	// min y
	if (ray.origin.y <= box.min.y && ray.direction.y > 0) {
		t = (box.min.y - ray.origin.y) / ray.direction.y;
		if (t >= 0) {
			v2.set(ray.direction).scl(t).add(ray.origin);
			if (v2.x >= box.min.x && v2.x <= box.max.x && v2.z >= box.min.z && v2.z <= box.max.z && (!hit || t < lowest)) {
				hit = true;
				lowest = t;
			}
		}
	}
	// max y
	if (ray.origin.y >= box.max.y && ray.direction.y < 0) {
		t = (box.max.y - ray.origin.y) / ray.direction.y;
		if (t >= 0) {
			v2.set(ray.direction).scl(t).add(ray.origin);
			if (v2.x >= box.min.x && v2.x <= box.max.x && v2.z >= box.min.z && v2.z <= box.max.z && (!hit || t < lowest)) {
				hit = true;
				lowest = t;
			}
		}
	}
	// min z
	if (ray.origin.z <= box.min.z && ray.direction.z > 0) {
		t = (box.min.z - ray.origin.z) / ray.direction.z;
		if (t >= 0) {
			v2.set(ray.direction).scl(t).add(ray.origin);
			if (v2.x >= box.min.x && v2.x <= box.max.x && v2.y >= box.min.y && v2.y <= box.max.y && (!hit || t < lowest)) {
				hit = true;
				lowest = t;
			}
		}
	}
	// max z
	if (ray.origin.z >= box.max.z && ray.direction.z < 0) {
		t = (box.max.z - ray.origin.z) / ray.direction.z;
		if (t >= 0) {
			v2.set(ray.direction).scl(t).add(ray.origin);
			if (v2.x >= box.min.x && v2.x <= box.max.x && v2.y >= box.min.y && v2.y <= box.max.y && (!hit || t < lowest)) {
				hit = true;
				lowest = t;
			}
		}
	}
	if (hit && intersection != null) {
		intersection.set(ray.direction).scl(lowest).add(ray.origin);
		// Clamp the result into the box to compensate for floating-point drift (see javadoc).
		if (intersection.x < box.min.x) {
			intersection.x = box.min.x;
		} else if (intersection.x > box.max.x) {
			intersection.x = box.max.x;
		}
		if (intersection.y < box.min.y) {
			intersection.y = box.min.y;
		} else if (intersection.y > box.max.y) {
			intersection.y = box.max.y;
		}
		if (intersection.z < box.min.z) {
			intersection.z = box.min.z;
		} else if (intersection.z > box.max.z) {
			intersection.z = box.max.z;
		}
	}
	return hit;
}
/** Quick check whether the given {@link Ray} and {@link BoundingBox} intersect.
 *
 * @param ray The ray
 * @param box The bounding box
 * @return Whether the ray and the bounding box intersect. */
static public boolean intersectRayBoundsFast (Ray ray, BoundingBox box) {
	// Delegates to the center/dimensions variant; tmp1/tmp2 are shared static scratch vectors (not thread-safe).
	return intersectRayBoundsFast(ray, box.getCenter(tmp1), box.getDimensions(tmp2));
}
/** Quick check whether the given {@link Ray} and {@link BoundingBox} intersect, using the slab method.
 *
 * @param ray The ray
 * @param center The center of the bounding box
 * @param dimensions The dimensions (width, height and depth) of the bounding box
 * @return Whether the ray and the bounding box intersect. */
static public boolean intersectRayBoundsFast (Ray ray, Vector3 center, Vector3 dimensions) {
	// A zero direction component yields +/-Infinity here (IEEE 754), which the min/max logic below
	// tolerates. NOTE(review): an origin component exactly on a slab with a zero direction component
	// produces NaN — confirm that edge case is acceptable to callers.
	final float divX = 1f / ray.direction.x;
	final float divY = 1f / ray.direction.y;
	final float divZ = 1f / ray.direction.z;
	// Entry/exit distances for the x slab, swapped so minx <= maxx.
	float minx = ((center.x - dimensions.x * .5f) - ray.origin.x) * divX;
	float maxx = ((center.x + dimensions.x * .5f) - ray.origin.x) * divX;
	if (minx > maxx) {
		final float t = minx;
		minx = maxx;
		maxx = t;
	}
	float miny = ((center.y - dimensions.y * .5f) - ray.origin.y) * divY;
	float maxy = ((center.y + dimensions.y * .5f) - ray.origin.y) * divY;
	if (miny > maxy) {
		final float t = miny;
		miny = maxy;
		maxy = t;
	}
	float minz = ((center.z - dimensions.z * .5f) - ray.origin.z) * divZ;
	float maxz = ((center.z + dimensions.z * .5f) - ray.origin.z) * divZ;
	if (minz > maxz) {
		final float t = minz;
		minz = maxz;
		maxz = t;
	}
	// The ray hits the box iff the three slab intervals overlap at some non-negative distance.
	float min = Math.max(Math.max(minx, miny), minz);
	float max = Math.min(Math.min(maxx, maxy), maxz);
	return max >= 0 && max >= min;
}
// Shared static scratch vectors for the intersectRayTriangles variants below; they make this class not thread-safe.
static Vector3 best = new Vector3();
static Vector3 tmp = new Vector3();
static Vector3 tmp1 = new Vector3();
static Vector3 tmp2 = new Vector3();
static Vector3 tmp3 = new Vector3();
/** Intersects the given ray with a list of triangles, returning the intersection point nearest to the ray origin in
 * intersection.
 *
 * @param ray The ray
 * @param triangles The triangles, each successive 9 elements are the 3 vertices of a triangle, a vertex is made of 3
 *           successive floats (XYZ)
 * @param intersection The nearest intersection point (optional)
 * @return Whether the ray and the triangles intersect. */
public static boolean intersectRayTriangles (Ray ray, float[] triangles, Vector3 intersection) {
	if (triangles.length % 9 != 0) throw new RuntimeException("triangles array size is not a multiple of 9");
	float nearest = Float.MAX_VALUE;
	boolean found = false;
	for (int offset = 0; offset < triangles.length; offset += 9) {
		tmp1.set(triangles[offset], triangles[offset + 1], triangles[offset + 2]);
		tmp2.set(triangles[offset + 3], triangles[offset + 4], triangles[offset + 5]);
		tmp3.set(triangles[offset + 6], triangles[offset + 7], triangles[offset + 8]);
		if (intersectRayTriangle(ray, tmp1, tmp2, tmp3, tmp)) {
			// Keep the hit closest to the origin; squared distance avoids a sqrt per triangle.
			float dist = ray.origin.dst2(tmp);
			if (dist < nearest) {
				nearest = dist;
				best.set(tmp);
				found = true;
			}
		}
	}
	if (!found) return false;
	if (intersection != null) intersection.set(best);
	return true;
}
/** Intersects the given ray with list of triangles. Returns the nearest intersection point in intersection
 *
 * @param ray The ray
 * @param vertices the vertices
 * @param indices the indices, each successive 3 shorts index the 3 vertices of a triangle
 * @param vertexSize the size of a vertex in floats
 * @param intersection The nearest intersection point (optional)
 * @return Whether the ray and the triangles intersect. */
public static boolean intersectRayTriangles (Ray ray, float[] vertices, short[] indices, int vertexSize,
	Vector3 intersection) {
	float min_dist = Float.MAX_VALUE;
	boolean hit = false;
	if (indices.length % 3 != 0) throw new RuntimeException("triangle list size is not a multiple of 3");
	for (int i = 0; i < indices.length; i += 3) {
		// Convert each vertex index into its float offset within the interleaved vertex array.
		int i1 = indices[i] * vertexSize;
		int i2 = indices[i + 1] * vertexSize;
		int i3 = indices[i + 2] * vertexSize;
		boolean result = intersectRayTriangle(ray, tmp1.set(vertices[i1], vertices[i1 + 1], vertices[i1 + 2]),
			tmp2.set(vertices[i2], vertices[i2 + 1], vertices[i2 + 2]),
			tmp3.set(vertices[i3], vertices[i3 + 1], vertices[i3 + 2]), tmp);
		if (result) {
			// Track the hit closest to the ray origin (squared distance avoids a sqrt per hit).
			float dist = ray.origin.dst2(tmp);
			if (dist < min_dist) {
				min_dist = dist;
				best.set(tmp);
				hit = true;
			}
		}
	}
	if (!hit)
		return false;
	else {
		if (intersection != null) intersection.set(best);
		return true;
	}
}
/** Intersects the given ray with list of triangles. Returns the nearest intersection point in intersection
 *
 * @param ray The ray
 * @param triangles The triangles, each successive 3 elements are the 3 vertices of a triangle
 * @param intersection The nearest intersection point (optional)
 * @return Whether the ray and the triangles intersect. */
public static boolean intersectRayTriangles (Ray ray, List<Vector3> triangles, Vector3 intersection) {
	float min_dist = Float.MAX_VALUE;
	boolean hit = false;
	if (triangles.size() % 3 != 0) throw new RuntimeException("triangle list size is not a multiple of 3");
	for (int i = 0; i < triangles.size(); i += 3) {
		boolean result = intersectRayTriangle(ray, triangles.get(i), triangles.get(i + 1), triangles.get(i + 2), tmp);
		if (result) {
			// Track the hit closest to the ray origin (squared distance avoids a sqrt per hit).
			float dist = ray.origin.dst2(tmp);
			if (dist < min_dist) {
				min_dist = dist;
				best.set(tmp);
				hit = true;
			}
		}
	}
	if (!hit)
		return false;
	else {
		if (intersection != null) intersection.set(best);
		return true;
	}
}
/**
 * Quick check whether the given {@link BoundingBox} and {@link Plane} intersect.
 *
 * @param box The bounding box
 * @param plane The plane
 * @return Whether the bounding box and the plane intersect. */
public static boolean intersectBoundsPlaneFast (BoundingBox box, Plane plane) {
	// Delegates with the box's center and half-extents; tmp1/tmp2 are shared static scratch vectors (not thread-safe).
	return intersectBoundsPlaneFast(box.getCenter(tmp1), box.getDimensions(tmp2).scl(0.5f), plane.normal, plane.d);
}
/**
 * Quick check whether the given bounding box and a plane intersect.
 * Code adapted from Christer Ericson's Real Time Collision
 *
 * @param center The center of the bounding box
 * @param halfDimensions Half of the dimensions (width, height and depth) of the bounding box
 * @param normal The normal of the plane
 * @param distance The distance of the plane
 * @return Whether the bounding box and the plane intersect. */
public static boolean intersectBoundsPlaneFast (Vector3 center, Vector3 halfDimensions, Vector3 normal, float distance) {
	// Radius of the box's projection interval onto the plane normal.
	float projectedRadius = halfDimensions.x * Math.abs(normal.x)
		+ halfDimensions.y * Math.abs(normal.y)
		+ halfDimensions.z * Math.abs(normal.z);
	// Signed distance of the box center from the plane.
	float signedDistance = normal.dot(center) - distance;
	// They intersect when that distance falls within [-radius, +radius].
	return Math.abs(signedDistance) <= projectedRadius;
}
/** Intersects the two lines and returns the intersection point in intersection.
 *
 * @param p1 The first point of the first line
 * @param p2 The second point of the first line
 * @param p3 The first point of the second line
 * @param p4 The second point of the second line
 * @param intersection The intersection point. May be null.
 * @return Whether the two lines intersect */
public static boolean intersectLines (Vector2 p1, Vector2 p2, Vector2 p3, Vector2 p4, Vector2 intersection) {
	// Delegate to the primitive overload, which performs the identical computation.
	return intersectLines(p1.x, p1.y, p2.x, p2.y, p3.x, p3.y, p4.x, p4.y, intersection);
}
/** Intersects the two lines and returns the intersection point in intersection.
 * @param intersection The intersection point, or null.
 * @return Whether the two lines intersect */
public static boolean intersectLines (float x1, float y1, float x2, float y2, float x3, float y3, float x4, float y4,
	Vector2 intersection) {
	// d is the 2D cross product of the two direction vectors; zero means the lines are parallel.
	float d = (y4 - y3) * (x2 - x1) - (x4 - x3) * (y2 - y1);
	if (d == 0) return false;
	if (intersection != null) {
		// ua parameterizes the intersection along the first line; not clamped, since these are infinite lines.
		float ua = ((x4 - x3) * (y1 - y3) - (y4 - y3) * (x1 - x3)) / d;
		intersection.set(x1 + (x2 - x1) * ua, y1 + (y2 - y1) * ua);
	}
	return true;
}
/** Check whether the given line and {@link Polygon} intersect.
 * @param p1 The first point of the line
 * @param p2 The second point of the line
 * @param polygon The polygon
 * @return Whether polygon and line intersects */
public static boolean intersectLinePolygon (Vector2 p1, Vector2 p2, Polygon polygon) {
	float[] vertices = polygon.getTransformedVertices();
	float x1 = p1.x, y1 = p1.y, x2 = p2.x, y2 = p2.y;
	int n = vertices.length;
	// Start with the closing edge (last vertex -> first vertex), then walk every edge of the polygon.
	float x3 = vertices[n - 2], y3 = vertices[n - 1];
	for (int i = 0; i < n; i += 2) {
		float x4 = vertices[i], y4 = vertices[i + 1];
		// d is the 2D cross product of the edge and line direction vectors; zero means they are parallel.
		float d = (y4 - y3) * (x2 - x1) - (x4 - x3) * (y2 - y1);
		if (d != 0) {
			float yd = y1 - y3;
			float xd = x1 - x3;
			// ua is the intersection parameter; within [0,1] means the crossing lies on the bounded span.
			float ua = ((x4 - x3) * yd - (y4 - y3) * xd) / d;
			if (ua >= 0 && ua <= 1) {
				return true;
			}
		}
		x3 = x4;
		y3 = y4;
	}
	return false;
}
/** Determines whether the given rectangles intersect and, if they do, sets the supplied {@code intersection} rectangle to the
 * area of overlap.
 * @return Whether the rectangles intersect */
static public boolean intersectRectangles (Rectangle rectangle1, Rectangle rectangle2, Rectangle intersection) {
	if (!rectangle1.overlaps(rectangle2)) return false;
	// The overlap is the intersection of the two [min, max] intervals on each axis.
	float right = Math.min(rectangle1.x + rectangle1.width, rectangle2.x + rectangle2.width);
	intersection.x = Math.max(rectangle1.x, rectangle2.x);
	intersection.width = right - intersection.x;
	float top = Math.min(rectangle1.y + rectangle1.height, rectangle2.y + rectangle2.height);
	intersection.y = Math.max(rectangle1.y, rectangle2.y);
	intersection.height = top - intersection.y;
	return true;
}
/** Determines whether the given rectangle and segment intersect
 * @param startX x-coordinate start of line segment
 * @param startY y-coordinate start of line segment
 * @param endX x-coordinate end of line segment
 * @param endY y-coordinate end of line segment
 * @param rectangle rectangle that is being tested for collision
 * @return whether the rectangle intersects with the line segment */
public static boolean intersectSegmentRectangle (float startX, float startY, float endX, float endY, Rectangle rectangle) {
	float rectangleEndX = rectangle.x + rectangle.width;
	float rectangleEndY = rectangle.y + rectangle.height;
	// Test the segment against each of the rectangle's four edges.
	if (intersectSegments(startX, startY, endX, endY, rectangle.x, rectangle.y, rectangle.x, rectangleEndY, null)) return true;
	if (intersectSegments(startX, startY, endX, endY, rectangle.x, rectangle.y, rectangleEndX, rectangle.y, null)) return true;
	if (intersectSegments(startX, startY, endX, endY, rectangleEndX, rectangle.y, rectangleEndX, rectangleEndY, null))
		return true;
	if (intersectSegments(startX, startY, endX, endY, rectangle.x, rectangleEndY, rectangleEndX, rectangleEndY, null))
		return true;
	// No edge was crossed: the segment intersects only if it lies entirely inside the rectangle.
	return rectangle.contains(startX, startY);
}
/** {@link #intersectSegmentRectangle(float, float, float, float, Rectangle)} */
public static boolean intersectSegmentRectangle (Vector2 start, Vector2 end, Rectangle rectangle) {
	// Convenience overload delegating to the primitive variant.
	return intersectSegmentRectangle(start.x, start.y, end.x, end.y, rectangle);
}
/** Check whether the given line segment and {@link Polygon} intersect.
 * @param p1 The first point of the segment
 * @param p2 The second point of the segment
 * @return Whether polygon and segment intersect */
public static boolean intersectSegmentPolygon (Vector2 p1, Vector2 p2, Polygon polygon) {
	float[] vertices = polygon.getTransformedVertices();
	float x1 = p1.x, y1 = p1.y, x2 = p2.x, y2 = p2.y;
	int n = vertices.length;
	// Start with the closing edge (last vertex -> first vertex), then walk every edge of the polygon.
	float x3 = vertices[n - 2], y3 = vertices[n - 1];
	for (int i = 0; i < n; i += 2) {
		float x4 = vertices[i], y4 = vertices[i + 1];
		// d is the 2D cross product of the edge and segment direction vectors; zero means they are parallel.
		float d = (y4 - y3) * (x2 - x1) - (x4 - x3) * (y2 - y1);
		if (d != 0) {
			float yd = y1 - y3;
			float xd = x1 - x3;
			// ua and ub are the intersection parameters; both must be within [0,1] for the bounded spans to cross.
			float ua = ((x4 - x3) * yd - (y4 - y3) * xd) / d;
			if (ua >= 0 && ua <= 1) {
				float ub = ((x2 - x1) * yd - (y2 - y1) * xd) / d;
				if (ub >= 0 && ub <= 1) {
					return true;
				}
			}
		}
		x3 = x4;
		y3 = y4;
	}
	return false;
}
/** Intersects the two line segments and returns the intersection point in intersection.
 *
 * @param p1 The first point of the first line segment
 * @param p2 The second point of the first line segment
 * @param p3 The first point of the second line segment
 * @param p4 The second point of the second line segment
 * @param intersection The intersection point. May be null.
 * @return Whether the two line segments intersect */
public static boolean intersectSegments (Vector2 p1, Vector2 p2, Vector2 p3, Vector2 p4, Vector2 intersection) {
	// Delegate to the primitive overload, which performs the identical computation.
	return intersectSegments(p1.x, p1.y, p2.x, p2.y, p3.x, p3.y, p4.x, p4.y, intersection);
}
/** Intersects the two line segments given by their end points, returning the intersection point in intersection.
 * @param intersection May be null.
 * @return Whether the two line segments intersect */
public static boolean intersectSegments (float x1, float y1, float x2, float y2, float x3, float y3, float x4, float y4,
	Vector2 intersection) {
	// d is the 2D cross product of the segment direction vectors; zero means parallel (or degenerate).
	float d = (y4 - y3) * (x2 - x1) - (x4 - x3) * (y2 - y1);
	if (d == 0) return false;
	float yd = y1 - y3;
	float xd = x1 - x3;
	// ua and ub are the intersection parameters along each segment; both must fall within [0,1].
	float ua = ((x4 - x3) * yd - (y4 - y3) * xd) / d;
	if (ua < 0 || ua > 1) return false;
	float ub = ((x2 - x1) * yd - (y2 - y1) * xd) / d;
	if (ub < 0 || ub > 1) return false;
	if (intersection != null) intersection.set(x1 + (x2 - x1) * ua, y1 + (y2 - y1) * ua);
	return true;
}
/** Determinant of the 2x2 matrix [[a, b], [c, d]]. */
static float det (float a, float b, float c, float d) {
	return d * a - c * b;
}
/** Determinant of the 2x2 matrix [[a, b], [c, d]], in double precision. */
static double detd (double a, double b, double c, double d) {
	return d * a - c * b;
}
/** Returns whether the two circles overlap; delegates to {@link Circle#overlaps(Circle)}. */
public static boolean overlaps (Circle c1, Circle c2) {
	return c1.overlaps(c2);
}
/** Returns whether the two rectangles overlap; delegates to {@link Rectangle#overlaps(Rectangle)}. */
public static boolean overlaps (Rectangle r1, Rectangle r2) {
	return r1.overlaps(r2);
}
/** Returns whether the given {@link Circle} and {@link Rectangle} overlap. Finds the rectangle's point nearest to the
 * circle center and compares its squared distance against the squared radius. Uses a strict inequality, so mere tangency
 * does not count as an overlap. */
public static boolean overlaps (Circle c, Rectangle r) {
	// Clamp the circle center onto the rectangle, one axis at a time.
	float nearestX = c.x;
	float nearestY = c.y;
	if (c.x < r.x)
		nearestX = r.x;
	else if (c.x > r.x + r.width) nearestX = r.x + r.width;
	if (c.y < r.y)
		nearestY = r.y;
	else if (c.y > r.y + r.height) nearestY = r.y + r.height;
	float dx = nearestX - c.x;
	float dy = nearestY - c.y;
	return dx * dx + dy * dy < c.radius * c.radius;
}
/** Check whether specified counter-clockwise wound convex polygons overlap.
 * @param p1 The first polygon.
 * @param p2 The second polygon.
 * @return Whether polygons overlap. */
public static boolean overlapConvexPolygons (Polygon p1, Polygon p2) {
	// Convenience overload without a Minimum Translation Vector.
	return overlapConvexPolygons(p1, p2, null);
}
/** Check whether specified counter-clockwise wound convex polygons overlap. If they do, optionally obtain a Minimum
 * Translation Vector indicating the minimum magnitude vector required to push the polygon p1 out of collision with polygon p2.
 * @param p1 The first polygon.
 * @param p2 The second polygon.
 * @param mtv A Minimum Translation Vector to fill in the case of a collision, or null (optional).
 * @return Whether polygons overlap. */
public static boolean overlapConvexPolygons (Polygon p1, Polygon p2, MinimumTranslationVector mtv) {
	// Delegates with the polygons' world-space (transformed) vertices.
	return overlapConvexPolygons(p1.getTransformedVertices(), p2.getTransformedVertices(), mtv);
}
/** @see #overlapConvexPolygons(float[], int, int, float[], int, int, MinimumTranslationVector) */
public static boolean overlapConvexPolygons (float[] verts1, float[] verts2, MinimumTranslationVector mtv) {
	// Convenience overload treating each whole array as one polygon.
	return overlapConvexPolygons(verts1, 0, verts1.length, verts2, 0, verts2.length, mtv);
}
/** Check whether polygons defined by the given counter-clockwise wound vertex arrays overlap, using the Separating Axis
 * Theorem. If they do, optionally obtain a Minimum Translation Vector indicating the minimum magnitude vector required to
 * push the polygon defined by verts1 out of the collision with the polygon defined by verts2.
 * @param verts1 Vertices of the first polygon.
 * @param offset1 Offset (in array elements) of the first polygon's vertices inside verts1.
 * @param count1 Number of array elements (2 per vertex) making up the first polygon.
 * @param verts2 Vertices of the second polygon.
 * @param offset2 Offset (in array elements) of the second polygon's vertices inside verts2.
 * @param count2 Number of array elements (2 per vertex) making up the second polygon.
 * @param mtv A Minimum Translation Vector to fill in the case of a collision, or null (optional).
 * @return Whether polygons overlap. */
public static boolean overlapConvexPolygons (float[] verts1, int offset1, int count1, float[] verts2, int offset2, int count2,
	MinimumTranslationVector mtv) {
	float overlap = Float.MAX_VALUE;
	float smallestAxisX = 0;
	float smallestAxisY = 0;
	int numInNormalDir;
	int end1 = offset1 + count1;
	int end2 = offset2 + count2;
	// SAT: the polygons are disjoint iff some edge normal of either polygon separates their projections.
	// Get polygon1 axes
	for (int i = offset1; i < end1; i += 2) {
		float x1 = verts1[i];
		float y1 = verts1[i + 1];
		// Wrap to the polygon's first vertex within its range. The previous `(i + 2) % count1` ignored
		// offset1 and indexed outside the polygon's slice whenever offset1 > 0.
		int next = i + 2 < end1 ? i + 2 : offset1;
		float x2 = verts1[next];
		float y2 = verts1[next + 1];
		// Edge normal (perpendicular of the edge direction), normalized.
		float axisX = y1 - y2;
		float axisY = -(x1 - x2);
		final float length = (float)Math.sqrt(axisX * axisX + axisY * axisY);
		axisX /= length;
		axisY /= length;
		// -- Begin check for separation on this axis --//
		// Project polygon1 onto this axis, seeding from its own first vertex (not array index 0).
		float min1 = axisX * verts1[offset1] + axisY * verts1[offset1 + 1];
		float max1 = min1;
		for (int j = offset1; j < end1; j += 2) {
			float p = axisX * verts1[j] + axisY * verts1[j + 1];
			if (p < min1) {
				min1 = p;
			} else if (p > max1) {
				max1 = p;
			}
		}
		// Project polygon2 onto this axis
		numInNormalDir = 0;
		float min2 = axisX * verts2[offset2] + axisY * verts2[offset2 + 1];
		float max2 = min2;
		for (int j = offset2; j < end2; j += 2) {
			// Counts the number of points that are within the projected area.
			numInNormalDir -= pointLineSide(x1, y1, x2, y2, verts2[j], verts2[j + 1]);
			float p = axisX * verts2[j] + axisY * verts2[j + 1];
			if (p < min2) {
				min2 = p;
			} else if (p > max2) {
				max2 = p;
			}
		}
		if (!(min1 <= min2 && max1 >= min2 || min2 <= min1 && max2 >= min1)) {
			// Found a separating axis: no overlap.
			return false;
		} else {
			float o = Math.min(max1, max2) - Math.max(min1, min2);
			if (min1 < min2 && max1 > max2 || min2 < min1 && max2 > max1) {
				// One projection fully contains the other: add the smaller escape distance.
				float mins = Math.abs(min1 - min2);
				float maxs = Math.abs(max1 - max2);
				if (mins < maxs) {
					o += mins;
				} else {
					o += maxs;
				}
			}
			if (o < overlap) {
				overlap = o;
				// Adjusts the direction based on the number of points found
				smallestAxisX = numInNormalDir >= 0 ? axisX : -axisX;
				smallestAxisY = numInNormalDir >= 0 ? axisY : -axisY;
			}
		}
		// -- End check for separation on this axis --//
	}
	// Get polygon2 axes (same procedure, with the edge normals taken from polygon2).
	for (int i = offset2; i < end2; i += 2) {
		float x1 = verts2[i];
		float y1 = verts2[i + 1];
		// Wrap within polygon2's range (same offset fix as above).
		int next = i + 2 < end2 ? i + 2 : offset2;
		float x2 = verts2[next];
		float y2 = verts2[next + 1];
		float axisX = y1 - y2;
		float axisY = -(x1 - x2);
		final float length = (float)Math.sqrt(axisX * axisX + axisY * axisY);
		axisX /= length;
		axisY /= length;
		// -- Begin check for separation on this axis --//
		numInNormalDir = 0;
		// Project polygon1 onto this axis
		float min1 = axisX * verts1[offset1] + axisY * verts1[offset1 + 1];
		float max1 = min1;
		for (int j = offset1; j < end1; j += 2) {
			float p = axisX * verts1[j] + axisY * verts1[j + 1];
			// Counts the number of points that are within the projected area.
			numInNormalDir -= pointLineSide(x1, y1, x2, y2, verts1[j], verts1[j + 1]);
			if (p < min1) {
				min1 = p;
			} else if (p > max1) {
				max1 = p;
			}
		}
		// Project polygon2 onto this axis
		float min2 = axisX * verts2[offset2] + axisY * verts2[offset2 + 1];
		float max2 = min2;
		for (int j = offset2; j < end2; j += 2) {
			float p = axisX * verts2[j] + axisY * verts2[j + 1];
			if (p < min2) {
				min2 = p;
			} else if (p > max2) {
				max2 = p;
			}
		}
		if (!(min1 <= min2 && max1 >= min2 || min2 <= min1 && max2 >= min1)) {
			return false;
		} else {
			float o = Math.min(max1, max2) - Math.max(min1, min2);
			if (min1 < min2 && max1 > max2 || min2 < min1 && max2 > max1) {
				float mins = Math.abs(min1 - min2);
				float maxs = Math.abs(max1 - max2);
				if (mins < maxs) {
					o += mins;
				} else {
					o += maxs;
				}
			}
			if (o < overlap) {
				overlap = o;
				// Adjusts the direction based on the number of points found
				smallestAxisX = numInNormalDir < 0 ? axisX : -axisX;
				smallestAxisY = numInNormalDir < 0 ? axisY : -axisY;
			}
		}
		// -- End check for separation on this axis --//
	}
	if (mtv != null) {
		mtv.normal.set(smallestAxisX, smallestAxisY);
		mtv.depth = overlap;
	}
	return true;
}
/** Splits the triangle by the plane. The result is stored in the SplitTriangle instance. Depending on where the triangle is
 * relative to the plane, the result can be:
 *
 * <ul>
 * <li>Triangle is fully in front/behind: {@link SplitTriangle#front} or {@link SplitTriangle#back} will contain the original
 * triangle, {@link SplitTriangle#total} will be one.</li>
 * <li>Triangle has two vertices in front, one behind: {@link SplitTriangle#front} contains 2 triangles,
 * {@link SplitTriangle#back} contains 1 triangles, {@link SplitTriangle#total} will be 3.</li>
 * <li>Triangle has one vertex in front, two behind: {@link SplitTriangle#front} contains 1 triangle,
 * {@link SplitTriangle#back} contains 2 triangles, {@link SplitTriangle#total} will be 3.</li>
 * </ul>
 *
 * The input triangle should have the form: x, y, z, x2, y2, z2, x3, y3, z3. One can add additional attributes per vertex which
 * will be interpolated if split, such as texture coordinates or normals. Note that these additional attributes won't be
 * normalized, as might be necessary in case of normals.
 *
 * @param triangle the triangle vertices (3 * stride floats)
 * @param plane the plane to split against
 * @param split output SplitTriangle */
public static void splitTriangle (float[] triangle, Plane plane, SplitTriangle split) {
	// stride = floats per vertex (>= 3: position plus optional extra attributes).
	int stride = triangle.length / 3;
	// Classify each vertex: true means the vertex is behind the plane.
	boolean r1 = plane.testPoint(triangle[0], triangle[1], triangle[2]) == PlaneSide.Back;
	boolean r2 = plane.testPoint(triangle[0 + stride], triangle[1 + stride], triangle[2 + stride]) == PlaneSide.Back;
	boolean r3 = plane.testPoint(triangle[0 + stride * 2], triangle[1 + stride * 2],
		triangle[2 + stride * 2]) == PlaneSide.Back;
	split.reset();
	// easy case, triangle is on one side (point on plane means front).
	if (r1 == r2 && r2 == r3) {
		split.total = 1;
		if (r1) {
			split.numBack = 1;
			System.arraycopy(triangle, 0, split.back, 0, triangle.length);
		} else {
			split.numFront = 1;
			System.arraycopy(triangle, 0, split.front, 0, triangle.length);
		}
		return;
	}
	// set number of triangles
	split.total = 3;
	split.numFront = (r1 ? 0 : 1) + (r2 ? 0 : 1) + (r3 ? 0 : 1);
	split.numBack = split.total - split.numFront;
	// hard case, split the three edges on the plane
	// determine which array to fill first, front or back, flip if we
	// cross the plane
	split.setSide(!r1);
	// split first edge
	int first = 0;
	int second = stride;
	if (r1 != r2) {
		// split the edge
		splitEdge(triangle, first, second, stride, plane, split.edgeSplit, 0);
		// add first edge vertex and new vertex to current side
		split.add(triangle, first, stride);
		split.add(split.edgeSplit, 0, stride);
		// flip side and add new vertex and second edge vertex to current side
		split.setSide(!split.getSide());
		split.add(split.edgeSplit, 0, stride);
	} else {
		// add both vertices
		split.add(triangle, first, stride);
	}
	// split second edge
	first = stride;
	second = stride + stride;
	if (r2 != r3) {
		// split the edge
		splitEdge(triangle, first, second, stride, plane, split.edgeSplit, 0);
		// add first edge vertex and new vertex to current side
		split.add(triangle, first, stride);
		split.add(split.edgeSplit, 0, stride);
		// flip side and add new vertex and second edge vertex to current side
		split.setSide(!split.getSide());
		split.add(split.edgeSplit, 0, stride);
	} else {
		// add both vertices
		split.add(triangle, first, stride);
	}
	// split third edge
	first = stride + stride;
	second = 0;
	if (r3 != r1) {
		// split the edge
		splitEdge(triangle, first, second, stride, plane, split.edgeSplit, 0);
		// add first edge vertex and new vertex to current side
		split.add(triangle, first, stride);
		split.add(split.edgeSplit, 0, stride);
		// flip side and add new vertex and second edge vertex to current side
		split.setSide(!split.getSide());
		split.add(split.edgeSplit, 0, stride);
	} else {
		// add both vertices
		split.add(triangle, first, stride);
	}
	// triangulate the side with 2 triangles
	// The side that received 4 vertices (a quad) is turned into 2 triangles by duplicating vertices.
	if (split.numFront == 2) {
		System.arraycopy(split.front, stride * 2, split.front, stride * 3, stride * 2);
		System.arraycopy(split.front, 0, split.front, stride * 5, stride);
	} else {
		System.arraycopy(split.back, stride * 2, split.back, stride * 3, stride * 2);
		System.arraycopy(split.back, 0, split.back, stride * 5, stride);
	}
}
/** Scratch vector reused by {@link #splitEdge} to avoid a per-call allocation (makes splitting non-thread-safe). */
static Vector3 intersection = new Vector3();

/** Splits the triangle edge running from vertex offset {@code s} to vertex offset {@code e} at its intersection with
 * {@code plane}, writing the resulting vertex into {@code split} starting at {@code offset}. The position (first three
 * floats) comes from the line/plane intersection; every remaining attribute (normal, uv, ...) is linearly interpolated
 * with the same parameter t returned by {@code Intersector.intersectLinePlane}.
 *
 * @param vertices source vertex data
 * @param s float offset of the edge's start vertex
 * @param e float offset of the edge's end vertex
 * @param stride number of floats per vertex, must be &gt;= 3
 * @param plane the splitting plane
 * @param split destination array receiving the interpolated vertex
 * @param offset float offset to write at in {@code split} */
private static void splitEdge (float[] vertices, int s, int e, int stride, Plane plane, float[] split, int offset) {
	float t = Intersector.intersectLinePlane(vertices[s], vertices[s + 1], vertices[s + 2], vertices[e], vertices[e + 1],
		vertices[e + 2], plane, intersection);
	split[offset] = intersection.x;
	split[offset + 1] = intersection.y;
	split[offset + 2] = intersection.z;
	// Interpolate all non-position attributes with the same parameter t.
	for (int attr = 3; attr < stride; attr++) {
		float start = vertices[s + attr];
		float end = vertices[e + attr];
		split[offset + attr] = start + (end - start) * t;
	}
}
/** Result container for splitting a single triangle by a plane: up to two triangles worth of interleaved vertex data on
 * the front side and up to two on the back side. The writer selects the active side via {@link #setSide(boolean)} and
 * appends vertices with {@link #add(float[], int, int)}. */
public static class SplitTriangle {
	public float[] front;
	public float[] back;
	float[] edgeSplit;
	public int numFront;
	public int numBack;
	public int total;
	boolean frontCurrent = false;
	int frontOffset = 0;
	int backOffset = 0;

	/** Creates a new instance, assuming numAttributes attributes per triangle vertex.
	 * @param numAttributes must be >= 3 */
	public SplitTriangle (int numAttributes) {
		// Room for two full triangles per side plus one scratch vertex for edge splits.
		front = new float[numAttributes * 3 * 2];
		back = new float[numAttributes * 3 * 2];
		edgeSplit = new float[numAttributes];
	}

	@Override
	public String toString () {
		StringBuilder sb = new StringBuilder("SplitTriangle [front=");
		sb.append(Arrays.toString(front));
		sb.append(", back=").append(Arrays.toString(back));
		sb.append(", numFront=").append(numFront);
		sb.append(", numBack=").append(numBack);
		sb.append(", total=").append(total);
		sb.append(']');
		return sb.toString();
	}

	/** Selects which side subsequent {@link #add} calls write to: true for front, false for back. */
	void setSide (boolean front) {
		frontCurrent = front;
	}

	/** @return true when the front side is currently being written. */
	boolean getSide () {
		return frontCurrent;
	}

	/** Appends {@code stride} floats from {@code vertex} (starting at {@code offset}) to the active side. */
	void add (float[] vertex, int offset, int stride) {
		float[] dst = frontCurrent ? front : back;
		int dstOffset = frontCurrent ? frontOffset : backOffset;
		System.arraycopy(vertex, offset, dst, dstOffset, stride);
		if (frontCurrent)
			frontOffset += stride;
		else
			backOffset += stride;
	}

	/** Clears all counters and write positions so the instance can be reused for the next triangle. */
	void reset () {
		frontCurrent = false;
		frontOffset = 0;
		backOffset = 0;
		numFront = 0;
		numBack = 0;
		total = 0;
	}
}
/** Minimum translation required to separate two polygons. Filled in by the polygon overlap tests; both fields hold their
 * defaults (zero vector, zero depth) until a separation has been computed — presumably by the overlap methods of the
 * enclosing class (verify against callers). */
public static class MinimumTranslationVector {
	/** Unit length vector that indicates the direction for the separation */
	public Vector2 normal = new Vector2();
	/** Distance of the translation required for the separation */
	public float depth = 0;
}
}
|
luoyuan800/NeverEnd | dataModel/src/cn/luo/yuan/maze/model/Parameter.java | package cn.luo.yuan.maze.model;
/**
* Copyright @Luo
* Created by <NAME> on 8/16/2017.
*/
/** String keys used to pass named values between components of the maze game.
 * Fix: removed the stray double semicolons after TARGET and CONTEXT (harmless
 * empty declarations, but flagged by linters and misleading to readers). */
public class Parameter {
	/** Key for the target of an operation. */
	public static final String TARGET = "target";
	/** Key for the (Android) context object. */
	public static final String CONTEXT = "context";
	/** Key for a count value. */
	public static final String COUNT = "count";
	/** Key for a random-source object. */
	public static final String RANDOM = "random";
	/** Key for a user-visible message. */
	public static final String MESSAGE = "message";
	/** Key for the minimum harm of an attack. */
	public static final String MINHARM = "minHarm";
	/** Key for the attacking entity. */
	public static final String ATKER = "atker";
	/** Key for the defending entity. */
	public static final String DEFENDER = "defender";
}
|
ZXin0305/hri | demo.py | import sys
sys.path.append('/home/xuchengjun/ZXin/smap')
import argparse
import os
import cv2
import numpy as np
import torch
import random
import rospy
from sensor_msgs.msg import Image
from cv_bridge import CvBridge
from human_pose_msg.msg import HumanList, Human, PointCoors
from time import time
from exps.stage3_root2.test import generate_3d_point_pairs
# from model.main_model.smap import SMAP
# from model.main_model.mode_1 import SMAP_ #with mask
from model.main_model.new_model import SMAP_new
# from model.main_model.model_tmp import SMAP_tmp as SMAP
from model.refine_model.refinenet import RefineNet
from model.action.EARN import EARN
# from model.action.EARN_v2 import EARN
# from model.action.vsgcnn import VSGCNN
import dapalib_light
import dapalib
from exps.stage3_root2.config import cfg
from path import Path
from IPython import embed
from torchvision.transforms import transforms
from matplotlib import pyplot as plt
from lib.utils.tools import *
from lib.utils.camera_wrapper import CustomDataset, VideoReader, CameraReader, CameraInfo
from lib.utils.track_pose import *
from torch.utils.data import DataLoader
import copy
from exps.stage3_root2.test_util import *
import csv
import h5py
from lib.collect_action.collect import *
from tqdm import tqdm
import time
from torchsummary import summary
def process_single_image(model, refine_model, cfg, device, img_path, angle, json_path=None):
    """Run the full single-person 3D pose pipeline on one image file.

    Pipeline: read image -> crop/pad + normalize -> SMAP forward pass ->
    2D keypoint association (dapalib) -> depth read-out -> 3D lifting ->
    optional RefineNet refinement -> augmentation + normalization into the
    action-recognition pose format.

    Note:
        The order of the predicted 2D coordinates is [x, y, Z, score]
        (not [y, x, Z, score]); ground-truth data uses the same order.

    Args:
        model: trained SMAP network, already on ``device``.
        refine_model: optional RefineNet; refinement is skipped when None.
        cfg: experiment configuration (input shape, keypoint counts, ...).
        device: torch device used for inference.
        img_path: path of the image to process.
        angle: rotation angle(s) forwarded to ``augment_pose``.
        json_path: unused; kept for backward compatibility of the signature.

    Returns:
        The normalized 3D pose produced by ``change_pose``, or None when no
        person — or more than one person — is detected.

    Changes vs. original: removed the unused ``time_`` accumulator and dead
    commented-out debug code; flattened the redundant ``else`` after the
    early returns. Behavior is unchanged.
    """
    model.eval()
    if refine_model is not None:
        refine_model.eval()
    image = cv2.imread(img_path, cv2.IMREAD_COLOR)
    img_scale, scales, pad_value = croppad_img(image, cfg)
    # Hard-coded intrinsics of the capture camera (fx, fy, cx, cy).
    # NOTE(review): these override whatever croppad_img produced — confirm
    # they match the camera the images come from.
    scales['f_x'] = 1059.95
    scales['f_y'] = 1053.93
    scales['cx'] = 954.88
    scales['cy'] = 523.74
    stride = cfg.dataset.STRIDE
    img_trans = transform(img_scale, cfg)
    img_trans = img_trans.unsqueeze(0).to(device)
    with torch.no_grad():
        outputs_2d, outputs_3d, outputs_rd = model(img_trans)
        outputs_3d = outputs_3d.cpu()
        outputs_rd = outputs_rd.cpu()
        hmsIn = outputs_2d[0]
        # Undo the training-time scaling of the output maps.
        hmsIn[:cfg.DATASET.KEYPOINT.NUM] /= 255  # keypoint heatmaps
        hmsIn[cfg.DATASET.KEYPOINT.NUM:] /= 127  # paf maps
        rDepth = outputs_rd[0][0]
        # Group keypoints into per-person skeletons -> tensor (num_person, 15, 4).
        pred_bodys_2d = dapalib.connect(hmsIn, rDepth, cfg.DATASET.ROOT_IDX, distFlag=True)
    if pred_bodys_2d.shape[0] == 0:
        print('here is no people ..')
        return None
    if pred_bodys_2d.shape[0] >= 2:
        # This entry point expects exactly one person; more is treated as a
        # false detection.
        print('误检测 ..')
        return None
    # Map heatmap coordinates back to network-input resolution.
    pred_bodys_2d[:, :, :2] *= stride
    pred_bodys_2d = pred_bodys_2d.numpy()
    pafs_3d = outputs_3d[0].numpy().transpose(1, 2, 0)  # part relative-depth maps, (h, w, c)
    root_d = outputs_rd[0][0].numpy()                   # root-depth map
    # Upsample the low-resolution maps to the network input size to obtain a
    # more accurate depth read-out.
    paf_3d_upsamp = cv2.resize(
        pafs_3d, (cfg.INPUT_SHAPE[1], cfg.INPUT_SHAPE[0]), interpolation=cv2.INTER_NEAREST)
    root_d_upsamp = cv2.resize(
        root_d, (cfg.INPUT_SHAPE[1], cfg.INPUT_SHAPE[0]), interpolation=cv2.INTER_NEAREST)
    pred_rdepths = generate_relZ(pred_bodys_2d, paf_3d_upsamp, root_d_upsamp, scales)
    pred_bodys_3d = gen_3d_pose(pred_bodys_2d, pred_rdepths, scales, pad_value)
    if refine_model is not None:
        # RefineNet lifts and refines the coarse 3D estimate.
        new_pred_bodys_3d = lift_and_refine_3d_pose(pred_bodys_2d, pred_bodys_3d, refine_model,
                                                    device=device, root_n=cfg.DATASET.ROOT_IDX)
    else:
        new_pred_bodys_3d = pred_bodys_3d  # shape (num_person, 15, 4)
    # Re-center the root joint (index 0) as the midpoint of joints 3 and 9.
    for i in range(new_pred_bodys_3d.shape[0]):
        new_pred_bodys_3d[i, 0, :3] = (np.array(new_pred_bodys_3d[i, 3, :3]) +
                                       np.array(new_pred_bodys_3d[i, 9, :3])) / 2
    aug_pose_3d = augment_pose(pred_3d_bodys=new_pred_bodys_3d, angles=angle)
    # Every frame is normalized before conversion to the action format.
    norm_pose_3d = pose_normalization(aug_pose_3d[0])[:, :3]
    pose_3d = change_pose(norm_pose_3d)
    return pose_3d
def process_video(model, refine_model, action_model, frame_provider, cfg, device):
    """Run multi-person 3D pose estimation + id tracking on a video stream and
    display the result in an OpenCV window.

    Args:
        model: trained SMAP network.
        refine_model: optional RefineNet; refinement is skipped when None.
        action_model: unused here; kept so the signature matches ``run_demo``.
        frame_provider: iterable yielding ``(img, img_trans, scales)`` tuples.
        cfg: experiment configuration.
        device: torch device used for inference.

    Keyboard: ESC quits, 'p' toggles pause.

    Changes vs. original: removed the duplicated ``time_step = 54`` assignment
    and the unused locals (``mean_time``, ``kpt_num``, ``current_time``,
    timing scaffolding ``st_*/et_*``, ``time_flag``, ``drop_rate``); merged the
    redundant nested ``if len(pred_bodys_2d) > 0`` and the duplicated draw
    calls of the refine/non-refine branches. Behavior is unchanged.
    """
    delay = 1
    esc_code = 27
    p_code = 112
    model.eval()
    if refine_model is not None:
        refine_model.eval()
    # --- pose-tracking state ---
    pose_tracker = TRACK()
    track_maxFrames = 100   # history length kept for matching
    last_pose_list = []     # poses of the previous frame
    frame = 0               # frames processed that contained people
    last_id = 0             # next free person id
    thres = 0.2             # tracker matching threshold
    max_human = 100
    consec_list = []        # ring buffer of recent frames' pose lists
    # --- action-recognition bookkeeping (kept for parity with run_demo) ---
    non_pose_frame = 0      # consecutive frames without any person
    pose_dict_for_action = {}
    time_step = 54          # action window length in frames
    # Debug timing accumulators; currently never filled, so the ESC-time
    # summary prints nan.
    time_list = []
    time_num = []
    for (img, img_trans, scales) in frame_provider:
        img_trans = img_trans.to(device)
        with torch.no_grad():
            outputs_2d, outputs_3d, outputs_rd = model(img_trans)
            outputs_3d = outputs_3d.cpu()
            outputs_rd = outputs_rd.cpu()
            hmsIn = outputs_2d[0]
            # Undo the training-time scaling of the output maps.
            hmsIn[:cfg.DATASET.KEYPOINT.NUM] /= 255
            hmsIn[cfg.DATASET.KEYPOINT.NUM:] /= 127
            rDepth = outputs_rd[0][0]
            pred_bodys_2d = dapalib.connect(hmsIn, rDepth, cfg.DATASET.ROOT_IDX, distFlag=True)
        if len(pred_bodys_2d) > 0:
            pred_bodys_2d[:, :, :2] *= cfg.dataset.STRIDE  # back to input resolution
            pred_bodys_2d = pred_bodys_2d.numpy()
            K = scales['K']
            pafs_3d = outputs_3d[0].numpy().transpose(1, 2, 0)  # relative-depth maps, (h, w, c)
            root_d = outputs_rd[0][0].numpy()                   # root-depth map
            # Upsample to input size for a more accurate depth read-out.
            paf_3d_upsamp = cv2.resize(
                pafs_3d, (cfg.INPUT_SHAPE[1], cfg.INPUT_SHAPE[0]), interpolation=cv2.INTER_NEAREST)
            root_d_upsamp = cv2.resize(
                root_d, (cfg.INPUT_SHAPE[1], cfg.INPUT_SHAPE[0]), interpolation=cv2.INTER_NEAREST)
            pred_rdepths = generate_relZ(pred_bodys_2d, paf_3d_upsamp, root_d_upsamp, scales)
            pred_bodys_3d = gen_3d_pose(pred_bodys_2d, pred_rdepths, scales, scales['pad_value'])
            if refine_model is not None:
                new_pred_bodys_3d = lift_and_refine_3d_pose(pred_bodys_2d, pred_bodys_3d, refine_model,
                                                            device=device, root_n=cfg.DATASET.ROOT_IDX)
            else:
                new_pred_bodys_3d = pred_bodys_3d  # (num_person, 15, 4)
            # Re-center the root joint (index 0) as the midpoint of joints 3 and 9.
            for i in range(new_pred_bodys_3d.shape[0]):
                new_pred_bodys_3d[i, 0, :3] = (np.array(new_pred_bodys_3d[i, 3, :3]) +
                                               np.array(new_pred_bodys_3d[i, 9, :3])) / 2
            current_frame_human = copy.deepcopy(new_pred_bodys_3d[:, :, :3])  # tracker input
            # ---------------- pose tracking ----------------
            current_pose_list = []
            non_pose_frame = 0
            if frame == 0:
                # First frame with people: assign ids by detection order.
                for i in range(len(current_frame_human)):
                    human = HumanPoseID(current_frame_human[i], i)
                    current_pose_list.append(human)
                    last_id += 1
                last_pose_list = current_pose_list
                consec_list.append(current_pose_list)
                frame += 1
            else:
                for i in range(len(current_frame_human)):
                    human = HumanPoseID(current_frame_human[i], -1)  # id resolved by the tracker
                    current_pose_list.append(human)
                # track_pose assigns ids in place; it returns the updated next
                # free id because it cannot rebind our local int.
                last_id = pose_tracker.track_pose(consec_list, last_pose_list, current_pose_list,
                                                  last_id, thres, max_human)
                last_pose_list = current_pose_list
                # Smooth every joint with the per-person one-euro filters.
                for i, current_pose in enumerate(current_frame_human):
                    for j in range(15):
                        current_pose[j, 0] = current_pose_list[i].filter[j][0](current_pose[j, 0])
                        current_pose[j, 1] = current_pose_list[i].filter[j][1](current_pose[j, 1])
                        current_pose[j, 2] = current_pose_list[i].filter[j][2](current_pose[j, 2])
                consec_list.append(current_pose_list)
                if len(consec_list) == track_maxFrames:
                    del consec_list[0]
                frame += 1
            # ---------------- visualization ----------------
            refine_pred_2d = project_to_pixel(new_pred_bodys_3d, K)
            draw_lines(img, refine_pred_2d, cfg.SHOW.BODY_EADGES, color=(0, 0, 255))
            draw_cicles(refine_pred_2d, img)
            if refine_model is not None:
                # Only the refined path draws the tracked ids.
                for i in range(len(new_pred_bodys_3d)):
                    cv2.putText(img, 'ID: {}'.format(current_pose_list[i].human_id),
                                (int(refine_pred_2d[i][0, 0] - 50), int(refine_pred_2d[i][0, 1] - 200)),
                                cv2.FONT_HERSHEY_COMPLEX, 1, (255, 0, 255))
        else:
            non_pose_frame += 1
            # Do not reset immediately: a briefly-lost person should keep
            # their id/action state when they reappear.
            if non_pose_frame > time_step / 2:
                frame = 0
                pose_dict_for_action = {}
            if non_pose_frame >= int(track_maxFrames / 2):
                consec_list = []
        cv2.imshow('Human Pose Estimation', img)
        key = cv2.waitKey(delay)
        if key == esc_code:
            time_list = np.array(time_list)
            avg = np.mean(time_list)
            print(f"average time --> {avg}, num --> {np.mean(np.array(time_num))}")
            break
        if key == p_code:
            delay = 0 if delay == 1 else 1
def run_demo(model, refine_model, action_model, frame_provider, cfg, device):
    """Live demo: multi-person 3D pose estimation, id tracking, sliding-window
    action recognition, and ROS publishing of the results.

    Per-frame pipeline: SMAP forward -> 2D association -> 3D lifting
    (+ optional RefineNet) -> low-score person removal -> tracking with
    one-euro smoothing -> action classification over a ``time_step`` window
    -> publish a ``HumanList`` message and draw the overlay.

    Args:
        model: trained SMAP network.
        refine_model: optional RefineNet; refinement is skipped when None.
        action_model: optional action classifier; actions are -1 when None.
        frame_provider: iterable yielding ``(img, img_trans, scales)`` tuples.
        cfg: experiment configuration.
        device: torch device used for inference.

    Keyboard: ESC leaves the frame loop, 'p' toggles pause.

    Changes vs. original: replaced ``cv2.ACCESS_MASK`` (a UMat access flag)
    with a real font constant in the ``cv2.putText`` calls; hoisted the
    loop-invariant ``project_to_pixel`` out of the per-person publish loop;
    added the missing ``id_with_action`` entry for the ``action_model is
    None`` + full-window case (prevents index misalignment when publishing);
    removed the always-true ``frame <= 99999`` debug gate, unused locals, and
    dead commented-out code.
    """
    delay = 1
    esc_code = 27
    p_code = 112
    model.eval()
    if refine_model is not None:
        refine_model.eval()
    if action_model is not None:
        action_model.eval()
    HumanPub = rospy.Publisher('pub_human', HumanList, queue_size=5)
    rate = rospy.Rate(60)
    # Persons whose mean joint score falls below this are dropped as noise.
    remove_thres = 0.5
    # --- pose-tracking state ---
    pose_tracker = TRACK()
    track_maxFrames = 100   # history length kept for matching
    last_pose_list = []     # poses of the previous frame
    frame = 0               # frames processed that contained people
    last_id = 0             # next free person id
    thres = 0.5             # tracker matching threshold
    max_human = 10
    consec_list = []        # ring buffer of recent frames' pose lists
    operator = [0]          # ids whose recognized action is shown/published
    # --- action-recognition state ---
    non_pose_frame = 0      # consecutive frames without any person
    pose_dict_for_action = {}  # id -> [frames in window, pose sequence, last action]
    time_step = 54          # window length fed to the action model
    time_flag = 0           # 0 until the first window has been opened
    drop_rate = 25          # frames dropped from the window after each prediction
    while not rospy.is_shutdown():
        for (img, img_trans, scales) in frame_provider:
            img_trans = img_trans.to(device)
            with torch.no_grad():
                outputs_2d, outputs_3d, outputs_rd = model(img_trans)
                outputs_3d = outputs_3d.cpu()
                outputs_rd = outputs_rd.cpu()
                hmsIn = outputs_2d[0]
                # Undo the training-time scaling of the output maps.
                hmsIn[:cfg.DATASET.KEYPOINT.NUM] /= 255
                hmsIn[cfg.DATASET.KEYPOINT.NUM:] /= 127
                rDepth = outputs_rd[0][0]
                pred_bodys_2d = dapalib.connect(hmsIn, rDepth, cfg.DATASET.ROOT_IDX, distFlag=True)
            if len(pred_bodys_2d) > 0:
                # (num_person, 15, [x, y, Z, score]) at input resolution.
                pred_bodys_2d[:, :, :2] *= cfg.dataset.STRIDE
                pred_bodys_2d = pred_bodys_2d.numpy()
                # Drop implausible detections: mean score of non-zero joints.
                need_remove_person_idx = []
                for p in range(pred_bodys_2d.shape[0]):
                    non_zero = pred_bodys_2d[p, :, 3] != 0
                    useful_joints_idx = [k for k in range(len(non_zero)) if non_zero[k] == True]
                    score = sum(pred_bodys_2d[p, :, 3][useful_joints_idx]) / len(useful_joints_idx)
                    if score < remove_thres:
                        need_remove_person_idx.append(p)
                pred_bodys_2d = np.delete(pred_bodys_2d, need_remove_person_idx, axis=0)
                if len(pred_bodys_2d) > 0:
                    K = scales['K']
                    pafs_3d = outputs_3d[0].numpy().transpose(1, 2, 0)  # relative-depth maps, (h, w, c)
                    root_d = outputs_rd[0][0].numpy()                   # root-depth map
                    # Upsample to input size for a more accurate depth read-out.
                    paf_3d_upsamp = cv2.resize(
                        pafs_3d, (cfg.INPUT_SHAPE[1], cfg.INPUT_SHAPE[0]), interpolation=cv2.INTER_NEAREST)
                    root_d_upsamp = cv2.resize(
                        root_d, (cfg.INPUT_SHAPE[1], cfg.INPUT_SHAPE[0]), interpolation=cv2.INTER_NEAREST)
                    pred_rdepths = generate_relZ(pred_bodys_2d, paf_3d_upsamp, root_d_upsamp, scales)
                    pred_bodys_3d = gen_3d_pose(pred_bodys_2d, pred_rdepths, scales, scales['pad_value'])
                    if refine_model is not None:
                        new_pred_bodys_3d = lift_and_refine_3d_pose(pred_bodys_2d, pred_bodys_3d, refine_model,
                                                                    device=device, root_n=cfg.DATASET.ROOT_IDX)
                    else:
                        new_pred_bodys_3d = pred_bodys_3d  # (num_person, 15, 4)
                    # Re-center the root joint as the midpoint of joints 3 and 9.
                    for i in range(new_pred_bodys_3d.shape[0]):
                        new_pred_bodys_3d[i, 0, :3] = (np.array(new_pred_bodys_3d[i, 3, :3]) +
                                                       np.array(new_pred_bodys_3d[i, 9, :3])) / 2
                    current_frame_human = copy.deepcopy(new_pred_bodys_3d[:, :, :3])  # tracker/action input
                    world_pose = copy.deepcopy(new_pred_bodys_3d)                     # published coordinates
                    # ---------------- pose tracking ----------------
                    current_pose_list = []
                    non_pose_frame = 0
                    if frame == 0:
                        # First frame: order people by root depth so the closest
                        # person receives the smallest id.
                        root_depth_value = []
                        for i in range(len(current_frame_human)):
                            root_depth_value.append(current_frame_human[i, 2, 2])
                        sort_idx = np.argsort(np.array(root_depth_value))
                        current_frame_human = current_frame_human[sort_idx]
                        for i in range(len(current_frame_human)):
                            human = HumanPoseID(current_frame_human[i], i)
                            current_pose_list.append(human)
                            last_id += 1
                        last_pose_list = current_pose_list
                        consec_list.append(current_pose_list)
                        frame += 1
                    else:
                        for i in range(len(current_frame_human)):
                            human = HumanPoseID(current_frame_human[i], -1)  # id resolved by the tracker
                            current_pose_list.append(human)
                        last_id = pose_tracker.track_pose(consec_list, last_pose_list, current_pose_list,
                                                          last_id, thres, max_human)
                        # Smooth every joint with the per-person one-euro filters.
                        for i, current_pose in enumerate(current_frame_human):
                            for j in range(15):
                                current_pose[j, 0] = current_pose_list[i].filter[j][0](current_pose[j, 0])
                                current_pose[j, 1] = current_pose_list[i].filter[j][1](current_pose[j, 1])
                                current_pose[j, 2] = current_pose_list[i].filter[j][2](current_pose[j, 2])
                        last_pose_list = current_pose_list
                        consec_list.append(current_pose_list)
                        if len(consec_list) >= track_maxFrames:
                            del consec_list[0]
                        frame += 1
                    # ---------------- action recognition ----------------
                    # One [id, action] entry per person, same order as
                    # current_pose_list; -1 means "no prediction this frame".
                    id_with_action = []
                    if time_flag == 0:
                        # Open a pose window for every currently tracked person.
                        for i in range(len(current_pose_list)):
                            human_id = current_pose_list[i].human_id
                            norm_pose = pose_normalization(current_frame_human[i])
                            change_pose_ = change_pose(norm_pose)
                            pose_dict_for_action[str(human_id)] = [1, [change_pose_], -1]
                            id_with_action.append([human_id, -1])
                        time_flag = 1
                    else:
                        for i in range(len(current_pose_list)):
                            human_id = current_pose_list[i].human_id
                            if str(human_id) in pose_dict_for_action.keys():
                                norm_pose = pose_normalization(current_frame_human[i])
                                change_pose_ = change_pose(norm_pose)
                                pose_dict_for_action[str(human_id)][0] += 1
                                pose_dict_for_action[str(human_id)][1].append(change_pose_)
                                if pose_dict_for_action[str(human_id)][0] == time_step:
                                    # Window is full: classify it.
                                    if action_model is not None:
                                        embed_image = get_embedding(pose_dict_for_action[str(human_id)][1])
                                        embed_image = embed_image.transpose((2, 0, 1)).astype(np.float32)
                                        embed_image = torch.from_numpy(embed_image).unsqueeze(0).to(device)  # (1, 3, 54, 15)
                                        pre = action_model(embed_image)
                                        action = int(pre.argmax(1)[0])
                                        if human_id in operator:
                                            # Only the operator's action is shown and published.
                                            # Fix: use a real font, not cv2.ACCESS_MASK.
                                            cv2.putText(img, "action: {} ".format(action), (100, 200),
                                                        cv2.FONT_HERSHEY_SIMPLEX, 4, (255, 255, 0), 4)
                                            pose_dict_for_action[str(human_id)][2] = action
                                            id_with_action.append([human_id, action])
                                        else:
                                            id_with_action.append([human_id, -1])
                                    else:
                                        # Keep one entry per person so indices stay
                                        # aligned in the publish loop below.
                                        id_with_action.append([human_id, -1])
                                    # Slide the window by drop_rate frames.
                                    del pose_dict_for_action[str(human_id)][1][0:drop_rate]
                                    pose_dict_for_action[str(human_id)][0] -= drop_rate
                                else:
                                    # Mid-window frame: no prediction yet.
                                    id_with_action.append([human_id, -1])
                            else:
                                # Newly appeared person: open a fresh window.
                                norm_pose = pose_normalization(current_frame_human[i])
                                change_pose_ = change_pose(norm_pose)
                                pose_dict_for_action[str(human_id)] = [1, [change_pose_], -1]
                                id_with_action.append([human_id, -1])
                    # ---------------- publish + visualize ----------------
                    human_list = HumanList()
                    # Projection is identical for every person: compute once.
                    refine_pred_2d = project_to_pixel(new_pred_bodys_3d, K)
                    for i in range(len(current_pose_list)):
                        human_id = id_with_action[i][0]
                        if human_id == -1:
                            continue
                        human = Human()
                        for j in range(15):
                            point = PointCoors()
                            point.x = world_pose[i][j][0]
                            point.y = world_pose[i][j][1]
                            point.z = world_pose[i][j][2]
                            human.body_points.append(point)
                        human.human_id = id_with_action[i][0]
                        human.action = id_with_action[i][1]
                        if human.human_id == 0:
                            draw_lines_once_only_one(img, refine_pred_2d[i], cfg.SHOW.BODY_EADGES,
                                                     color=pose_color(human_id, max_human))
                        elif human_id < max_human and human_id != -1:
                            color = pose_color(human.human_id, max_human)
                            draw_lines_once_only_one(img, refine_pred_2d[i], cfg.SHOW.BODY_EADGES, color=color)
                            # Fix: use a real font, not cv2.ACCESS_MASK.
                            cv2.putText(img, "id: {}".format(human.human_id),
                                        (int(refine_pred_2d[i][1][0] - 50), int(refine_pred_2d[i][1][1] - 50)),
                                        cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2)
                        human_list.human_list.append(human)
                    HumanPub.publish(human_list)
            else:
                non_pose_frame += 1
                # Delay resets so a briefly-lost person keeps id/action state.
                if non_pose_frame > time_step / 2:
                    frame = 0
                    pose_dict_for_action = {}
                if non_pose_frame >= int(track_maxFrames / 2):
                    consec_list = []
            cv2.imshow('Human Pose Estimation', img)
            key = cv2.waitKey(delay)
            if key == esc_code:
                break
            if key == p_code:
                delay = 0 if delay == 1 else 1
        rate.sleep()
def main():
    """Parse CLI arguments, load the SMAP / RefineNet / action models, and
    dispatch to one of the processing modes.

    Modes:
        0 -- process a single image (``--img_path`` / ``--json_path``)
        1 -- run inference / dataset generation through a DataLoader (do not use)
        2 -- process a video file (``--video_path``)
        3 -- real-time processing from a ROS camera topic (``--camera_topic``)
        4 -- generate CSV action-recognition training data from frame folders
    """
    parser = argparse.ArgumentParser()
    # Alternative SMAP checkpoints kept for reference:
    #   /media/xuchengjun/zx/human_pose/pth/main/1.4/train.pth
    #   /media/xuchengjun/zx/human_pose/pth/main/12.16/train.pth
    #   /media/xuchengjun/zx/human_pose/pth/main/20220328/train.pth
    parser.add_argument('--SMAP_path', type=str,
                        default='/home/xuchengjun/ZXin/smap/pretrained/main_model.pth')
    # Alternative RefineNet checkpoint:
    #   /media/xuchengjun/zx/human_pose/pth/main/1.4/RefineNet_epoch_250.pth
    parser.add_argument('--RefineNet_path', type=str,
                        default='/home/xuchengjun/ZXin/smap/pretrained/refine.pth')
    parser.add_argument('--Action_path', type=str, default='/home/xuchengjun/ZXin/smap/pretrained/action.pth')
    parser.add_argument('--device', "-de", type=int, default=0)
    # donot use mode 1 ..
    parser.add_argument('--mode', '-m', type=int, default=0,
                        help='mode --> 0: process single iamge, 1:using CustomDataset, 2: process video, 3: real-time process')
    parser.add_argument('--track', '-t', type=int, default=1,
                        help='wether track person or not, if not track, the action recognition will not execute')
    # BUGFIX: "--test_mode" previously also claimed the short flag "-t", which
    # collides with "--track -t" above and makes argparse raise
    # "conflicting option string" on startup. Only the long option is kept.
    parser.add_argument("--test_mode", type=str, default="run_inference",
                        choices=['generate_train', 'generate_result', 'run_inference'],
                        help='Type of test. One of "generate_train": generate refineNet datasets, '
                             '"generate_result": save inference result and groundtruth, '
                             '"run_inference": save inference result for input images.')
    parser.add_argument("--data_mode", "-d", type=str, default="test",
                        choices=['test', 'generation'],
                        help='Only used for "generate_train" test_mode, "generation" for refineNet train dataset,'
                             '"test" for refineNet test dataset.')
    parser.add_argument("--batch_size", type=int, default=1, help='Batch_size of test')
    parser.add_argument("--do_flip", type=float, default=0, help='Set to 1 if do flip when test')
    # Image dir candidates:
    #   /media/xuchengjun/datasets/CMU/170407_haggling_a1/hdImgs/00_16
    #   /media/xuchengjun/datasets/panoptic-toolbox/171204_pose1_sample
    #   /media/xuchengjun/datasets/action/images/stand
    #   /media/xuchengjun/datasets/coco_2017
    parser.add_argument("--dataset_path", type=str, default="/media/xuchengjun/datasets/action/images/walk",
                        help='Image dir path of "run_inference" test mode')
    # Video candidates:
    #   /media/xuchengjun/datasets/panoptic-toolbox/170407_haggling_a1/hdVideos/hd_00_00.mp4
    #   /media/xuchengjun/datasets/action/video/walk.avi
    #   /media/xuchengjun/zx/videos/01.avi
    parser.add_argument('--video_path', type=str, default='/media/xuchengjun/zx/videos/raise_arm0331(5).avi')
    parser.add_argument("--json_name", type=str, default="final_json",
                        help='Add a suffix to the result json.')
    # Single-image candidates:
    #   /media/xuchengjun/datasets/panoptic-toolbox/171204_pose1_sample/hdImgs/00_00/00_00_00000000.jpg
    #   /media/xuchengjun/datasets/CMU/160422_ultimatum1/hdImgs/00_08/00_08_00015350.jpg
    parser.add_argument('--json_path', default="/media/xuchengjun/datasets/CMU/CMU_val_json_file/160906_pizza1/160906_pizza1--00_16--00000200.json")
    parser.add_argument('--img_path', default="/media/xuchengjun/datasets/panoptic-toolbox/160422_ultimatum1/hdImgs/00_00/00_00_00005000.jpg")
    # Real camera (ROS topic).
    parser.add_argument('--camera_topic', default='/kinect2_1/hd/image_color')
    args = parser.parse_args()

    # Select CUDA device.
    device = torch.device("cuda:" + str(args.device))

    # Main pose-estimation network (SMAP_new variant).
    model = SMAP_new(cfg, run_efficient=cfg.RUN_EFFICIENT)
    model.to(device)
    # Depth-refinement network.
    refine_model = RefineNet()
    refine_model.to(device)
    # Action-recognition network.
    action_model = EARN(depth=28, num_classes=5, widen_factor=4, dropRate=0.0, nc=3)
    action_model.to(device)

    smap_model_path = args.SMAP_path
    refine_model_path = args.RefineNet_path
    action_model_path = args.Action_path
    if Path(smap_model_path).exists():
        # SMAP weights are stored under the 'model' key of the checkpoint.
        state_dict = torch.load(smap_model_path, map_location=torch.device('cpu'))
        state_dict = state_dict['model']
        model.load_state_dict(state_dict)

        # RefineNet is optional: fall back to None when the weights are absent.
        if Path(refine_model_path).exists():
            print('using refine net ..')
            refine_state_dict = torch.load(refine_model_path)
            refine_model.load_state_dict(refine_state_dict)
        else:
            refine_model = None

        # The action network is optional as well.
        if Path(action_model_path).exists():
            print('using action net ..')
            action_state_dict = torch.load(action_model_path)
            action_model.load_state_dict(action_state_dict)
        else:
            action_model = None

        if args.mode == 0:
            process_single_image(model, refine_model, cfg, device, args.img_path, args.json_path)
        elif args.mode == 1:
            # Configure test/generation parameters on the global cfg object.
            cfg.TEST_MODE = args.test_mode
            cfg.DATA_MODE = args.data_mode
            cfg.REFINE = len(args.RefineNet_path) > 0
            cfg.DO_FLIP = args.do_flip
            cfg.JSON_NAME = args.json_name
            cfg.TEST.IMG_PER_GPU = args.batch_size  # if just run_inference, set to 1
            if args.test_mode == "run_inference":
                test_dataset = CustomDataset(cfg, args.dataset_path)
                data_loader = DataLoader(test_dataset, batch_size=args.batch_size, shuffle=False)
            else:
                data_loader = get_test_loader(cfg, num_gpu=1, local_rank=0, stage=args.data_mode)
            generate_3d_point_pairs(model, refine_model, data_loader, cfg, device, output_dir=cfg.TEST_PATH)
        elif args.mode == 2:
            file_name = args.video_path
            frame_provider = VideoReader(file_name, cfg)
            process_video(model, refine_model, action_model, frame_provider, cfg, device)
        elif args.mode == 3:
            rospy.init_node('human_pose', anonymous=True)
            frame_provider = CameraReader(args.camera_topic, cfg)
            run_demo(model, refine_model, action_model, frame_provider, cfg, device)
        elif args.mode == 4:
            # Generate CSV clips of 3D poses for training the action classifier.
            image_dir_list = ['/media/xuchengjun/zx/videos/52']  # image files root dir
            csv_dir = '/media/xuchengjun/datasets/action_zx/4_wo'
            action_label = '4'  # 0-stand 1-walk 2-wave_arm 3-T-pose 4-raise-arm
            total_data_num = 60  # number of clips to generate (classes 0,1,2 each had 200)
            if not os.path.exists(csv_dir):
                os.mkdir(csv_dir)
                print('have created a new dir ..')
            print('begin ..')
            csv_id = 60
            for image_dir in image_dir_list:
                all_image_list = os.listdir(image_dir)
                # Sort the frames numerically by the integer part of the file name.
                image_idx_list_ori = [int(img_name.split(".")[0]) for img_name in all_image_list]
                sort_list = np.array(image_idx_list_ori).argsort()
                image_idx_list = list(np.array(image_idx_list_ori)[sort_list])  # list of ints
                have_appeared = []
                total_frames = len(image_idx_list)
                print(total_frames)
                for i in range(0, total_data_num):
                    # Random clip length; start frame is drawn until it is both
                    # present on disk and not used by a previous clip.
                    consec_frames = random.randrange(20, 54)
                    start_frame = random.sample(image_idx_list, 1)[0]
                    print(f'start_frame --> {start_frame} consec_frames --> {consec_frames}')
                    while True:
                        if start_frame in have_appeared or start_frame not in image_idx_list:
                            start_frame = random.sample(image_idx_list, 1)[0]
                        else:
                            break
                    have_appeared.append(start_frame)
                    # Pose-rotation augmentation angles (currently just angle 0;
                    # use 5 steps for range(1,61) or 3 steps for range(1,121)).
                    angles = [3 * angle_step for angle_step in range(1)]
                    pbarAngle = tqdm(angles, desc="Angle")
                    for single_angle in pbarAngle:
                        pose_frame_list = []
                        useful_frame = 0
                        for j in range(0, consec_frames):
                            if (start_frame + j) not in image_idx_list:
                                # Frame sequence broken on disk: stop extending the clip.
                                break
                            img_path = os.path.join(image_dir, str(start_frame + j) + '.jpg')
                            pose_3d = process_single_image(model, refine_model, cfg, device, img_path, single_angle)
                            if pose_3d is None:
                                continue
                            pose_frame_list.append(pose_3d)
                            useful_frame += 1
                        if useful_frame < (consec_frames / 2):
                            # Too few valid frames -- discard this clip.
                            continue
                        csv_file = os.path.join(csv_dir, action_label + '_' + str(csv_id) + '.csv')
                        # NOTE(review): `headers` is assumed to be a module-level
                        # CSV header row defined elsewhere in this file -- confirm.
                        with open(csv_file, 'w', newline='') as csvfile:
                            csv_writer = csv.writer(csvfile)
                            csv_writer.writerow(headers)
                            csv_writer.writerows(pose_frame_list)
                        csv_id += 1
                    print(f"第{i}个完成 ..")
    else:
        print('no model !')
# Script entry point. ROS node initialisation is performed inside main()
# when --mode 3 (real-time camera processing) is selected.
if __name__ == "__main__":
    # rospy.init_node('human_pose', anonymous=True)
    main()
|
h7ga40/azure_iot_hub | ntshell/src/net_misc.h | /*
* TOPPERS PROJECT Home Network Working Group Software
*
* Copyright (C) 2018-2019 Cores Co., Ltd. Japan
*
* 上記著作権者は,以下の(1)~(4)の条件を満たす場合に限り,本ソフトウェ
* ア(本ソフトウェアを改変したものを含む.以下同じ)を使用・複製・改
* 変・再配布(以下,利用と呼ぶ)することを無償で許諾する.
* (1) 本ソフトウェアをソースコードの形で利用する場合には,上記の著作
* 権表示,この利用条件および下記の無保証規定が,そのままの形でソー
* スコード中に含まれていること.
* (2) 本ソフトウェアを,ライブラリ形式など,他のソフトウェア開発に使
* 用できる形で再配布する場合には,再配布に伴うドキュメント(利用
* 者マニュアルなど)に,上記の著作権表示,この利用条件および下記
* の無保証規定を掲載すること.
* (3) 本ソフトウェアを,機器に組み込むなど,他のソフトウェア開発に使
* 用できない形で再配布する場合には,次のいずれかの条件を満たすこ
* と.
* (a) 再配布に伴うドキュメント(利用者マニュアルなど)に,上記の著
* 作権表示,この利用条件および下記の無保証規定を掲載すること.
* (b) 再配布の形態を,別に定める方法によって,TOPPERSプロジェクトに
* 報告すること.
* (4) 本ソフトウェアの利用により直接的または間接的に生じるいかなる損
* 害からも,上記著作権者およびTOPPERSプロジェクトを免責すること.
* また,本ソフトウェアのユーザまたはエンドユーザからのいかなる理
* 由に基づく請求からも,上記著作権者およびTOPPERSプロジェクトを
* 免責すること.
*
* 本ソフトウェアは,無保証で提供されているものである.上記著作権者お
* よびTOPPERSプロジェクトは,本ソフトウェアに関して,特定の使用目的
* に対する適合性も含めて,いかなる保証も行わない.また,本ソフトウェ
* アの利用により直接的または間接的に生じたいかなる損害に関しても,そ
* の責任を負わない.
*
* @(#) $Id$
*/
#ifndef _NET_MISC_H_
#define _NET_MISC_H_

/* Stack size */
#define NET_MISC_STACK_SIZE 2048 /* stack size of the NTP client task (IPv4) */

/* Priority */
#define NET_MISC_MAIN_PRIORITY 5 /* priority of the NTP client task (IPv4) */

/* Task entry point */
extern void net_misc_task(intptr_t exinf);

#endif /* of #ifndef _NET_MISC_H_ */
|
marcinbiegun/HappyInteractions | Source/HappyInteractions/Systems/Triggers/Conditions/HTriggerCondition.h | // Copyright SpaceCatLabs. All Rights Reserved.
#pragma once
#include "CoreMinimal.h"
#include "HTriggerCondition.generated.h"
/**
 * Abstract base class for trigger conditions.
 *
 * Marked EditInlineNew so condition instances can be created inline on a
 * trigger's details panel, and Blueprintable so concrete conditions can be
 * implemented either in C++ or in Blueprint.
 */
UCLASS(Abstract, Blueprintable, EditInlineNew, CollapseCategories)
class UHTriggerCondition : public UObject
{
	GENERATED_BODY()

public:
	/** Returns true while the condition is satisfied; override in subclasses or Blueprint. */
	UFUNCTION(BlueprintNativeEvent)
	bool IsConditionPassing();
};
|
ggeurts/fop | src/java/org/apache/fop/render/txt/TXTRenderer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id$ */
package org.apache.fop.render.txt;
import java.awt.Color;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.geom.AffineTransform;
import java.awt.geom.Rectangle2D;
import java.io.IOException;
import java.io.OutputStream;
import java.util.List;
import java.util.Map;
import org.apache.xmlgraphics.util.UnitConv;
import org.apache.fop.apps.FOPException;
import org.apache.fop.apps.FOUserAgent;
import org.apache.fop.area.Area;
import org.apache.fop.area.CTM;
import org.apache.fop.area.PageViewport;
import org.apache.fop.area.inline.Image;
import org.apache.fop.area.inline.TextArea;
import org.apache.fop.render.AbstractPathOrientedRenderer;
import org.apache.fop.render.txt.border.AbstractBorderElement;
import org.apache.fop.render.txt.border.BorderManager;
/**
* <p>Renderer that renders areas to plain text.</p>
*
* <p>This work was authored by <NAME> and
* <NAME> (<EMAIL>) [to use the new Renderer interface].</p>
*/
public class TXTRenderer extends AbstractPathOrientedRenderer {

    /** Shade characters used to approximate fill colors by luminance. */
    private static final char LIGHT_SHADE = '\u2591';
    private static final char MEDIUM_SHADE = '\u2592';
    private static final char DARK_SHADE = '\u2593';
    private static final char FULL_BLOCK = '\u2588';

    /** Character used to mark image areas. */
    private static final char IMAGE_CHAR = '#';

    /** The stream for output */
    private OutputStream outputStream;

    /** The current stream to add Text commands to. */
    private TXTStream currentStream;

    /** Buffer for text. */
    private StringBuffer[] charData;

    /** Buffer for background and images. */
    private StringBuffer[] decoData;

    /** Leading of line containing Courier font size of 10pt. */
    public static final int LINE_LEADING = 1070;

    /** Height of one symbol in Courier font size of 10pt. */
    public static final int CHAR_HEIGHT = 7860;

    /** Width of one symbol in Courier font size of 10pt. */
    public static final int CHAR_WIDTH = 6000;

    /** Current processing page width. */
    private int pageWidth;

    /** Current processing page height. */
    private int pageHeight;

    /**
     * Every line except the last line on a page (which will end with
     * pageEnding) will be terminated with this string.
     */
    private static final String LINE_ENDING = "\r\n";

    /** Every page except the last one will end with this string. */
    private static final String PAGE_ENDING = "\f";

    /** Equals true, if current page is first. */
    private boolean firstPage;

    /** Manager for storing border's information. */
    private BorderManager bm;

    /** Char for current filling. */
    private char fillChar;

    /** Saves current coordinate transformation. */
    private final TXTState currentState = new TXTState();

    /** Target output encoding; null selects the default encoding (UTF-8). */
    private String encoding;

    /**
     * Constructs a newly allocated <code>TXTRenderer</code> object.
     *
     * @param userAgent the user agent that contains configuration details. This cannot be null.
     */
    public TXTRenderer(FOUserAgent userAgent) {
        super(userAgent);
    }

    /** {@inheritDoc} */
    public String getMimeType() {
        return "text/plain";
    }

    /**
     * Sets the encoding of the target file.
     * @param encoding the encoding, null to select the default encoding (UTF-8)
     */
    public void setEncoding(String encoding) {
        this.encoding = encoding;
    }

    /**
     * Indicates if point (x, y) lay inside currentPage.
     *
     * @param x x coordinate
     * @param y y coordinate
     * @return <b>true</b> if point lay inside page
     */
    public boolean isLayInside(int x, int y) {
        return (x >= 0) && (x < pageWidth) && (y >= 0) && (y < pageHeight);
    }

    /**
     * Add char to text buffer, transforming the coordinates through the
     * current CTM first.
     *
     * @param x x coordinate
     * @param y y coordinate
     * @param ch char to add
     * @param ischar boolean, repersenting is character adding to text buffer
     */
    protected void addChar(int x, int y, char ch, boolean ischar) {
        Point point = currentState.transformPoint(x, y);
        putChar(point.x, point.y, ch, ischar);
    }

    /**
     * Add char to text or background buffer. Points outside the page are
     * silently ignored.
     *
     * @param x x coordinate
     * @param y x coordinate
     * @param ch char to add
     * @param ischar indicates if it char or background
     */
    protected void putChar(int x, int y, char ch, boolean ischar) {
        if (isLayInside(x, y)) {
            StringBuffer sb = ischar ? charData[y] : decoData[y];
            // Pad the row with spaces up to column x before overwriting.
            while (sb.length() <= x) {
                sb.append(' ');
            }
            sb.setCharAt(x, ch);
        }
    }

    /**
     * Adds string to text buffer (<code>charData</code>). <p>
     * Chars of string map in turn.
     *
     * @param row x coordinate
     * @param col y coordinate
     * @param s string to add
     */
    protected void addString(int row, int col, String s) {
        for (int l = 0; l < s.length(); l++) {
            addChar(col + l, row, s.charAt(l), true);
        }
    }

    /**
     * Render TextArea to Text.
     *
     * @param area inline area to render
     */
    protected void renderText(TextArea area) {
        // Map millipoint positions to character-grid coordinates.
        int col = Helper.ceilPosition(this.currentIPPosition, CHAR_WIDTH);
        int row = Helper.ceilPosition(this.currentBPPosition - LINE_LEADING, CHAR_HEIGHT + 2 * LINE_LEADING);

        String s = area.getText();
        addString(row, col, s);

        super.renderText(area);
    }

    /**
     * {@inheritDoc}
     */
    public void renderPage(PageViewport page) throws IOException, FOPException {
        if (firstPage) {
            firstPage = false;
        } else {
            currentStream.add(PAGE_ENDING);
        }
        Rectangle2D bounds = page.getViewArea();
        double width = bounds.getWidth();
        double height = bounds.getHeight();
        pageWidth = Helper.ceilPosition((int) width, CHAR_WIDTH);
        pageHeight = Helper.ceilPosition((int) height, CHAR_HEIGHT + 2 * LINE_LEADING);

        // init buffers
        charData = new StringBuffer[pageHeight];
        decoData = new StringBuffer[pageHeight];
        for (int i = 0; i < pageHeight; i++) {
            charData[i] = new StringBuffer();
            decoData[i] = new StringBuffer();
        }

        bm = new BorderManager(pageWidth, pageHeight, currentState);

        super.renderPage(page);

        flushBorderToBuffer();
        flushBuffer();
    }

    /**
     * Projects current page borders (i.e.<code>bm</code>) to buffer for
     * background and images (i.e.<code>decoData</code>).
     */
    private void flushBorderToBuffer() {
        for (int x = 0; x < pageWidth; x++) {
            for (int y = 0; y < pageHeight; y++) {
                Character c = bm.getCharacter(x, y);
                if (c != null) {
                    putChar(x, y, c.charValue(), false);
                }
            }
        }
    }

    /**
     * Write out the buffer to output stream. Text characters take precedence
     * over decoration (background/border) characters occupying the same cell.
     */
    private void flushBuffer() {
        for (int row = 0; row < pageHeight; row++) {
            StringBuffer cr = charData[row];
            StringBuffer dr = decoData[row];
            StringBuffer outr = null;

            if (cr != null && dr == null) {
                outr = cr;
            } else if (dr != null && cr == null) {
                outr = dr;
            } else if (cr != null && dr != null) {
                // Merge: prefer a non-space text char, else the decoration char.
                int len = dr.length();
                if (cr.length() > len) {
                    len = cr.length();
                }
                outr = new StringBuffer();
                for (int countr = 0; countr < len; countr++) {
                    if (countr < cr.length() && cr.charAt(countr) != ' ') {
                        outr.append(cr.charAt(countr));
                    } else if (countr < dr.length()) {
                        outr.append(dr.charAt(countr));
                    } else {
                        outr.append(' ');
                    }
                }
            }

            if (outr != null) {
                currentStream.add(outr.toString());
            }
            // BUGFIX: terminate every line except the last one on the page, as
            // documented on LINE_ENDING; the last line is followed by
            // PAGE_ENDING (or EOF) instead. The previous check
            // "row < pageHeight" was always true inside this loop.
            if (row < pageHeight - 1) {
                currentStream.add(LINE_ENDING);
            }
        }
    }

    /**
     * {@inheritDoc}
     */
    public void startRenderer(OutputStream os) throws IOException {
        log.info("Rendering areas to TEXT.");
        this.outputStream = os;
        currentStream = new TXTStream(os);
        currentStream.setEncoding(this.encoding);
        firstPage = true;
    }

    /**
     * {@inheritDoc}
     */
    public void stopRenderer() throws IOException {
        log.info("writing out TEXT");
        outputStream.flush();
        super.stopRenderer();
    }

    /**
     * Does nothing.
     * {@inheritDoc}
     */
    protected void restoreStateStackAfterBreakOut(List breakOutList) {
    }

    /**
     * Does nothing.
     * @return null
     * {@inheritDoc}
     */
    protected List breakOutOfStateStack() {
        return null;
    }

    /**
     * Pushes an identity CTM onto the state stack.
     * {@inheritDoc}
     */
    protected void saveGraphicsState() {
        currentState.push(new CTM());
    }

    /**
     * Pops the topmost CTM from the state stack.
     * {@inheritDoc}
     */
    protected void restoreGraphicsState() {
        currentState.pop();
    }

    /**
     * Does nothing.
     * {@inheritDoc}
     */
    protected void beginTextObject() {
    }

    /**
     * Does nothing.
     * {@inheritDoc}
     */
    protected void endTextObject() {
    }

    /**
     * Does nothing.
     * {@inheritDoc}
     */
    protected void clip() {
    }

    /**
     * Does nothing.
     * {@inheritDoc}
     */
    protected void clipRect(float x, float y, float width, float height) {
    }

    /**
     * Does nothing.
     * {@inheritDoc}
     */
    protected void moveTo(float x, float y) {
    }

    /**
     * Does nothing.
     * {@inheritDoc}
     */
    protected void lineTo(float x, float y) {
    }

    /**
     * Does nothing.
     * {@inheritDoc}
     */
    protected void closePath() {
    }

    /**
     * Fills rectangle startX, startY, width, height with char
     * <code>charToFill</code>.
     *
     * @param startX x-coordinate of upper left point
     * @param startY y-coordinate of upper left point
     * @param width width of rectangle
     * @param height height of rectangle
     * @param charToFill filling char
     */
    private void fillRect(int startX, int startY, int width, int height,
            char charToFill) {
        for (int x = startX; x < startX + width; x++) {
            for (int y = startY; y < startY + height; y++) {
                addChar(x, y, charToFill, false);
            }
        }
    }

    /**
     * Fills a rectangular area with the current filling char.
     * {@inheritDoc}
     */
    protected void fillRect(float x, float y, float width, float height) {
        fillRect(bm.getStartX(), bm.getStartY(), bm.getWidth(), bm.getHeight(),
                fillChar);
    }

    /**
     * Changes current filling char.
     * {@inheritDoc}
     */
    protected void updateColor(Color col, boolean fill) {
        if (col == null) {
            return;
        }
        // fillShade evaluation was taken from fop-0.20.5
        // TODO: This fillShade is actually the luminance component of the color
        // transformed to the YUV (YPrBb) Colorspace. It should use standard
        // Java methods for its conversion instead of the formula given here.
        double fillShade = 0.30f / 255f * col.getRed()
                         + 0.59f / 255f * col.getGreen()
                         + 0.11f / 255f * col.getBlue();
        fillShade = 1 - fillShade;

        if (fillShade > 0.8f) {
            fillChar = FULL_BLOCK;
        } else if (fillShade > 0.6f) {
            fillChar = DARK_SHADE;
        } else if (fillShade > 0.4f) {
            fillChar = MEDIUM_SHADE;
        } else if (fillShade > 0.2f) {
            fillChar = LIGHT_SHADE;
        } else {
            fillChar = ' ';
        }
    }

    /** {@inheritDoc} */
    protected void drawImage(String url, Rectangle2D pos, Map foreignAttributes) {
        //No images are painted here
    }

    /**
     * Fills image rectangle with a <code>IMAGE_CHAR</code>.
     *
     * @param image the base image
     * @param pos the position of the image
     */
    public void renderImage(Image image, Rectangle2D pos) {
        int x1 = Helper.ceilPosition(currentIPPosition, CHAR_WIDTH);
        int y1 = Helper.ceilPosition(currentBPPosition - LINE_LEADING, CHAR_HEIGHT + 2 * LINE_LEADING);
        int width = Helper.ceilPosition((int) pos.getWidth(), CHAR_WIDTH);
        int height = Helper.ceilPosition((int) pos.getHeight(), CHAR_HEIGHT + 2 * LINE_LEADING);

        fillRect(x1, y1, width, height, IMAGE_CHAR);
    }

    /**
     * Returns the closest integer to the multiplication of a number and 1000.
     *
     * @param x the value of the argument, multiplied by
     *            1000 and rounded
     * @return the value of the argument multiplied by
     *         1000 and rounded to the nearest integer
     */
    protected int toMilli(float x) {
        return Math.round(x * 1000f);
    }

    /**
     * Adds one element of border.
     *
     * @param x x coordinate
     * @param y y coordinate
     * @param style integer, representing border style
     * @param type integer, representing border element type
     */
    private void addBitOfBorder(int x, int y, int style, int type) {
        Point point = currentState.transformPoint(x, y);
        if (isLayInside(point.x, point.y)) {
            bm.addBorderElement(point.x, point.y, style, type);
        }
    }

    /**
     * {@inheritDoc}
     */
    protected void drawBorderLine(float x1, float y1, float x2, float y2,
            boolean horz, boolean startOrBefore, int style, Color col) {
        int borderHeight = bm.getHeight();
        int borderWidth = bm.getWidth();
        int borderStartX = bm.getStartX();
        int borderStartY = bm.getStartY();

        int x;
        int y;
        // Choose the edge of the current border rectangle to draw along.
        if (horz && startOrBefore) { // BEFORE
            x = borderStartX;
            y = borderStartY;
        } else if (horz && !startOrBefore) { // AFTER
            x = borderStartX;
            y = borderStartY + borderHeight - 1;
        } else if (!horz && startOrBefore) { // START
            x = borderStartX;
            y = borderStartY;
        } else { // END
            x = borderStartX + borderWidth - 1;
            y = borderStartY;
        }

        int dx;
        int dy;
        int length;
        int startType;
        int endType;
        if (horz) {
            length = borderWidth;
            dx = 1;
            dy = 0;
            startType = 1 << AbstractBorderElement.RIGHT;
            endType = 1 << AbstractBorderElement.LEFT;
        } else {
            length = borderHeight;
            dx = 0;
            dy = 1;
            startType = 1 << AbstractBorderElement.DOWN;
            endType = 1 << AbstractBorderElement.UP;
        }

        // First element connects only towards the line; middle elements
        // connect both ways; the last connects only backwards.
        addBitOfBorder(x, y, style, startType);
        for (int i = 0; i < length - 2; i++) {
            x += dx;
            y += dy;
            addBitOfBorder(x, y, style, startType + endType);
        }
        x += dx;
        y += dy;
        addBitOfBorder(x, y, style, endType);
    }

    /**
     * {@inheritDoc}
     */
    protected void drawBackAndBorders(Area area, float startx, float starty,
            float width, float height) {
        bm.setWidth(Helper.ceilPosition(toMilli(width), CHAR_WIDTH));
        bm.setHeight(Helper.ceilPosition(toMilli(height), CHAR_HEIGHT + 2 * LINE_LEADING));
        bm.setStartX(Helper.ceilPosition(toMilli(startx), CHAR_WIDTH));
        bm.setStartY(Helper.ceilPosition(toMilli(starty), CHAR_HEIGHT + 2 * LINE_LEADING));

        super.drawBackAndBorders(area, startx, starty, width, height);
    }

    /**
     * {@inheritDoc}
     */
    protected void startVParea(CTM ctm, Rectangle clippingRect) {
        currentState.push(ctm);
    }

    /**
     * {@inheritDoc}
     */
    protected void endVParea() {
        currentState.pop();
    }

    /** {@inheritDoc} */
    protected void startLayer(String layer) {
    }

    /** {@inheritDoc} */
    protected void endLayer() {
    }

    /** {@inheritDoc} */
    protected void concatenateTransformationMatrix(AffineTransform at) {
        currentState.push(new CTM(UnitConv.ptToMpt(at)));
    }
}
|
fw42/cubecomp | test/controllers/admin_controller_test.rb | <filename>test/controllers/admin_controller_test.rb<gh_stars>1-10
require 'test_helper'
# Controller tests for the admin root (#index) action: authentication,
# HTTPS enforcement, competition selection / redirect behaviour, and
# CSRF protection.
class AdminControllerTest < ActionController::TestCase
  # --- authentication and transport ---

  test '#index redirects to login page if user is not logged in' do
    logout
    use_https
    get :index
    assert_redirected_to admin_login_path
  end

  test '#index requires https' do
    use_https(false)
    get :index
    assert_redirected_to admin_root_url(protocol: 'https')
  end

  test "#index redirects to login page if old user session exists but user doesn't exist anymore" do
    user = users(:regular_user_with_no_competitions)
    login_as(user)
    user.destroy!
    get :index
    assert_redirected_to admin_login_path
  end

  # --- competition selection / redirect behaviour ---

  test '#index redirects to dashboard from session' do
    user = users(:regular_user_with_two_competitions)
    login_as(user)
    competition = user.competitions.first
    session[:competition_id] = competition.id
    get :index
    assert_redirected_to admin_competition_dashboard_index_path(competition.id)
  end

  test "#index redirects to users' last competition if it exists and no competition is set in the session" do
    user = users(:regular_user_with_two_competitions)
    login_as(user)
    get :index
    competition = user.policy.competitions.last
    assert_redirected_to admin_competition_dashboard_index_path(competition.id)
  end

  test '#index redirects to user page if user has no competitions' do
    user = users(:regular_user_with_no_competitions)
    login_as(user)
    get :index
    assert_redirected_to edit_admin_user_path(user.id)
  end

  test '#index redirects to user page if user is not allowed to login to any competitions' do
    user = users(:regular_user_with_two_competitions)
    login_as(user)
    UserPolicy.any_instance.expects(:login?).with(anything).at_least_once.returns(false)
    get :index
    assert_redirected_to edit_admin_user_path(user.id)
  end

  test '#index allowed if user can #login? to competition' do
    user = users(:regular_user_with_no_competitions)
    login_as(user)
    competition = Competition.first
    UserPolicy.any_instance.expects(:login?).with(competition).at_least_once.returns(true)
    get :index, params: { competition_id: competition.id }
    assert_redirected_to admin_competition_dashboard_index_path(competition.id)
  end

  test '#index renders 403 if user cannot #login? to competition' do
    user = users(:regular_user_with_no_competitions)
    login_as(user)
    competition = Competition.first
    UserPolicy.any_instance.expects(:login?).with(competition).at_least_once.returns(false)
    get :index, params: { competition_id: competition.id }
    assert_response :forbidden
  end

  test '#index redirects back to login if user is inactive' do
    user = users(:admin)
    user.update(active: false)
    login_as(user)
    get :index, params: { competition_id: Competition.first }
    assert_redirected_to admin_login_path
  end

  test "#index redirects to last competition if user session has old competition that doesn't exist anymore" do
    user = users(:regular_user_with_two_competitions)
    login_as(user)
    competition = user.competitions.first
    session[:competition_id] = competition.id
    competition.destroy!
    get :index
    competition = user.competitions.last
    assert_redirected_to admin_competition_dashboard_index_path(competition.id)
  end

  test "#index redirects to last competition if user doesn't have permission to competition from session" do
    user = users(:regular_user_with_two_competitions)
    login_as(user)
    competition = user.competitions.first
    session[:competition_id] = competition.id
    user.permissions.where(competition: competition).each(&:destroy!)
    get :index
    competition = user.competitions.last
    assert_redirected_to admin_competition_dashboard_index_path(competition.id)
  end

  test "#index redirects to last competition if user is admin and doesn't have any explicit permissions" do
    user = users(:admin)
    login_as(user)
    user.permissions.each(&:destroy!)
    get :index
    assert_redirected_to admin_competition_dashboard_index_path(user.policy.competitions.last.id)
  end

  # --- CSRF protection (exercised through Admin::UsersController) ---

  test 'forms contain CSRF tokens' do
    @controller = Admin::UsersController.new
    login_as(users(:regular_user_with_no_competitions))
    with_csrf_protection do
      get :edit, params: { id: users(:regular_user_with_no_competitions).id }
      assert_match(/<meta name="csrf-param" content="authenticity_token" \/>/, @response.body)
      assert_match(/<meta name="csrf-token" content="[^"]+" \/>/, @response.body)
    end
  end

  test 'POST fails if CSRF token is missing' do
    @controller = Admin::UsersController.new
    login_as(users(:regular_user_with_no_competitions))
    with_csrf_protection do
      assert_raises ActionController::InvalidAuthenticityToken do
        post :edit, params: {
          id: users(:regular_user_with_no_competitions),
          user: {
            permission_level: User::PERMISSION_LEVELS.values.max
          }
        }
      end
    end
  end
end
|
josephpak/eztip-frontend-leigh-ann | eztip/src/components/presentational/EmployeeList/EmployeeList.js | import React from "react";
import { EmployeeCard } from "../Employee";
import PropTypes from "prop-types";
import styled from 'styled-components';
// Flex container that wraps and centers the employee cards (capped at 1200px).
const EmployeeListDiv = styled.div`
display: flex;
flex-wrap: wrap;
max-width: 1200px;
width: 100%;
justify-content: center;
`;
const EmployeeList = props => {
const employees = props.users.filter(user => user.user_type === "employee");
return (
<div>
<h2>Which employee would you like to tip?</h2>
<EmployeeListDiv>
{employees.map(employee => (
<EmployeeCard
employee={employee}
key={employee.id}
match={props.match}
history={props.history}
/>
))}
</EmployeeListDiv>
</div>
);
};
// Runtime prop validation. Several user fields arrive from the API as either
// string or number, hence the oneOfType declarations.
EmployeeList.propTypes = {
  users: PropTypes.arrayOf(
    PropTypes.shape({
      first_name: PropTypes.string,
      last_name: PropTypes.string,
      id: PropTypes.number,
      tagline: PropTypes.string,
      profile_photo: PropTypes.oneOfType([
        PropTypes.string,
        PropTypes.number
      ]),
      type_id: PropTypes.number,
      user_type: PropTypes.string,
      username: PropTypes.string,
      working_since: PropTypes.oneOfType([
        PropTypes.string,
        PropTypes.number
      ])
    })
  ),
  // Router objects required for navigation from the employee cards.
  match: PropTypes.object.isRequired,
  history: PropTypes.object.isRequired
};

export default EmployeeList;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.