repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
fuelingtheweb/prettier | tests/flow/recheck/e1.js | // @flow
// Flow tagged union: an Action is exactly one of the variants below,
// discriminated by its string-literal `type` field.
// NOTE(review): this appears to be a prettier/flow recheck fixture —
// confirm comments here do not affect the fixture's purpose.
export type Action =
  | { type: 'FOO' }
  | { type: 'BAR' }
  ;
// Module-level constant (presumably consumed by a sibling recheck test
// — TODO confirm).
export const LIFE = 42;
|
sharabeshj/course-editor-test | modules/student_groups/student_groups_tests.py | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Collect sets of students into groups."""
__author__ = '<NAME> (<EMAIL>)'
import datetime
import logging
import urllib
import zlib
from common import crypto
from common import resource
from common import users
from common import utils as common_utils
from controllers import sites
from models import courses
from models.data_sources import paginated_table
from models import models
from models import transforms
from modules.analytics import gradebook
from modules.analytics import student_aggregate
from modules.courses import constants as course_constants
from modules.courses import triggers_tests
from modules.i18n_dashboard import i18n_dashboard
from modules.student_groups import messages
from modules.student_groups import student_groups
from tests.functional import actions
from google.appengine.api import namespace_manager
AvailabilityRestHandler = student_groups.StudentGroupAvailabilityRestHandler
class StudentGroupsTestBase(actions.TestBase):
    """Shared fixture and REST-helper base class for student-group tests.

    setUp() creates a single test course (torn down in tearDown()), and
    the helper methods wrap the student-group and availability REST
    endpoints so subclasses can exercise them with plain Python values
    (dicts, lists, strings) instead of hand-built HTTP payloads.
    """

    ADMIN_EMAIL = '<EMAIL>'
    ADMIN_ASSISTANT_EMAIL = '<EMAIL>'
    STUDENT_EMAIL = '<EMAIL>'
    COURSE_NAME = 'student_groups_test'
    COURSE_TITLE = 'Title'
    NAMESPACE = 'ns_%s' % COURSE_NAME

    def setUp(self):
        # Create the test course and point all relative URLs at it.
        super(StudentGroupsTestBase, self).setUp()
        self.base = '/' + self.COURSE_NAME
        self.app_context = actions.simple_add_course(
            self.COURSE_NAME, self.ADMIN_EMAIL, self.COURSE_TITLE)
        self.STUDENT_LOWER = self.STUDENT_EMAIL.lower()

    def _grant_student_groups_permission_to_assistant(self):
        # Grant the assistant the edit-student-groups permission via a
        # role, so permission-gated handlers can be exercised as non-admin.
        with common_utils.Namespace(self.NAMESPACE):
            role_dto = models.RoleDTO(None, {
                'name': 'modify_student_groups_role',
                'users': [self.ADMIN_ASSISTANT_EMAIL],
                'permissions': {
                    student_groups.MODULE_NAME:
                        [student_groups.EDIT_STUDENT_GROUPS_PERMISSION]
                },
            })
            models.RoleDAO.save(role_dto)

    def _grant_availability_permission_to_assistant(self):
        # Same as above, but for the modify-course-availability permission.
        with common_utils.Namespace(self.NAMESPACE):
            role_dto = models.RoleDTO(None, {
                'name': 'modify_availability_role',
                'users': [self.ADMIN_ASSISTANT_EMAIL],
                'permissions': {
                    course_constants.MODULE_NAME:
                        [course_constants.MODIFY_AVAILABILITY_PERMISSION]
                },
            })
            models.RoleDAO.save(role_dto)

    def tearDown(self):
        sites.remove_course(self.app_context)
        super(StudentGroupsTestBase, self).tearDown()

    def _get_group(self, key):
        """GET one student group by key; returns the decoded JSON envelope."""
        response = self.get(
            student_groups.StudentGroupRestHandler.URL.lstrip('/') +
            '?key=' + str(key))
        self.assertEquals(200, response.status_int)
        return transforms.loads(response.body)

    def _put_group(self, key, name, description, xsrf_token=None):
        """PUT (create or update) a group; key=None creates a new one.

        A valid XSRF token is minted unless the caller supplies one
        (e.g. a deliberately bad token for negative tests).
        """
        if not xsrf_token:
            xsrf_token = crypto.XsrfTokenManager.create_xsrf_token(
                student_groups.StudentGroupRestHandler.ACTION)
        payload = {
            student_groups.StudentGroupDTO.NAME_PROPERTY: name,
            student_groups.StudentGroupDTO.DESCRIPTION_PROPERTY: description,
        }
        request = {
            'xsrf_token': xsrf_token,
            'key': str(key),
            'payload': transforms.dumps(payload),
        }
        response = self.put(
            student_groups.StudentGroupRestHandler.URL.lstrip('/'),
            {'request': transforms.dumps(request)})
        self.assertEquals(200, response.status_int)
        return transforms.loads(response.body)

    def _get_availability(self, key):
        """GET the availability settings for the group with this key."""
        response = self.get(
            AvailabilityRestHandler.URL.lstrip('/') + '?key=' + str(key))
        self.assertEquals(200, response.status_int)
        return transforms.loads(response.body)

    def _settings_with_defaults(self, default_availability,
                                course_availability, course_triggers,
                                element_settings, content_triggers):
        """Build an availability-settings dict, defaulting unset values."""
        if not course_availability:
            course_availability = default_availability
        if course_triggers is None:
            course_triggers = {}
        if element_settings is None:
            element_settings = []
        if content_triggers is None:
            content_triggers = []
        settings = {
            AvailabilityRestHandler.COURSE_AVAILABILITY:
                course_availability,
            AvailabilityRestHandler._ELEMENT_SETTINGS:
                element_settings,
            student_groups.StudentGroupDTO.CONTENT_TRIGGERS_PROPERTY:
                content_triggers,
        }
        # course_triggers are merged into the `settings` dict because that
        # is how 'course_start' and 'course_end' actually appear in the form
        # data that this is simulating. This is in contrast to how the
        # triggers eventually end up, being transformed by from_payload()
        # into a single list value of a 'course_triggers' key for saving via
        # set_into_settings().
        settings.update(course_triggers)
        return settings

    def _put_availability(self, key, members, xsrf_token=None,
                          course_availability=None, course_triggers=None,
                          element_settings=None, content_triggers=None):
        """PUT group-level availability settings and membership list."""
        if not xsrf_token:
            xsrf_token = crypto.XsrfTokenManager.create_xsrf_token(
                AvailabilityRestHandler.ACTION)
        group_settings = self._settings_with_defaults(
            student_groups.AVAILABILITY_NO_OVERRIDE,
            course_availability, course_triggers,
            element_settings, content_triggers)
        # The REST form sends members as one newline-separated string.
        group_settings[AvailabilityRestHandler._MEMBERS] = '\n'.join(members)
        payload = {
            AvailabilityRestHandler._STUDENT_GROUP: key,
            AvailabilityRestHandler._STUDENT_GROUP_SETTINGS: group_settings,
        }
        request = {
            'xsrf_token': xsrf_token,
            'key': str(key),
            'payload': transforms.dumps(payload),
        }
        response = self.put(
            AvailabilityRestHandler.URL.lstrip('/'),
            {'request': transforms.dumps(request)})
        self.assertEquals(200, response.status_int)
        return transforms.loads(response.body)

    # The student_groups module hijacks the handler for availability
    # settings at the overall course level so that we can show course and
    # group level settings on the same page. Verify that we pass through
    # and affect course level settings when we don't send a student_group
    # ID as part of the parameters.
    def _put_course_availability(self,
            course_availability=None, course_triggers=None,
            element_settings=None, content_triggers=None):
        """PUT course-level (not group-level) availability settings."""
        xsrf_token = crypto.XsrfTokenManager.create_xsrf_token(
            AvailabilityRestHandler.ACTION)
        payload = self._settings_with_defaults(
            # This default is the most common value to appear in test cases.
            courses.COURSE_AVAILABILITY_REGISTRATION_REQUIRED,
            course_availability, course_triggers,
            element_settings, content_triggers)
        # Blank group ID selects course-level rather than group settings.
        payload[AvailabilityRestHandler._STUDENT_GROUP] = ''
        request = {
            'xsrf_token': xsrf_token,
            'payload': transforms.dumps(payload),
        }
        response = self.put(
            AvailabilityRestHandler.URL.lstrip('/'),
            {'request': transforms.dumps(request)})
        self.assertEquals(200, response.status_int)
        return transforms.loads(response.body)

    def _delete_group(self, key, xsrf_token=None):
        """DELETE a group by key; returns the decoded JSON envelope."""
        if not xsrf_token:
            xsrf_token = crypto.XsrfTokenManager.create_xsrf_token(
                student_groups.StudentGroupRestHandler.ACTION)
        response = self.delete(
            student_groups.StudentGroupRestHandler.URL.lstrip('/') +
            '?%s' % urllib.urlencode({
                'key': str(key),
                'xsrf_token': xsrf_token,
            }))
        self.assertEquals(200, response.status_int)
        return transforms.loads(response.body)
class GroupLifecycleTests(StudentGroupsTestBase):
    """CRUD lifecycle, permission, and validation checks for groups."""

    def test_list_page_not_available_without_permission(self):
        actions.login(self.STUDENT_EMAIL)
        response = self.get('dashboard?action=%s' %
                            student_groups.StudentGroupListHandler.ACTION)
        # Unauthorized users are redirected back to the course page.
        self.assertEquals(302, response.status_int)
        self.assertEquals('http://localhost/' + self.COURSE_NAME,
                          response.location)

    def test_list_page_available_to_admin_assisstant(self):
        self._grant_student_groups_permission_to_assistant()
        actions.login(self.ADMIN_ASSISTANT_EMAIL)
        response = self.get('dashboard?action=%s' %
                            student_groups.StudentGroupListHandler.ACTION)
        self.assertEquals(200, response.status_int)
        self.assertIn(str(messages.STUDENT_GROUPS_DESCRIPTION), response.body)

    def test_list_page_with_no_groups(self):
        actions.login(self.ADMIN_EMAIL)
        response = self.get('dashboard?action=%s' %
                            student_groups.StudentGroupListHandler.ACTION)
        self.assertEquals(200, response.status_int)
        self.assertIn(str(messages.STUDENT_GROUPS_DESCRIPTION), response.body)
        self.assertIn('No items', response.body)

    def test_button_to_add_new_student_group(self):
        actions.login(self.ADMIN_EMAIL)
        response = self.get('dashboard?action=%s' %
                            student_groups.StudentGroupListHandler.ACTION)
        response = self.click(response, 'Add Group')
        self.assertEquals(200, response.status_int)
        self.assertIn(
            '<title>Course Builder > Title > Dashboard > '
            'Edit Student Group</title>', response.body)

    def test_link_to_edit_existing_group(self):
        actions.login(self.ADMIN_EMAIL)
        self._put_group(None, 'My Test Group', 'this is my group')
        response = self.get('dashboard?action=%s' %
                            student_groups.StudentGroupListHandler.ACTION)
        response = self.click(response, 'My Test Group')
        self.assertEquals(200, response.status_int)
        self.assertIn(
            '<title>Course Builder > Title > Dashboard > '
            'Edit Student Group</title>', response.body)

    def test_rest_not_available_without_permission(self):
        # GET, PUT and DELETE must all be denied for plain students.
        actions.login(self.STUDENT_EMAIL)
        response = self._get_group(12345)
        self.assertEquals(401, response['status'])
        self.assertEquals('Access denied.', response['message'])
        response = self._put_group(12345, 'Some Group', 'this is my group')
        self.assertEquals(401, response['status'])
        self.assertEquals('Access denied.', response['message'])
        response = self._delete_group(12345)
        self.assertEquals(401, response['status'])
        self.assertEquals('Access denied.', response['message'])

    def test_rest_with_permission(self):
        # With the granted role, the assistant can PUT/GET/DELETE groups.
        self._grant_student_groups_permission_to_assistant()
        actions.login(self.ADMIN_ASSISTANT_EMAIL)
        response = self._put_group(None, 'My New Group', 'this is my group')
        self.assertEquals(200, response['status'])
        self.assertEquals('Saved.', response['message'])
        payload = transforms.loads(response['payload'])
        group_id = payload['key']
        response = self._get_group(group_id)
        self.assertEquals(200, response['status'])
        self.assertEquals('OK.', response['message'])
        response = self._delete_group(group_id)
        self.assertEquals(200, response['status'])
        self.assertEquals('Deleted.', response['message'])

    def test_put_with_bad_xsrf_token(self):
        actions.login(self.ADMIN_EMAIL)
        response = self._put_group(None, 'My New Group', 'this is my group',
                                   'bad xsrf token')
        self.assertEquals(403, response['status'])
        self.assertEquals(
            'Bad XSRF token. Please reload the page and try again',
            response['message'])

    def test_delete_with_bad_xsrf_token(self):
        actions.login(self.ADMIN_EMAIL)
        response = self._delete_group(12345, 'bad xsrf token')
        self.assertEquals(403, response['status'])
        self.assertEquals(
            'Bad XSRF token. Please reload the page and try again',
            response['message'])

    def test_get_nonexistent_group(self):
        actions.login(self.ADMIN_EMAIL)
        response = self._get_group(1234)
        self.assertEquals(404, response['status'])
        self.assertEquals('Not found.', response['message'])

    def test_put_nonexistent_group(self):
        actions.login(self.ADMIN_EMAIL)
        response = self._put_group(1234, 'My Group', 'this is my group')
        self.assertEquals(404, response['status'])
        self.assertEquals('Not found.', response['message'])

    def test_delete_nonexistent_group(self):
        # Delete of a nonexistent group reports success (idempotent),
        # unlike GET/PUT which return 404.
        actions.login(self.ADMIN_EMAIL)
        response = self._delete_group(1234)
        self.assertEquals(200, response['status'])
        self.assertEquals('Deleted.', response['message'])

    def test_with_malformed_email(self):
        # Malformed member emails are rejected with a 400 and a message
        # naming the offending address.
        actions.login(self.ADMIN_EMAIL)
        response = self._put_group(None, 'My New Group', 'this is my group')
        group_id = transforms.loads(response['payload'])['key']
        response = self._put_availability(group_id, ['@domain.com'])
        self.assertEquals(400, response['status'])
        self.assertEquals(
            '"@domain.com" is not a valid email address.', response['message'])
        response = self._put_availability(group_id, ['@'])
        self.assertEquals(400, response['status'])
        self.assertEquals(
            '"@" is not a valid email address.', response['message'])
        response = self._put_availability(group_id, ['x@'])
        self.assertEquals(400, response['status'])
        self.assertEquals(
            '"x@" is not a valid email address.', response['message'])

    def test_lifecycle(self):
        """Create, read, update, delete: full CRUD pass over one group."""
        actions.login(self.ADMIN_EMAIL)
        # Make new group.
        response = self._put_group(None, 'My New Group', 'this is my group')
        self.assertEquals(200, response['status'])
        self.assertEquals('Saved.', response['message'])
        payload = transforms.loads(response['payload'])
        group_id = payload['key']
        # Verify contents.
        response = self._get_group(group_id)
        self.assertEquals(200, response['status'])
        self.assertEquals('OK.', response['message'])
        payload = transforms.loads(response['payload'])
        self.assertEquals(
            'My New Group',
            payload[student_groups.StudentGroupDTO.NAME_PROPERTY])
        self.assertEquals(
            'this is my group',
            payload[student_groups.StudentGroupDTO.DESCRIPTION_PROPERTY])
        # Change all fields.
        response = self._put_group(group_id, 'New Name',
                                   'there are many like it')
        # Verify changes.
        response = self._get_group(group_id)
        self.assertEquals(200, response['status'])
        self.assertEquals('OK.', response['message'])
        payload = transforms.loads(response['payload'])
        self.assertEquals(
            'New Name',
            payload[student_groups.StudentGroupDTO.NAME_PROPERTY])
        self.assertEquals(
            'there are many like it',
            payload[student_groups.StudentGroupDTO.DESCRIPTION_PROPERTY])
        # Delete.
        response = self._delete_group(group_id)
        self.assertEquals(200, response['status'])
        self.assertEquals('Deleted.', response['message'])
        # Verify get returns not-found response now.
        response = self._get_group(group_id)
        self.assertEquals(404, response['status'])
        self.assertEquals('Not found.', response['message'])

    def test_add_too_many_groups(self):
        actions.login(self.ADMIN_EMAIL)
        # Fill the course up to the maximum number of groups.
        for unused in xrange(
                student_groups.StudentGroupRestHandler.MAX_NUM_STUDENT_GROUPS):
            response = self._put_group(None, 'My New Group', 'this one is mine')
            self.assertEquals(response['status'], 200)
        # Save group ID of last group successfully added.
        group_id = transforms.loads(response['payload'])['key']
        # Verify that adding the next group fails.
        response = self._put_group(None, 'My New Group', 'this one is mine')
        self.assertEquals(response['status'], 403)
        self.assertEquals(
            response['message'],
            'Cannot create more groups; already have %s.' %
            student_groups.StudentGroupRestHandler.MAX_NUM_STUDENT_GROUPS)
        # Remove a pre-existing group and verify that we can now add another.
        self._delete_group(group_id)
        response = self._put_group(None, 'My New Group', 'this one is mine')
        self.assertEquals(response['status'], 200)
        self.assertEquals(response['message'], 'Saved.')

    def test_put_does_not_affect_sibling_groups(self):
        actions.login(self.ADMIN_EMAIL)
        response = self._put_group(None, 'Group One', 'group one')
        group_one_id = transforms.loads(response['payload'])['key']
        response = self._put_group(None, 'Group Two', 'group two')
        group_two_id = transforms.loads(response['payload'])['key']
        self._put_availability(group_one_id, ['<EMAIL>'])
        self._put_availability(group_two_id, ['<EMAIL>'])
        # Group one's member list must be unaffected by group two's PUT.
        response = self._get_availability(group_one_id)
        payload = transforms.loads(response['payload'])
        actual = payload[AvailabilityRestHandler._STUDENT_GROUP_SETTINGS][
            AvailabilityRestHandler._MEMBERS].split()
        group_one_email = '<EMAIL>'.lower()
        self.assertEquals([group_one_email], actual)
class UserIdLookupLifecycleTests(StudentGroupsTestBase):
    """Cleanup behavior of EmailToObfuscatedUserId mapping rows."""

    def test_immediate_removal(self):
        # Rows used only to translate emails to user IDs should not
        # linger once a group-membership PUT has completed.
        actions.login(self.ADMIN_EMAIL)
        put_response = self._put_group(
            None, 'My New Group', 'this is my group')
        new_group_id = transforms.loads(put_response['payload'])['key']
        self._put_availability(new_group_id, [self.STUDENT_EMAIL])
        with common_utils.Namespace(self.NAMESPACE):
            self.assertIsNone(
                student_groups.EmailToObfuscatedUserId.all().get())

    def test_cron_removal(self):
        # Insert one mapping row and confirm it is present.
        actions.login(self.ADMIN_EMAIL)
        with common_utils.Namespace(self.NAMESPACE):
            student_groups.EmailToObfuscatedUserId(
                user=users.get_current_user()).put()
            self.assertIsNotNone(
                student_groups.EmailToObfuscatedUserId.all().get())
        # Run the cron cleanup handler while NOT inside the namespace.
        cleanup = student_groups.EmailToObfuscatedUserIdCleanup
        try:
            saved_min_age = cleanup.MIN_AGE
            # Due to last_modified having a fractional seconds part and
            # cutoff only being in whole seconds, it can sometimes appear
            # that last_modified is later in the same whole second than
            # cutoff (whose truncated fractional seconds is zero); a
            # negative MIN_AGE sidesteps that race.
            cleanup.MIN_AGE = datetime.timedelta(seconds=-1)
            cleanup._for_testing_only_get()
        finally:
            cleanup.MIN_AGE = saved_min_age
        # The row should now have been reaped.
        with common_utils.Namespace(self.NAMESPACE):
            self.assertIsNone(
                student_groups.EmailToObfuscatedUserId.all().get())

    def test_cron_removal_with_no_work_to_do(self):
        # Cleanup over an empty table must simply not crash.
        student_groups.EmailToObfuscatedUserIdCleanup._for_testing_only_get()
class UserIdentityTests(StudentGroupsTestBase):
    """How group membership interacts with Student registration.

    Before a student registers, membership is held in a
    StudentGroupMembership row keyed by lowercased email.  On
    registration the group ID moves onto the Student record and the
    membership row is removed.
    """

    def test_add_group_then_register(self):
        actions.login(self.ADMIN_EMAIL)
        response = self._put_group(None, 'My New Group', 'this is my group')
        group_id = transforms.loads(response['payload'])['key']
        self._put_availability(group_id, [self.STUDENT_EMAIL])
        # Pre-registration: membership row exists; no Student yet.
        with common_utils.Namespace(self.NAMESPACE):
            membership = student_groups.StudentGroupMembership.all().get()
            self.assertEquals(self.STUDENT_LOWER, membership.key().name())
            self.assertEquals(group_id, membership.group_id)
            self.assertIsNone(models.Student.all().get())
        actions.login(self.STUDENT_EMAIL)
        actions.register(self, '<NAME>')
        # Post-registration: membership row replaced by Student.group_id.
        with common_utils.Namespace(self.NAMESPACE):
            self.assertIsNone(student_groups.StudentGroupMembership.all().get())
            student = models.Student.all().get()
            self.assertEquals(self.STUDENT_LOWER, student.email)
            self.assertEquals(group_id, student.group_id)

    def test_register_then_add_group(self):
        actions.login(self.STUDENT_EMAIL)
        actions.register(self, '<NAME>')
        with common_utils.Namespace(self.NAMESPACE):
            self.assertIsNone(student_groups.StudentGroupMembership.all().get())
            student = models.Student.all().get()
            self.assertEquals(self.STUDENT_LOWER, student.email)
            self.assertIsNone(student.group_id)
        actions.login(self.ADMIN_EMAIL)
        response = self._put_group(None, 'My New Group', 'this is my group')
        group_id = transforms.loads(response['payload'])['key']
        self._put_availability(group_id, [self.STUDENT_EMAIL])
        # An already-registered student gets group_id directly; no
        # membership row is created.
        with common_utils.Namespace(self.NAMESPACE):
            self.assertIsNone(student_groups.StudentGroupMembership.all().get())
            student = models.Student.all().get()
            self.assertEquals(self.STUDENT_LOWER, student.email)
            self.assertEquals(group_id, student.group_id)

    def test_add_group_then_register_mismatched_case(self):
        # Emails are matched case-insensitively.
        actions.login(self.ADMIN_EMAIL)
        response = self._put_group(None, 'My New Group', 'this is my group')
        group_id = transforms.loads(response['payload'])['key']
        self._put_availability(group_id, [self.STUDENT_EMAIL.upper()])
        actions.login(self.STUDENT_EMAIL.lower())
        actions.register(self, '<NAME>')
        with common_utils.Namespace(self.NAMESPACE):
            student = models.Student.all().get()
            self.assertEquals(self.STUDENT_EMAIL.lower(), student.email)
            self.assertEquals(group_id, student.group_id)

    def test_register_then_add_group_mismatched_case(self):
        actions.login(self.STUDENT_EMAIL.upper())
        actions.register(self, '<NAME>')
        actions.login(self.ADMIN_EMAIL)
        response = self._put_group(None, 'My New Group', 'this is my group')
        group_id = transforms.loads(response['payload'])['key']
        self._put_availability(group_id, [self.STUDENT_EMAIL])
        with common_utils.Namespace(self.NAMESPACE):
            self.assertIsNone(student_groups.StudentGroupMembership.all().get())
            student = models.Student.all().get()
            self.assertEquals(self.STUDENT_LOWER, student.email)
            self.assertEquals(group_id, student.group_id)

    def test_move_unregistered_student_to_new_group(self):
        actions.login(self.ADMIN_EMAIL)
        response = self._put_group(None, 'My New Group', 'this is my group')
        group_one_id = transforms.loads(response['payload'])['key']
        response = self._put_group(None, 'My New Group', 'this is my group')
        group_two_id = transforms.loads(response['payload'])['key']
        self._put_availability(group_one_id, [self.STUDENT_EMAIL])
        with common_utils.Namespace(self.NAMESPACE):
            membership = student_groups.StudentGroupMembership.all().get()
            self.assertEquals(self.STUDENT_LOWER, membership.key().name())
            self.assertEquals(group_one_id, membership.group_id)
            self.assertIsNone(models.Student.all().get())
        # Adding to a second group replaces the first membership.
        self._put_availability(group_two_id, [self.STUDENT_EMAIL])
        with common_utils.Namespace(self.NAMESPACE):
            membership = student_groups.StudentGroupMembership.all().get()
            self.assertEquals(self.STUDENT_LOWER, membership.key().name())
            self.assertEquals(group_two_id, membership.group_id)
            self.assertIsNone(models.Student.all().get())

    def test_move_registered_student_to_new_group(self):
        actions.login(self.STUDENT_EMAIL)
        actions.register(self, '<NAME>')
        actions.login(self.ADMIN_EMAIL)
        response = self._put_group(None, 'My New Group', 'this is my group')
        group_one_id = transforms.loads(response['payload'])['key']
        response = self._put_group(None, 'My New Group', 'this is my group')
        group_two_id = transforms.loads(response['payload'])['key']
        self._put_availability(group_one_id, [self.STUDENT_EMAIL])
        with common_utils.Namespace(self.NAMESPACE):
            self.assertIsNone(student_groups.StudentGroupMembership.all().get())
            student = models.Student.all().get()
            self.assertEquals(self.STUDENT_LOWER, student.email)
            self.assertEquals(group_one_id, student.group_id)
        self._put_availability(group_two_id, [self.STUDENT_EMAIL])
        with common_utils.Namespace(self.NAMESPACE):
            self.assertIsNone(student_groups.StudentGroupMembership.all().get())
            student = models.Student.all().get()
            self.assertEquals(self.STUDENT_LOWER, student.email)
            self.assertEquals(group_two_id, student.group_id)

    def test_move_unregistered_student_to_same_group(self):
        # Re-adding to the same group is a no-op, not an error.
        actions.login(self.ADMIN_EMAIL)
        response = self._put_group(None, 'My New Group', 'this is my group')
        group_one_id = transforms.loads(response['payload'])['key']
        response = self._put_group(None, 'My New Group', 'this is my group')
        group_two_id = transforms.loads(response['payload'])['key']
        self._put_availability(group_one_id, [self.STUDENT_EMAIL])
        with common_utils.Namespace(self.NAMESPACE):
            membership = student_groups.StudentGroupMembership.all().get()
            self.assertEquals(self.STUDENT_LOWER, membership.key().name())
            self.assertEquals(group_one_id, membership.group_id)
            self.assertIsNone(models.Student.all().get())
        self._put_availability(group_one_id, [self.STUDENT_EMAIL])
        with common_utils.Namespace(self.NAMESPACE):
            membership = student_groups.StudentGroupMembership.all().get()
            self.assertEquals(self.STUDENT_LOWER, membership.key().name())
            self.assertEquals(group_one_id, membership.group_id)
            self.assertIsNone(models.Student.all().get())

    def test_move_registered_student_to_same_group(self):
        actions.login(self.STUDENT_EMAIL)
        actions.register(self, '<NAME>')
        actions.login(self.ADMIN_EMAIL)
        response = self._put_group(None, 'My New Group', 'this is my group')
        group_one_id = transforms.loads(response['payload'])['key']
        response = self._put_group(None, 'My New Group', 'this is my group')
        group_two_id = transforms.loads(response['payload'])['key']
        self._put_availability(group_one_id, [self.STUDENT_EMAIL])
        with common_utils.Namespace(self.NAMESPACE):
            self.assertIsNone(student_groups.StudentGroupMembership.all().get())
            student = models.Student.all().get()
            self.assertEquals(self.STUDENT_LOWER, student.email)
            self.assertEquals(group_one_id, student.group_id)
        self._put_availability(group_one_id, [self.STUDENT_EMAIL])
        with common_utils.Namespace(self.NAMESPACE):
            self.assertIsNone(student_groups.StudentGroupMembership.all().get())
            student = models.Student.all().get()
            self.assertEquals(self.STUDENT_LOWER, student.email)
            self.assertEquals(group_one_id, student.group_id)

    def test_remove_unregistered_student_from_group(self):
        actions.login(self.ADMIN_EMAIL)
        response = self._put_group(None, 'My New Group', 'this is my group')
        group_id = transforms.loads(response['payload'])['key']
        self._put_availability(group_id, [self.STUDENT_EMAIL])
        with common_utils.Namespace(self.NAMESPACE):
            membership = student_groups.StudentGroupMembership.all().get()
            self.assertEquals(self.STUDENT_LOWER, membership.key().name())
            self.assertEquals(group_id, membership.group_id)
            self.assertIsNone(models.Student.all().get())
        # Emptying the member list removes the membership row entirely.
        self._put_availability(group_id, [])
        with common_utils.Namespace(self.NAMESPACE):
            self.assertIsNone(student_groups.StudentGroupMembership.all().get())
            self.assertIsNone(models.Student.all().get())

    def test_remove_registered_student_from_group(self):
        actions.login(self.STUDENT_EMAIL)
        actions.register(self, '<NAME>')
        actions.login(self.ADMIN_EMAIL)
        response = self._put_group(None, 'My New Group', 'this is my group')
        group_id = transforms.loads(response['payload'])['key']
        self._put_availability(group_id, [self.STUDENT_EMAIL])
        with common_utils.Namespace(self.NAMESPACE):
            self.assertIsNone(student_groups.StudentGroupMembership.all().get())
            student = models.Student.all().get()
            self.assertEquals(self.STUDENT_LOWER, student.email)
            self.assertEquals(group_id, student.group_id)
        self._put_availability(group_id, [])
        # Student remains registered but no longer belongs to any group.
        with common_utils.Namespace(self.NAMESPACE):
            self.assertIsNone(student_groups.StudentGroupMembership.all().get())
            student = models.Student.all().get()
            self.assertEquals(self.STUDENT_LOWER, student.email)
            self.assertIsNone(student.group_id)

    def test_in_group_user_signup_to_otherwise_private_course(self):
        actions.login(self.ADMIN_EMAIL)
        self._put_course_availability(
            course_availability=courses.COURSE_AVAILABILITY_PRIVATE)
        response = self._put_group(None, 'My New Group', 'this is my group')
        group_id = transforms.loads(response['payload'])['key']
        # The group overrides the private course to allow registration.
        self._put_availability(
            group_id, [self.STUDENT_EMAIL], course_availability=(
                courses.COURSE_AVAILABILITY_REGISTRATION_REQUIRED))
        actions.login(self.STUDENT_EMAIL)
        response = self.get('register')
        register_form = actions.get_form_by_action(response, 'register')
        register_form.set('form01', '<NAME>')
        response = self.submit(register_form, response)
        self.assertEquals(302, response.status_int)
        self.assertEquals('http://localhost/student_groups_test/course'
                          '#registration_confirmation', response.location)
        response = self.get(response.location)
        # Here, if user group membership does not move transactionally with
        # Student creation, we'd expect a 404. (b/27206132)
        self.assertEquals(200, response.status_int)
        self.assertIn('Thank you for registering for the course', response.body)
class AvailabilityLifecycleTests(StudentGroupsTestBase):
def _group_for_email(self, email):
with common_utils.Namespace(self.NAMESPACE):
m = student_groups.StudentGroupMembership.get_by_key_name(email)
return m.group_id if m else None
    def test_email_to_uid_conversion(self):
        # TODO(review): empty placeholder test — implement the email-to-UID
        # conversion check it names, or remove it.
        pass
    def test_add_and_remove_student_in_group(self):
        """Members can be added to and removed from a group via REST."""
        actions.login(self.ADMIN_EMAIL)
        self.assertIsNone(self._group_for_email(self.ADMIN_EMAIL))
        self.assertIsNone(self._group_for_email(self.STUDENT_EMAIL))
        response = self._put_group(None, 'My New Group', 'this is my group')
        group_id = transforms.loads(response['payload'])['key']
        self._put_availability(
            group_id, [self.STUDENT_EMAIL, self.ADMIN_EMAIL])
        # Verify REST response
        response = self._get_availability(group_id)
        payload = transforms.loads(response['payload'])
        expected = set([self.STUDENT_LOWER, self.ADMIN_EMAIL.lower()])
        actual = set(payload[AvailabilityRestHandler._STUDENT_GROUP_SETTINGS][
            AvailabilityRestHandler._MEMBERS].split())
        self.assertEquals(expected, actual)
        # Verify via DB layer access.
        self.assertEquals(group_id, self._group_for_email(self.ADMIN_EMAIL))
        self.assertEquals(group_id, self._group_for_email(self.STUDENT_LOWER))
        # Remove admin from group
        self._put_availability(group_id, [self.STUDENT_EMAIL])
        # Verify REST response
        response = self._get_availability(group_id)
        payload = transforms.loads(response['payload'])
        expected = set([self.STUDENT_LOWER])
        actual = set(payload[AvailabilityRestHandler._STUDENT_GROUP_SETTINGS][
            AvailabilityRestHandler._MEMBERS].split())
        self.assertEquals(expected, actual)
        # Verify via DB layer access.
        self.assertEquals(group_id, self._group_for_email(self.STUDENT_LOWER))
        self.assertIsNone(self._group_for_email(self.ADMIN_EMAIL))
def test_move_student_to_new_group(self):
actions.login(self.ADMIN_EMAIL)
self.assertIsNone(self._group_for_email(self.ADMIN_EMAIL))
response = self._put_group(None, 'Group One', 'this is my group')
group_one_id = transforms.loads(response['payload'])['key']
self._put_availability(group_one_id, [self.ADMIN_EMAIL])
self.assertEquals(group_one_id, self._group_for_email(self.ADMIN_EMAIL))
response = self._put_group(None, 'Group Two', 'this is another group')
group_two_id = transforms.loads(response['payload'])['key']
self._put_availability(group_two_id, [self.ADMIN_EMAIL])
self.assertEquals(group_two_id, self._group_for_email(self.ADMIN_EMAIL))
# Also verify that we didn't just get lucky finding group two; check
# that the count of records in StudentGroupMembership is exactly one.
with common_utils.Namespace(self.NAMESPACE):
records = list(student_groups.StudentGroupMembership.all().run())
self.assertEquals(1, len(records))
def test_remove_group_removes_group_membership(self):
actions.login(self.ADMIN_EMAIL)
self.assertIsNone(self._group_for_email(self.ADMIN_EMAIL))
response = self._put_group(None, 'Group One', 'this is my group')
group_id = transforms.loads(response['payload'])['key']
self._put_availability(group_id, [self.ADMIN_EMAIL])
self.assertEquals(group_id, self._group_for_email(self.ADMIN_EMAIL))
self._delete_group(group_id)
self.assertIsNone(self._group_for_email(self.ADMIN_EMAIL))
    def test_large_group_lifecycle(self):
        # Group operations with more than the number of entities that
        # can be handled in a single transaction.
        actions.login(self.ADMIN_EMAIL)
        # Add group w/ 50 members.
        emails = ['<EMAIL>' % i for i in xrange(50)]
        response = self._put_group(None, 'Big Group', 'lots of students here')
        group_id = transforms.loads(response['payload'])['key']
        self._put_availability(group_id, emails)
        # Verify content.
        response = self._get_availability(group_id)
        payload = transforms.loads(response['payload'])
        email_text = payload[AvailabilityRestHandler._STUDENT_GROUP_SETTINGS][
            AvailabilityRestHandler._MEMBERS]
        fetched_emails = sorted(email_text.split('\n'))
        expected_emails = [email.lower() for email in emails]
        self.assertEquals(expected_emails, fetched_emails)
        # Change membership: remove 25 users (odd i below 50), add 25 new
        # ones (even i in 50..98), and leave 25 the same (even i below 50).
        # NOTE(review): the original comment here said 250/250/250, which
        # does not match the xrange(0, 100, 2) list actually built below.
        emails = ['<EMAIL>' % i for i in xrange(0, 100, 2)]
        response = self._put_availability(group_id, emails)
        # Verify content
        response = self._get_availability(group_id)
        payload = transforms.loads(response['payload'])
        email_text = payload[AvailabilityRestHandler._STUDENT_GROUP_SETTINGS][
            AvailabilityRestHandler._MEMBERS]
        fetched_emails = sorted(email_text.split('\n'))
        expected_emails = [email.lower() for email in emails]
        self.assertEquals(expected_emails, fetched_emails)
        # Delete group; verify.
        self._delete_group(group_id)
        response = self._get_group(group_id)
        self.assertEquals(404, response['status'])
        # All items should be gone from the DB.
        with common_utils.Namespace(self.NAMESPACE):
            self.assertIsNone(student_groups.StudentGroupMembership.all().get())
            self.assertIsNone(student_groups.StudentGroupEntity.all().get())
    def test_large_group_too_big(self):
        actions.login(self.ADMIN_EMAIL)
        # Build one more member email than the handler's allowed maximum.
        # NOTE(review): the original comment said "50 members", which does
        # not match the MAX_NUM_MEMBERS + 1 list actually built here.
        emails = ['<EMAIL>' % i for i in xrange(
            student_groups.StudentGroupAvailabilityRestHandler.MAX_NUM_MEMBERS
            + 1)]
        response = self._put_group(None, 'Big Group', 'lots of students')
        group_id = transforms.loads(response['payload'])['key']
        response = self._put_availability(group_id, emails)
        self.assertEquals(400, response['status'])
        self.assertEquals(
            'A group may contain at most %d members.' %
            student_groups.StudentGroupAvailabilityRestHandler.MAX_NUM_MEMBERS,
            response['message'])
        # The failed PUT must not have partially saved any members.
        response = self._get_availability(group_id)
        payload = transforms.loads(response['payload'])
        email_text = payload[AvailabilityRestHandler._STUDENT_GROUP_SETTINGS][
            AvailabilityRestHandler._MEMBERS]
        self.assertEquals('', email_text)
def test_availability_can_set_zero_members(self):
actions.login(self.ADMIN_EMAIL)
response = self._put_group(None, 'Big Group', 'lots of students')
group_id = transforms.loads(response['payload'])['key']
response = self._put_availability(group_id, [])
self.assertEquals(200, response['status'])
    def test_course_availability_lifecycle(self):
        """Course-level availability override: default, set, and post-delete.

        Walks the full lifecycle: a new group reports AVAILABILITY_NO_OVERRIDE,
        an explicit override round-trips through the REST handler, and after
        the group is deleted both GET and PUT on its availability return 404.
        """
        actions.login(self.ADMIN_EMAIL)
        response = self._put_group(None, 'Big Group', 'lots of students')
        group_id = transforms.loads(response['payload'])['key']
        # Verify default availability: no override until one is set.
        response = self._get_availability(group_id)
        self.assertEquals(200, response['status'])
        self.assertEquals('OK.', response['message'])
        payload = transforms.loads(response['payload'])
        availability = payload[AvailabilityRestHandler._STUDENT_GROUP_SETTINGS][
            AvailabilityRestHandler.COURSE_AVAILABILITY]
        self.assertEquals(
            student_groups.AVAILABILITY_NO_OVERRIDE, availability)
        # Set availability to something non-default; verify it reads back.
        response = self._put_availability(group_id, [],
            course_availability=courses.AVAILABILITY_UNAVAILABLE)
        self.assertEquals(200, response['status'])
        self.assertEquals('Saved', response['message'])
        response = self._get_availability(group_id)
        self.assertEquals(200, response['status'])
        self.assertEquals('OK.', response['message'])
        payload = transforms.loads(response['payload'])
        availability = payload[AvailabilityRestHandler._STUDENT_GROUP_SETTINGS][
            AvailabilityRestHandler.COURSE_AVAILABILITY]
        self.assertEquals(courses.AVAILABILITY_UNAVAILABLE, availability)
        # Delete group; verify availability API responds with 404 for both
        # read and write.
        self._delete_group(group_id)
        response = self._get_availability(group_id)
        self.assertEquals(404, response['status'])
        self.assertEquals('Not found.', response['message'])
        response = self._put_availability(group_id, [])
        self.assertEquals(404, response['status'])
        self.assertEquals('Not found.', response['message'])
    def test_component_availability_lifecycle(self):
        """Per-unit/lesson overrides track, but are independent of, the course.

        Verifies that (1) a fresh group reports no override plus the course's
        own availability as the "default" field, (2) changing the underlying
        course availability changes only the default field, and (3) group
        overrides can be set opposite to the course settings.
        """
        actions.login(self.ADMIN_EMAIL)
        # Add a unit and a lesson.
        course = courses.Course(None, app_context=self.app_context)
        unit = course.add_unit()
        unit.availability = courses.AVAILABILITY_COURSE
        lesson = course.add_lesson(unit)
        course.save()
        response = self._put_group(None, 'Big Group', 'lots of students')
        group_id = transforms.loads(response['payload'])['key']
        # Verify default availability
        response = self._get_availability(group_id)
        payload = transforms.loads(response['payload'])
        settings = payload[AvailabilityRestHandler._STUDENT_GROUP_SETTINGS][
            AvailabilityRestHandler._ELEMENT_SETTINGS]
        unit_settings = common_utils.find(
            lambda e: str(e['id']) == str(unit.unit_id), settings)
        lesson_settings = common_utils.find(
            lambda e: str(e['id']) == str(lesson.lesson_id), settings)
        # unit default availability: no group override; default mirrors the
        # course-level setting (title-cased for display).
        self.assertEquals(
            student_groups.AVAILABILITY_NO_OVERRIDE,
            unit_settings['availability'])
        self.assertEquals(
            courses.AVAILABILITY_COURSE.title(),
            unit_settings[AvailabilityRestHandler._DEFAULT_AVAILABILITY])
        self.assertEquals(False, unit_settings['indent'])
        self.assertEquals('unit', unit_settings['type'])
        self.assertEquals('New Unit', unit_settings['name'])
        # lesson default availability
        self.assertEquals(student_groups.AVAILABILITY_NO_OVERRIDE,
                          lesson_settings['availability'])
        self.assertEquals(
            courses.AVAILABILITY_COURSE.title(),
            lesson_settings[AvailabilityRestHandler._DEFAULT_AVAILABILITY])
        self.assertEquals(True, lesson_settings['indent'])
        self.assertEquals('lesson', lesson_settings['type'])
        self.assertEquals('New Lesson', lesson_settings['name'])
        # Now, change underlying availability for unit and lesson.  Make unit
        # public and lesson private, so we can be sure these are independent
        # both of each other, and of our overrides.
        unit.availability = courses.AVAILABILITY_AVAILABLE
        lesson.availability = courses.AVAILABILITY_UNAVAILABLE
        course.save()
        response = self._get_availability(group_id)
        payload = transforms.loads(response['payload'])
        settings = payload[AvailabilityRestHandler._STUDENT_GROUP_SETTINGS][
            AvailabilityRestHandler._ELEMENT_SETTINGS]
        unit_settings = common_utils.find(
            lambda e: str(e['id']) == str(unit.unit_id), settings)
        lesson_settings = common_utils.find(
            lambda e: str(e['id']) == str(lesson.lesson_id), settings)
        # Group override still absent; only the reported defaults moved.
        self.assertEquals(student_groups.AVAILABILITY_NO_OVERRIDE,
                          unit_settings['availability'])
        self.assertEquals(
            courses.AVAILABILITY_AVAILABLE.title(),
            unit_settings[AvailabilityRestHandler._DEFAULT_AVAILABILITY])
        self.assertEquals(student_groups.AVAILABILITY_NO_OVERRIDE,
                          lesson_settings['availability'])
        self.assertEquals(
            courses.AVAILABILITY_UNAVAILABLE.title(),
            lesson_settings[AvailabilityRestHandler._DEFAULT_AVAILABILITY])
        # Set overrides at the group level to be opposite of the settings on
        # the base unit and lesson.
        response = self._put_availability(
            group_id, [], element_settings=[
                {'id': str(unit.unit_id),
                 'type': 'unit',
                 'availability': courses.AVAILABILITY_UNAVAILABLE},
                {'id': str(lesson.lesson_id),
                 'type': 'lesson',
                 'availability': courses.AVAILABILITY_AVAILABLE}])
        self.assertEquals(200, response['status'])
        response = self._get_availability(group_id)
        payload = transforms.loads(response['payload'])
        settings = payload[AvailabilityRestHandler._STUDENT_GROUP_SETTINGS][
            AvailabilityRestHandler._ELEMENT_SETTINGS]
        unit_settings = common_utils.find(
            lambda e: str(e['id']) == str(unit.unit_id), settings)
        lesson_settings = common_utils.find(
            lambda e: str(e['id']) == str(lesson.lesson_id), settings)
        # Overrides are now reported verbatim; defaults remain those of the
        # underlying course elements.
        self.assertEquals(
            courses.AVAILABILITY_UNAVAILABLE,
            unit_settings['availability'])
        self.assertEquals(
            courses.AVAILABILITY_AVAILABLE.title(),
            unit_settings[AvailabilityRestHandler._DEFAULT_AVAILABILITY])
        self.assertEquals(
            courses.AVAILABILITY_AVAILABLE,
            lesson_settings['availability'])
        self.assertEquals(
            courses.AVAILABILITY_UNAVAILABLE.title(),
            lesson_settings[AvailabilityRestHandler._DEFAULT_AVAILABILITY])
    def test_passthrough_to_course_settings_lifecycle(self):
        """With no groups at all, the handler mirrors plain course settings."""
        actions.login(self.ADMIN_EMAIL)
        # Add a unit and a lesson.
        course = courses.Course(None, app_context=self.app_context)
        unit = course.add_unit()
        unit.availability = courses.AVAILABILITY_COURSE
        lesson = course.add_lesson(unit)
        course.save()
        # NOTE: we add no groups, and we still expect passthrough to work.
        # Verify defaults.  (Well, not really default, but what we get when
        # setUp uses actions.simple_add_course() )
        response = self._get_availability('')
        self.assertEquals(200, response['status'])
        self.assertEquals('OK.', response['message'])
        payload = transforms.loads(response['payload'])
        self.assertEquals(
            payload['course_availability'],
            courses.COURSE_AVAILABILITY_REGISTRATION_OPTIONAL)
        settings = payload['element_settings']
        unit_settings = common_utils.find(
            lambda e: str(e['id']) == str(unit.unit_id), settings)
        lesson_settings = common_utils.find(
            lambda e: str(e['id']) == str(lesson.lesson_id), settings)
        self.assertEquals(
            courses.AVAILABILITY_COURSE, unit_settings['availability'])
        self.assertEquals(
            courses.AVAILABILITY_COURSE, lesson_settings['availability'])
        # Set to non-default; verify the change is visible via this handler.
        self._put_course_availability(
            element_settings=[
                {'id': str(unit.unit_id),
                 'type': 'unit',
                 'availability': courses.AVAILABILITY_UNAVAILABLE},
                {'id': str(lesson.lesson_id),
                 'type': 'lesson',
                 'availability': courses.AVAILABILITY_AVAILABLE}])
        response = self._get_availability('')
        self.assertEquals(200, response['status'])
        self.assertEquals('OK.', response['message'])
        payload = transforms.loads(response['payload'])
        self.assertEquals(
            payload['course_availability'],
            courses.COURSE_AVAILABILITY_REGISTRATION_REQUIRED)
        settings = payload['element_settings']
        unit_settings = common_utils.find(
            lambda e: str(e['id']) == str(unit.unit_id), settings)
        lesson_settings = common_utils.find(
            lambda e: str(e['id']) == str(lesson.lesson_id), settings)
        self.assertEquals(
            courses.AVAILABILITY_UNAVAILABLE, unit_settings['availability'])
        self.assertEquals(
            courses.AVAILABILITY_AVAILABLE, lesson_settings['availability'])
def test_set_availability_bad_xsrf_token(self):
actions.login(self.ADMIN_EMAIL)
response = self._put_group(None, 'Group One', 'this is my group')
group_id = transforms.loads(response['payload'])['key']
response = self._put_availability(
group_id, [], xsrf_token='not a valid XSRF token')
self.assertEquals(403, response['status'])
self.assertEquals(
'Bad XSRF token. Please reload the page and try again',
response['message'])
def test_set_availability_non_admin_with_no_permission(self):
actions.login(self.ADMIN_EMAIL)
response = self._put_group(None, 'Group One', 'this is my group')
group_id = transforms.loads(response['payload'])['key']
actions.login(self.ADMIN_ASSISTANT_EMAIL)
response = self._put_availability(group_id, [])
self.assertEquals(401, response['status'])
self.assertEquals('Access denied.', response['message'])
def test_set_availability_non_admin_with_permission(self):
actions.login(self.ADMIN_EMAIL)
response = self._put_group(None, 'Group One', 'this is my group')
group_id = transforms.loads(response['payload'])['key']
self._grant_student_groups_permission_to_assistant()
actions.login(self.ADMIN_ASSISTANT_EMAIL)
response = self._put_availability(group_id, [])
self.assertEquals(200, response['status'])
self.assertEquals('Saved', response['message'])
def test_get_course_whitelist_non_admin_shows_actual_whitelist(self):
actions.login(self.ADMIN_EMAIL)
response = self._put_group(None, 'Group One', 'this is my group')
group_id = transforms.loads(response['payload'])['key']
self._put_availability(
group_id, [self.ADMIN_EMAIL, self.ADMIN_ASSISTANT_EMAIL])
self._grant_student_groups_permission_to_assistant()
self._grant_availability_permission_to_assistant()
url = student_groups.StudentGroupAvailabilityRestHandler.URL.lstrip('/')
with actions.OverriddenEnvironment(
{'course': {'whitelist': 'FAKE CONTENT'}}):
response = self.get(url)
payload = transforms.loads(
transforms.loads(response.body)['payload'])
self.assertEquals(payload['whitelist'], 'FAKE CONTENT')
actions.login(self.ADMIN_ASSISTANT_EMAIL)
response = self.get(url)
payload = transforms.loads(
transforms.loads(response.body)['payload'])
self.assertEquals(payload['whitelist'], 'FAKE CONTENT')
class AvailabilityTests(StudentGroupsTestBase):
    """End-to-end tests of availability overrides as experienced by students.

    Each test manipulates course- and group-level availability via the REST
    handlers, then fetches course pages as different users to confirm the
    effective access (200 = visible, 302 = redirected to syllabus,
    404 = course entirely hidden).
    """

    COURSE_URL = 'http://localhost/%s/' % StudentGroupsTestBase.COURSE_NAME
    SYLLABUS_URL = COURSE_URL + 'course'
    LESSON_ONE_URL = COURSE_URL + 'unit?unit=1&lesson=2'
    LESSON_TWO_URL = COURSE_URL + 'unit?unit=3&lesson=4'
    IN_GROUP_STUDENT_EMAIL = '<EMAIL>'
    NON_GROUP_STUDENT_EMAIL = '<EMAIL>'

    def setUp(self):
        """Build a two-unit course and one group containing one student."""
        super(AvailabilityTests, self).setUp()
        self.course = courses.Course(None, app_context=self.app_context)
        self.unit_one = self.course.add_unit()
        self.unit_one.availability = courses.AVAILABILITY_COURSE
        self.lesson_one = self.course.add_lesson(self.unit_one)
        self.unit_two = self.course.add_unit()
        self.unit_two.availability = courses.AVAILABILITY_COURSE
        self.lesson_two = self.course.add_lesson(self.unit_two)
        self.course.save()
        actions.login(self.ADMIN_EMAIL)
        response = self._put_group(None, 'Group One', 'this is my group')
        self.group_id = transforms.loads(response['payload'])['key']
        self._put_availability(self.group_id, [self.IN_GROUP_STUDENT_EMAIL])

    def test_group_creation_defaults_pass_through_to_course(self):
        """A group with all-default settings does not alter course access."""
        actions.login(self.IN_GROUP_STUDENT_EMAIL)
        # Verify accessibility for a logged-in, not-yet-registered student
        # while the course is browsable.
        response = self.get(self.SYLLABUS_URL)
        self.assertEquals(response.status_int, 200)
        response = self.get(self.LESSON_ONE_URL)
        self.assertEquals(response.status_int, 200)
        response = self.get(self.LESSON_TWO_URL)
        self.assertEquals(response.status_int, 200)
        # Change course accessibility to require registration; verify
        # non-access.  Despite being in a group with default settings,
        # course-level settings still show through.
        actions.login(self.ADMIN_EMAIL)
        self._put_course_availability()
        actions.login(self.IN_GROUP_STUDENT_EMAIL)
        response = self.get(self.SYLLABUS_URL)
        self.assertEquals(response.status_int, 200)
        # Lessons redirect back to the syllabus until registration.
        response = self.get(self.LESSON_ONE_URL)
        self.assertEquals(response.status_int, 302)
        self.assertEquals(response.location, self.SYLLABUS_URL)
        response = self.get(self.LESSON_TWO_URL)
        self.assertEquals(response.status_int, 302)
        self.assertEquals(response.location, self.SYLLABUS_URL)
        # Register, should now be able to access.
        actions.register(self, '<NAME>')
        response = self.get(self.SYLLABUS_URL)
        self.assertEquals(response.status_int, 200)
        response = self.get(self.LESSON_ONE_URL)
        self.assertEquals(response.status_int, 200)
        response = self.get(self.LESSON_TWO_URL)
        self.assertEquals(response.status_int, 200)
        # Set course-level access on unit two, lesson two to private; only
        # that unit becomes inaccessible.
        actions.login(self.ADMIN_EMAIL)
        self._put_course_availability(
            element_settings=[
                {'id': str(self.unit_two.unit_id),
                 'type': 'unit',
                 'availability': courses.AVAILABILITY_UNAVAILABLE},
                {'id': str(self.lesson_two.lesson_id),
                 'type': 'lesson',
                 'availability': courses.AVAILABILITY_UNAVAILABLE}])
        actions.login(self.IN_GROUP_STUDENT_EMAIL)
        response = self.get(self.SYLLABUS_URL)
        self.assertEquals(response.status_int, 200)
        response = self.get(self.LESSON_ONE_URL)
        self.assertEquals(response.status_int, 200)
        response = self.get(self.LESSON_TWO_URL)
        self.assertEquals(response.status_int, 302)
        self.assertEquals(response.location, self.SYLLABUS_URL)

    def test_group_member_versus_nonmember(self):
        """Group overrides grant members access that non-members lack."""
        # Most-commonly expected use case.  Here, we're just verifying that
        # in-group users get different settings than non-group users, not
        # exhaustively verifying override properties: Make unit two generally
        # unavailable, but available to group members.
        self._put_course_availability(
            element_settings=[
                {'id': str(self.unit_two.unit_id),
                 'type': 'unit',
                 'availability': courses.AVAILABILITY_UNAVAILABLE},
                {'id': str(self.lesson_two.lesson_id),
                 'type': 'lesson',
                 'availability': courses.AVAILABILITY_UNAVAILABLE}])
        self._put_availability(
            self.group_id, [self.IN_GROUP_STUDENT_EMAIL],
            element_settings=[
                {'id': str(self.unit_two.unit_id),
                 'type': 'unit',
                 'availability': courses.AVAILABILITY_AVAILABLE},
                {'id': str(self.lesson_two.lesson_id),
                 'type': 'lesson',
                 'availability': courses.AVAILABILITY_AVAILABLE}])
        # Group member sees both lessons.
        actions.login(self.IN_GROUP_STUDENT_EMAIL)
        actions.register(self, '<NAME>')
        response = self.get(self.LESSON_ONE_URL)
        self.assertEquals(response.status_int, 200)
        response = self.get(self.LESSON_TWO_URL)
        self.assertEquals(response.status_int, 200)
        # Non-member is redirected away from the overridden lesson.
        actions.login(self.NON_GROUP_STUDENT_EMAIL)
        actions.register(self, '<NAME>')
        response = self.get(self.LESSON_ONE_URL)
        self.assertEquals(response.status_int, 200)
        response = self.get(self.LESSON_TWO_URL)
        self.assertEquals(response.status_int, 302)

    def test_course_availability_overrides(self):
        """A group's course-availability override trumps a private course."""
        # Register normal student before we make the course private.
        actions.login(self.NON_GROUP_STUDENT_EMAIL)
        actions.register(self, '<NAME>')
        # Make course setting say absolutely no-one can see anything.
        actions.login(self.ADMIN_EMAIL)
        self._put_course_availability(
            course_availability=courses.COURSE_AVAILABILITY_PRIVATE)
        # Verify that a logged-in, registered student can not even see the
        # syllabus of a private course.
        actions.login(self.NON_GROUP_STUDENT_EMAIL)
        response = self.get(self.SYLLABUS_URL, expect_errors=True)
        self.assertEquals(response.status_int, 404)
        response = self.get(self.LESSON_ONE_URL, expect_errors=True)
        self.assertEquals(response.status_int, 404)
        # Admin adds group, and student to group, with a reg-required
        # course-availability override.
        actions.login(self.ADMIN_EMAIL)
        response = self._put_group(None, 'My New Group', 'this is my group')
        group_id = transforms.loads(response['payload'])['key']
        self._put_availability(group_id, [self.STUDENT_EMAIL],
            course_availability=(
                courses.COURSE_AVAILABILITY_REGISTRATION_REQUIRED))
        # As student in group, login.  Should be able to see syllabus, but
        # not lesson since not yet registered.
        actions.login(self.STUDENT_EMAIL)
        response = self.get(self.SYLLABUS_URL, expect_errors=True)
        self.assertEquals(response.status_int, 200)
        response = self.get(self.LESSON_ONE_URL, expect_errors=True)
        self.assertEquals(response.status_int, 302)
        # Register; verify that override to reg-required and satisfying the
        # condition allows us to see course content, not just syllabus.
        actions.register(self, '<NAME>')
        response = self.get(self.LESSON_ONE_URL, expect_errors=True)
        self.assertEquals(response.status_int, 200)

    def test_unit_and_lesson_availability_overrides(self):
        """Element-level group overrides apply atop all-private elements."""
        # Make every unit and lesson private at the course level.
        self._put_course_availability(
            element_settings=[
                {'id': str(self.unit_one.unit_id),
                 'type': 'unit',
                 'availability': courses.AVAILABILITY_UNAVAILABLE},
                {'id': str(self.lesson_one.lesson_id),
                 'type': 'lesson',
                 'availability': courses.AVAILABILITY_UNAVAILABLE},
                {'id': str(self.unit_two.unit_id),
                 'type': 'unit',
                 'availability': courses.AVAILABILITY_UNAVAILABLE},
                {'id': str(self.lesson_two.lesson_id),
                 'type': 'lesson',
                 'availability': courses.AVAILABILITY_UNAVAILABLE}])
        response = self._put_group(None, 'My New Group', 'this is my group')
        group_id = transforms.loads(response['payload'])['key']
        # Unit one: override to 'course' (follows registration); unit two:
        # override to fully public.
        self._put_availability(
            group_id, [self.STUDENT_EMAIL],
            element_settings=[
                {'id': str(self.unit_one.unit_id),
                 'type': 'unit',
                 'availability': courses.AVAILABILITY_COURSE},
                {'id': str(self.lesson_one.lesson_id),
                 'type': 'lesson',
                 'availability': courses.AVAILABILITY_COURSE},
                {'id': str(self.unit_two.unit_id),
                 'type': 'unit',
                 'availability': courses.AVAILABILITY_AVAILABLE},
                {'id': str(self.lesson_two.lesson_id),
                 'type': 'lesson',
                 'availability': courses.AVAILABILITY_AVAILABLE}])
        # No course-level override from group, so we should see the
        # reg-required from the base course.  Both lessons marked
        # private in base course, so here we mark one public, and
        # one 'course', so one should be available w/o registration.
        actions.login(self.STUDENT_EMAIL)
        response = self.get(self.SYLLABUS_URL, expect_errors=True)
        self.assertEquals(response.status_int, 200)
        response = self.get(self.LESSON_ONE_URL, expect_errors=True)
        self.assertEquals(response.status_int, 302)
        response = self.get(self.LESSON_TWO_URL, expect_errors=True)
        self.assertEquals(response.status_int, 200)
        # Register; now lesson-one should also be available.
        actions.register(self, '<NAME>')
        response = self.get(self.LESSON_ONE_URL, expect_errors=True)
        self.assertEquals(response.status_int, 200)

    def test_course_and_element_overrides_combined(self):
        """Course- and element-level overrides compose for group members."""
        # Base course: fully private, every element private.
        self._put_course_availability(
            course_availability=courses.COURSE_AVAILABILITY_PRIVATE,
            element_settings=[{'id': str(self.unit_one.unit_id),
                               'type': 'unit',
                               'availability': courses.AVAILABILITY_UNAVAILABLE},
                              {'id': str(self.lesson_one.lesson_id),
                               'type': 'lesson',
                               'availability': courses.AVAILABILITY_UNAVAILABLE},
                              {'id': str(self.unit_two.unit_id),
                               'type': 'unit',
                               'availability': courses.AVAILABILITY_UNAVAILABLE},
                              {'id': str(self.lesson_two.lesson_id),
                               'type': 'lesson',
                               'availability': courses.AVAILABILITY_UNAVAILABLE}])
        response = self._put_group(None, 'My New Group', 'this is my group')
        group_id = transforms.loads(response['payload'])['key']
        # Group: reg-required course override plus per-element overrides.
        self._put_availability(
            group_id, [self.STUDENT_EMAIL], course_availability=(
                courses.COURSE_AVAILABILITY_REGISTRATION_REQUIRED),
            element_settings=[
                {'id': str(self.unit_one.unit_id),
                 'type': 'unit',
                 'availability': courses.AVAILABILITY_COURSE},
                {'id': str(self.lesson_one.lesson_id),
                 'type': 'lesson',
                 'availability': courses.AVAILABILITY_COURSE},
                {'id': str(self.unit_two.unit_id),
                 'type': 'unit',
                 'availability': courses.AVAILABILITY_AVAILABLE},
                {'id': str(self.lesson_two.lesson_id),
                 'type': 'lesson',
                 'availability': courses.AVAILABILITY_AVAILABLE}])
        # Group course override is reg-required; lesson two is public via the
        # element override, lesson one requires registration.
        actions.login(self.STUDENT_EMAIL)
        response = self.get(self.SYLLABUS_URL, expect_errors=True)
        self.assertEquals(response.status_int, 200)
        response = self.get(self.LESSON_ONE_URL, expect_errors=True)
        self.assertEquals(response.status_int, 302)
        response = self.get(self.LESSON_TWO_URL, expect_errors=True)
        self.assertEquals(response.status_int, 200)
        # Register; now lesson-one should also be available.
        actions.register(self, '<NAME>')
        response = self.get(self.LESSON_ONE_URL, expect_errors=True)
        self.assertEquals(response.status_int, 200)
class I18nTests(StudentGroupsTestBase):
    """Tests of translating student-group name/description into a locale.

    setUp registers a student whose preferred locale is German ('de'); tests
    then push translations through the i18n console REST handler and verify
    the translated (or untranslated fallback) text on the student profile.
    """

    COURSE_URL = 'http://localhost/%s/' % StudentGroupsTestBase.COURSE_NAME
    PROFILE_URL = COURSE_URL + 'student/home'
    TRANSLATE_URL = (COURSE_URL.rstrip('/') +
                     i18n_dashboard.TranslationConsoleRestHandler.URL)
    LOCALE = 'de'

    def setUp(self):
        """Enable the extra locale and give the student that preference."""
        super(I18nTests, self).setUp()
        # Add setting for additional language.
        actions.update_course_config(
            self.COURSE_NAME,
            {'extra_locales': [
                {'locale': self.LOCALE, 'availability': 'available'}]})
        actions.login(self.STUDENT_EMAIL)
        actions.register(self, '<NAME>')
        with common_utils.Namespace(self.NAMESPACE):
            prefs = models.StudentPreferencesDAO.load_or_default()
            prefs.locale = self.LOCALE
            models.StudentPreferencesDAO.save(prefs)

    def _put_translation(self, key, name, description):
        """PUT a name/description translation for the group resource `key`.

        Builds the translation-console payload the i18n REST handler expects:
        one section per translatable property, each with a single data item
        mapping the English source value to the given target value.
        """
        key = '%s:%s' % (str(key), self.LOCALE)
        payload = {
            'title': 'unused',
            'key': key,
            'source_locale': 'en_US',
            'target_locale': self.LOCALE,
            'sections': [
                {
                    'name': student_groups.StudentGroupDTO.NAME_PROPERTY,
                    'label': 'Name',
                    'type': 'string',
                    'source_value': '',
                    'data': [
                        {
                            'source_value': 'My New Group',
                            'target_value': name,
                            'verb': 1,
                            'old_source_value': '',
                            'changed': True
                        }]},
                {
                    'name': student_groups.StudentGroupDTO.DESCRIPTION_PROPERTY,
                    'label': 'Description',
                    'type': 'text',
                    'source_value': '',
                    'data': [
                        {
                            'source_value': 'this is my group',
                            'target_value': description,
                            'verb': 1,
                            'old_source_value': '',
                            'changed': True
                        }]},
            ],
        }
        request_dict = {
            'key': key,
            'xsrf_token': crypto.XsrfTokenManager.create_xsrf_token(
                i18n_dashboard.TranslationConsoleRestHandler.XSRF_TOKEN_NAME),
            'payload': transforms.dumps(payload),
            'validate': False}
        response = self.put(
            self.TRANSLATE_URL, {'request': transforms.dumps(request_dict)})
        response = transforms.loads(response.body)
        self.assertEquals(200, response['status'])
        self.assertEquals('Saved.', response['message'])

    def _verify_progress(self, key, expected_status):
        """Assert the i18n progress state recorded for `key` in our locale."""
        with common_utils.Namespace(self.NAMESPACE):
            progress_dto = i18n_dashboard.I18nProgressDAO.load(str(key))
            self.assertEquals(expected_status,
                              progress_dto.get_progress(self.LOCALE))

    def test_translation_event_flow(self):
        """Saving a group drives i18n progress NOT_STARTED -> DONE -> IN_PROGRESS."""
        actions.login(self.ADMIN_EMAIL)
        response = self._put_group(None, 'My New Group', 'this is my group')
        group_id = transforms.loads(response['payload'])['key']
        # Verify that saving the student group DAO implicitly starts a job
        # to buff up I18N progress.
        key = resource.Key(student_groups.ResourceHandlerStudentGroup.TYPE,
                           group_id)
        self.execute_all_deferred_tasks()
        self._verify_progress(key, i18n_dashboard.I18nProgressDTO.NOT_STARTED)
        # Provide a translation; verify state change.
        self._put_translation(key, 'MY NEW GROUP', 'THIS IS MY GROUP')
        self.execute_all_deferred_tasks()
        self._verify_progress(key, i18n_dashboard.I18nProgressDTO.DONE)
        # Now, change the original group and save; verify that we notice
        # the change via the progress changing from DONE to IN_PROGRESS.
        self._put_group(group_id, 'A New Name', 'a new description')
        self.execute_all_deferred_tasks()
        self._verify_progress(key, i18n_dashboard.I18nProgressDTO.IN_PROGRESS)

    def _verify_profile_content(self, expected_name, expected_description):
        """Check group name/description on the profile page.

        Pass None for either expectation to assert the corresponding element
        is entirely absent from the page.
        """
        response = self.get(self.PROFILE_URL)
        soup = self.parse_html_string_to_soup(response.body)
        name_p = soup.select('#student-group-name')
        description_p = soup.select('#student-group-description')
        if expected_name is None:
            self.assertEquals([], name_p)
        else:
            self.assertEquals(expected_name, name_p[0].text)
        if expected_description is None:
            self.assertEquals([], description_p)
        else:
            self.assertEquals(expected_description, description_p[0].text)

    def test_profile_with_translations(self):
        """Profile shows nothing, then English, then translated group text."""
        # No group -> No group content on profile page.
        actions.login(self.STUDENT_EMAIL)
        self._verify_profile_content(None, None)
        # Group exists, but student not in group -> No group content on profile
        actions.login(self.ADMIN_EMAIL)
        response = self._put_group(None, 'My New Group', 'this is my group')
        group_id = transforms.loads(response['payload'])['key']
        self.execute_all_deferred_tasks()
        actions.login(self.STUDENT_EMAIL)
        self._verify_profile_content(None, None)
        # Student in group sees name/descr.  No translations yet, so sees
        # untranslated (lowercase) version.
        actions.login(self.ADMIN_EMAIL)
        self._put_availability(group_id, [self.STUDENT_EMAIL])
        self.execute_all_deferred_tasks()
        actions.login(self.STUDENT_EMAIL)
        self._verify_profile_content('My New Group', 'this is my group')
        # Provide a translation; verify the profile now shows it.
        actions.login(self.ADMIN_EMAIL)
        key = resource.Key(student_groups.ResourceHandlerStudentGroup.TYPE,
                           group_id)
        self._put_translation(key, 'MY NEW GROUP', 'THIS IS MY GROUP')
        self.execute_all_deferred_tasks()
        actions.login(self.STUDENT_EMAIL)
        self._verify_profile_content('MY NEW GROUP', 'THIS IS MY GROUP')

    def test_i18n_title(self):
        """One-off title translation honors the request's current locale."""
        actions.login(self.ADMIN_EMAIL)
        response = self._put_group(None, 'My New Group', 'this is my group')
        group_id = transforms.loads(response['payload'])['key']
        self._put_availability(group_id, [self.STUDENT_EMAIL])
        self.execute_all_deferred_tasks()
        key = resource.Key(student_groups.ResourceHandlerStudentGroup.TYPE,
                           group_id)
        self._put_translation(key, 'MY NEW GROUP', 'THIS IS MY GROUP')
        self.execute_all_deferred_tasks()
        actions.login(self.STUDENT_EMAIL)
        # Verify that one-off title translation also works.  Locale state is
        # restored in the finally block so other tests are unaffected.
        try:
            sites.set_path_info('/' + self.COURSE_NAME)
            ctx = sites.get_course_for_current_request()
            save_locale = ctx.get_current_locale()
            # Untranslated
            ctx.set_current_locale(None)
            i18n_title = str(
                student_groups.TranslatableResourceStudentGroups.get_i18n_title(
                    key))
            self.assertEquals('My New Group', i18n_title)
            # Translated
            ctx.set_current_locale(self.LOCALE)
            i18n_title = str(
                student_groups.TranslatableResourceStudentGroups.get_i18n_title(
                    key))
            self.assertEquals('MY NEW GROUP', i18n_title)
        finally:
            ctx.set_current_locale(save_locale)
            sites.unset_path_info()

    def test_with_blank_description(self):
        """Blank descriptions stay hidden, translated or not."""
        actions.login(self.ADMIN_EMAIL)
        response = self._put_group(None, 'My New Group', '')
        group_id = transforms.loads(response['payload'])['key']
        self._put_availability(group_id, [self.STUDENT_EMAIL])
        self.execute_all_deferred_tasks()
        # Student sees name, but not description.
        actions.login(self.STUDENT_EMAIL)
        self._verify_profile_content('My New Group', None)
        # Provide a translation for name but not descr; verify student view.
        actions.login(self.ADMIN_EMAIL)
        key = resource.Key(student_groups.ResourceHandlerStudentGroup.TYPE,
                           group_id)
        self._put_translation(key, 'MY NEW GROUP', '')
        self.execute_all_deferred_tasks()
        actions.login(self.STUDENT_EMAIL)
        self._verify_profile_content('MY NEW GROUP', None)
        # Set nonblank en_US description; the existing blank translation
        # still suppresses it for this locale (description stays hidden).
        actions.login(self.ADMIN_EMAIL)
        response = self._put_group(group_id, 'My New Group', 'this is my group')
        actions.login(self.STUDENT_EMAIL)
        self._verify_profile_content('MY NEW GROUP', None)
        # Set translation to explicitly blank; verify that this overrides.
        actions.login(self.ADMIN_EMAIL)
        self._put_translation(key, 'MY NEW GROUP', '')
        self.execute_all_deferred_tasks()
        actions.login(self.STUDENT_EMAIL)
        self._verify_profile_content('MY NEW GROUP', None)
class AggregateEventTests(actions.TestBase):
    """Tests that group membership is folded into student aggregate data."""

    ADMIN_EMAIL = '<EMAIL>'
    STUDENT_EMAIL = '<EMAIL>'
    COURSE_NAME = 'test_course'
    NAMESPACE = 'ns_%s' % COURSE_NAME
    GROUP_NAME = 'A Test Group'

    def setUp(self):
        """Create a course, register a student, and make an empty group."""
        super(AggregateEventTests, self).setUp()
        self.base = '/' + self.COURSE_NAME
        self.app_context = actions.simple_add_course(
            self.COURSE_NAME, self.ADMIN_EMAIL, 'Title')
        actions.login(self.STUDENT_EMAIL)
        actions.register(self, '<NAME>')
        with common_utils.Namespace(self.NAMESPACE):
            new_group = student_groups.StudentGroupDAO.create_new(
                {student_groups.StudentGroupDTO.NAME_PROPERTY: self.GROUP_NAME})
            self.group_id = new_group.id

    def tearDown(self):
        """Remove the course added in setUp."""
        sites.remove_course(self.app_context)
        super(AggregateEventTests, self).tearDown()

    def _post_student_event(self):
        """Record one 'enter-page' event as the student.

        Event recording is normally off; temporarily enable it via the
        environment override so the POST is persisted.
        """
        actions.login(self.STUDENT_EMAIL)
        with actions.OverriddenEnvironment({'course': {
                'can_record_student_events': True}}):
            self.post('rest/events', {
                'request': transforms.dumps({
                    'xsrf_token': crypto.XsrfTokenManager.create_xsrf_token(
                        'event-post'),
                    'source': 'enter-page',
                    'payload': transforms.dumps({
                        'location': 'https://localhost:8081/test_course',
                    })
                })
            })

    def _run_aggregator_job(self):
        """Run the student-aggregate map/reduce job to completion."""
        job = student_aggregate.StudentAggregateGenerator(self.app_context)
        job.submit()
        self.execute_all_deferred_tasks()

    def test_student_group_in_aggregate(self):
        """Aggregate omits group data for non-members, includes it for members."""
        self._post_student_event()
        # Verify students not in groups get no content for student group section
        with common_utils.Namespace(self.NAMESPACE):
            self._run_aggregator_job()
            entry = student_aggregate.StudentAggregateEntity.all().get()
        # Aggregate payload is zlib-compressed JSON.
        content = transforms.loads(zlib.decompress(entry.data))
        self.assertNotIn(student_groups.AddToStudentAggregate.SECTION, content)
        # Verify students in groups get accurate ID, name for group
        with common_utils.Namespace(self.NAMESPACE):
            student_groups.StudentGroupMembership.set_members(
                self.group_id, [self.STUDENT_EMAIL])
            self._run_aggregator_job()
            entry = student_aggregate.StudentAggregateEntity.all().get()
        content = transforms.loads(zlib.decompress(entry.data))
        self.assertEquals(
            self.group_id,
            content[
                student_groups.AddToStudentAggregate.SECTION][
                    student_groups.AddToStudentAggregate.ID_FIELD])
        self.assertEquals(
            self.GROUP_NAME,
            content[
                student_groups.AddToStudentAggregate.SECTION][
                    student_groups.AddToStudentAggregate.NAME_FIELD])
class OverrideTests(actions.TestBase):
    """Unit tests of StudentGroupDTO's get/set/remove_override API."""

    def test_override_defaults(self):
        """An unset override returns None, or the supplied default."""
        group = student_groups.StudentGroupDTO(None, {})
        self.assertIsNone(group.get_override(['a']))
        self.assertEquals(123, group.get_override(['a'], 123))
        self.assertEquals(345, group.get_override(['a'], 345))

    def test_override_in_memory_lifecycle(self):
        """Set, overwrite, and remove a single override in memory."""
        group = student_groups.StudentGroupDTO(None, {})
        self.assertIsNone(group.get_override(['a']))
        group.set_override(['a'], 123)
        self.assertEquals(123, group.get_override(['a']))
        group.set_override(['a'], 456)
        self.assertEquals(456, group.get_override(['a']))
        group.remove_override(['a'])
        self.assertIsNone(group.get_override(['a']))

    def test_save_restore(self):
        """Flat overrides survive a save/load round trip unchanged."""
        group = student_groups.StudentGroupDAO.create_new()
        for path, value in ((['a'], 123), (['b'], 345)):
            group.set_override(path, value)
        student_groups.StudentGroupDAO.save(group)
        reloaded = student_groups.StudentGroupDAO.load(group.id)
        self.assertEquals(group.dict, reloaded.dict)

    def test_nested_settings_load_store(self):
        """Nested override paths survive a save/load round trip unchanged."""
        group = student_groups.StudentGroupDAO.create_new()
        for path, value in (
                (['a'], 123),
                (['b', 'c', 'd'], 234),
                (['b', 'c', 'e'], 345),
                (['b', 'd'], 456),
                (['b', 'e'], 567)):
            group.set_override(path, value)
        student_groups.StudentGroupDAO.save(group)
        reloaded = student_groups.StudentGroupDAO.load(group.id)
        self.assertEquals(group.dict, reloaded.dict)

    def test_nested_lifecycle(self):
        """Removing nested leaves prunes empty parents; siblings survive."""
        group = student_groups.StudentGroupDAO.create_new({})
        for path, value in (
                (['a'], 123),
                (['b', 'c', 'd'], 234),
                (['b', 'c', 'e'], 345),
                (['b', 'd'], 456),
                (['b', 'e'], 567)):
            group.set_override(path, value)
        self.assertEquals(group.get_override(['b', 'c']), {'d': 234, 'e': 345})
        group.remove_override(['b', 'c', 'd'])
        self.assertEquals(group.get_override(['b', 'c']), {'e': 345})
        group.remove_override(['b', 'c', 'e'])
        # Removing the last leaf removes the now-empty 'c' subtree...
        self.assertIsNone(group.get_override(['b', 'c']))
        # ...but leaves the sibling entries under 'b' intact.
        self.assertEquals(group.get_override(['b', 'd']), 456)
        self.assertEquals(group.get_override(['b', 'e']), 567)
        group.remove_override(['b'])
        self.assertIsNone(group.get_override(['b']))
class GradebookTests(StudentGroupsTestBase):
IN_GROUP_STUDENT_EMAIL = '<EMAIL>'
NON_GROUP_STUDENT_EMAIL = '<EMAIL>'
    def setUp(self):
        """Create a question and an assessment referencing it in our course."""
        super(GradebookTests, self).setUp()
        # Switch into the course namespace for entity creation; restored in
        # tearDown.
        self.old_namespace = namespace_manager.get_namespace()
        namespace_manager.set_namespace(self.NAMESPACE)
        self.question_id = models.QuestionEntity(
            data=u'{"description": "a", "question": "aa"}').put().id()
        # Fixed instance ID lets _post_event and the question HTML agree.
        self.instance_id = "6YXFKKxFTddd"
        course = courses.Course(None, app_context=self.app_context)
        self.assessment = course.add_assessment()
        self.assessment.title = 'Top-Level Assessment'
        self.assessment.html_content = (
            '<question quid="%s" instanceid="%s">' % (
                self.question_id, self.instance_id))
        course.save()
def tearDown(self):
# Clean up app_context.
namespace_manager.set_namespace(self.old_namespace)
super(GradebookTests, self).tearDown()
def _post_event(self):
answers = {
"version":"1.5",
"individualScores": {
self.instance_id: 1,
},
"containedTypes": {
self.instance_id: "SaQuestion"
},
"answers":{
self.instance_id: "b"},
"quids":{
self.instance_id: str(self.question_id)
},
"rawScore": 1,
"totalWeight": 1,
"percentScore": 100,
self.instance_id: {
"response": "b"}
}
xsrf_token = crypto.XsrfTokenManager.create_xsrf_token(
'assessment-post')
payload = {
'assessment_type': self.assessment.unit_id,
'score': '100.00',
'answers': transforms.dumps(answers),
'xsrf_token': xsrf_token,
}
response = self.post('answer', payload)
self.assertEquals(200, response.status_int)
def _get_gradebook_data(self, data_filter=None):
data_source_token = paginated_table._DbTableContext._build_secret(
{'data_source_token': 'xyzzy'})
parameters = {
'page_number': 0,
'chunk_size': 25,
'data_source_token': data_source_token,
}
if data_filter:
parameters['filters'] = data_filter
response = self.post('rest/data/raw_student_answers/items', parameters)
result = transforms.loads(response.body)
return result.get('data')
def test_no_data(self):
# Here, just looking to see that we don't get exceptions.
actions.login(self.ADMIN_EMAIL)
self.assertIsNone(self._get_gradebook_data())
self.assertIsNone(self._get_gradebook_data('student_group='))
self.assertIsNone(self._get_gradebook_data('student_group=1234'))
def test_filtering_with_no_groups_created(self):
actions.login(self.STUDENT_EMAIL)
actions.register(self, '<NAME>')
self._post_event()
gradebook.RawAnswersGenerator(self.app_context).submit()
self.execute_all_deferred_tasks()
actions.login(self.ADMIN_EMAIL)
self.assertEqual(1, len(self._get_gradebook_data()))
self.assertEqual(1, len(self._get_gradebook_data('student_group=')))
self.assertEqual(0, len(self._get_gradebook_data('student_group=3')))
def test_filtering_with_groups(self):
actions.login(self.ADMIN_EMAIL)
response = self._put_group(None, 'My New Group', '')
group_id = transforms.loads(response['payload'])['key']
self._put_availability(group_id, [self.IN_GROUP_STUDENT_EMAIL])
actions.login(self.IN_GROUP_STUDENT_EMAIL)
actions.register(self, '<NAME>')
self._post_event()
actions.login(self.NON_GROUP_STUDENT_EMAIL)
actions.register(self, '<NAME>')
self._post_event()
actions.login(self.ADMIN_EMAIL)
gradebook.RawAnswersGenerator(self.app_context).submit()
self.execute_all_deferred_tasks()
self.assertEqual(3, len(self._get_gradebook_data()))
data = self._get_gradebook_data('student_group=')
self.assertEquals(2, len(data))
# Order cannot be assumed, so convert to a set of email addresses.
all_emails = frozenset([datum['user_email'] for datum in data])
non_group_email = self.NON_GROUP_STUDENT_EMAIL.lower()
self.assertIn(non_group_email, all_emails)
in_group_email = self.IN_GROUP_STUDENT_EMAIL.lower()
self.assertIn(in_group_email, all_emails)
data = self._get_gradebook_data('student_group=%s' % group_id)
self.assertEquals(1, len(data))
self.assertEquals(in_group_email, data[0]['user_email'])
data = self._get_gradebook_data('student_group=999')
self.assertEquals(0, len(data))
def test_user_changing_groups(self):
# Add event with no student_group_id.
actions.login(self.IN_GROUP_STUDENT_EMAIL)
actions.register(self, '<NAME>')
self._post_event()
# Add a student group.
actions.login(self.ADMIN_EMAIL)
response = self._put_group(None, 'My New Group', '')
group_id = transforms.loads(response['payload'])['key']
self._put_availability(group_id, [self.IN_GROUP_STUDENT_EMAIL])
# Add event with this group.
actions.login(self.IN_GROUP_STUDENT_EMAIL)
self._post_event()
# Another group, and move user to that group.
actions.login(self.ADMIN_EMAIL)
response = self._put_group(None, 'Another Group', '')
group_id = transforms.loads(response['payload'])['key']
self._put_availability(group_id, [self.IN_GROUP_STUDENT_EMAIL])
# Add event with different group.
actions.login(self.IN_GROUP_STUDENT_EMAIL)
self._post_event()
actions.login(self.ADMIN_EMAIL)
gradebook.RawAnswersGenerator(self.app_context).submit()
self.execute_all_deferred_tasks()
self.assertEqual(5, len(self._get_gradebook_data()))
self.assertEquals(3, len(self._get_gradebook_data('student_group=')))
data = self._get_gradebook_data('student_group=%s' % group_id)
self.assertEquals(1, len(data))
in_group_email = self.IN_GROUP_STUDENT_EMAIL.lower()
self.assertEquals(in_group_email, data[0]['user_email'])
class CourseStartEndDatesTests(triggers_tests.MilestoneTriggerTestsMixin,
StudentGroupsTestBase):
LOG_LEVEL = logging.DEBUG
COURSE_NAME = 'student_groups_course_dates_test'
COURSE_TITLE = 'Student Groups Course Dates Tests'
NAMESPACE = 'ns_%s' % COURSE_NAME
# COURSE_UNUSED_AVAIL is an alias for REGISTRATION_OPTIONAL and is not
# used by other triggers tests (hence "UNUSED" in the name).
TEST_AVAIL = triggers_tests.MilestoneTriggerTestsMixin.COURSE_UNUSED_AVAIL
SGCOT = student_groups.CourseOverrideTrigger
def setUp(self):
triggers_tests.MilestoneTriggerTestsMixin.setUp(self)
StudentGroupsTestBase.setUp(self)
self.course = courses.Course(None, app_context=self.app_context)
def tearDown(self):
self.clear_course_start_end_dates(
self.app_context.get_environ(), self.course)
StudentGroupsTestBase.tearDown(self)
def _create_group(self, name, description, student_email=None):
"""Creates a new student group."""
students = [student_email] if student_email else []
actions.login(self.ADMIN_EMAIL)
response = self._put_group(None, name, description)
group_id = transforms.loads(response['payload'])['key']
self._put_availability(
group_id, students, course_availability=self.TEST_AVAIL)
if student_email:
self._check_student_in_group(student_email, group_id)
return group_id
def _check_registered_student(self, student_email, group_id=None):
expected_email = student_email.lower()
with common_utils.Namespace(self.NAMESPACE):
student = models.Student.all().get()
self.assertEquals(expected_email, student.email)
if group_id is not None:
self.assertEquals(group_id, student.group_id)
else:
self.assertFalse(student.group_id)
def _check_no_students_registered(self):
with common_utils.Namespace(self.NAMESPACE):
self.assertIsNone(models.Student.all().get())
def _check_student_in_group(self, student_email, group_id):
expected_email = student_email.lower()
with common_utils.Namespace(self.NAMESPACE):
membership = student_groups.StudentGroupMembership.all().get()
self.assertEquals(expected_email, membership.key().name())
self.assertEquals(group_id, membership.group_id)
def _check_no_students_in_groups(self):
with common_utils.Namespace(self.NAMESPACE):
self.assertIsNone(
student_groups.StudentGroupMembership.all().get())
def _set_group_start_end_dates(self, start_date, end_date, group_id):
"""Sets start/end dates for specified student group ID."""
with common_utils.Namespace(self.NAMESPACE):
dto_to_save = student_groups.StudentGroupDAO.load(group_id)
dto_to_save.start_date = start_date
dto_to_save.end_date = end_date
student_groups.StudentGroupDAO.save(dto_to_save)
def _check_group_start_end_dates(self, start_date, end_date, group_id):
with common_utils.Namespace(self.NAMESPACE):
dto = student_groups.StudentGroupDAO.load(group_id)
self.assertEquals(start_date, dto.start_date)
self.assertEquals(end_date, dto.end_date)
def _log_in_and_register_student(self, student_email, group_id,
student_name='<NAME>'):
""" Logs in using student_email and registers."""
actions.login(student_email)
actions.register(self, student_name)
self._check_registered_student(student_email, group_id=group_id)
def test_date_modified_in_course_env(self):
# Create a new student group, but initially containing no students.
actions.login(self.ADMIN_EMAIL, is_admin=True)
self.group_id = self._create_group('Modified Group', 'modified group')
self._check_no_students_in_groups()
pre_save_env = self.app_context.get_environ()
self.check_course_start_end_dates(None, None, pre_save_env)
self.set_course_start_end_dates(self.past_hour_text,
self.next_hour_text, pre_save_env, self.course)
# Set start_date and end_date values different for student group.
self._set_group_start_end_dates(
self.LAST_CENTURY, self.FAR_FUTURE, self.group_id)
# _run_env_post_copy_hooks is run, but logged-in student is not in
# a student group, so should see values in Course 'course' dict.
actions.login(self.STUDENT_EMAIL)
post_save_env = courses.Course.get_environ(self.app_context)
self.check_course_start_end_dates(
self.past_hour_text, self.next_hour_text, post_save_env)
# Add student to previously created student group and make the course
# availability override non-private.
actions.login(self.ADMIN_EMAIL, is_admin=True)
self._put_availability(self.group_id,
[self.STUDENT_EMAIL], course_availability=self.TEST_AVAIL)
self._check_no_students_registered()
self._check_student_in_group(self.STUDENT_EMAIL, self.group_id)
# Remains logged in as student after this call.
self._log_in_and_register_student(self.STUDENT_EMAIL, self.group_id)
self._check_no_students_in_groups() # Non-admin cannot see membership.
self._check_group_start_end_dates(
self.LAST_CENTURY, self.FAR_FUTURE, self.group_id)
# Confirm that, once student is in the student group, start_date
# and end_date values are used from the student group, not the
# original Course 'course' settings values.
with common_utils.Namespace(self.NAMESPACE):
app_context = sites.get_app_context_for_namespace(self.NAMESPACE)
in_group_env = courses.Course.get_environ(app_context)
self.check_course_start_end_dates(
self.LAST_CENTURY, self.FAR_FUTURE, in_group_env)
def test_act_hooks(self):
# modules.student_groups.student_groups.notify_module_enabled()
# registers some ACT_HOOKS callbacks that save the `when` date/time of
# course start and end override triggers that are acted on into the
# corresponding StudentGroupDTO, as UTC ISO-8601 strings.
#
# This test confirms that the side effects of those callbacks occur.
# Create a new student group, but initially containing no students.
actions.login(self.ADMIN_EMAIL, is_admin=True)
self.group_id = self._create_group('Hooks Group', 'hooks group')
self._check_no_students_in_groups()
# Add student to previously created student group and make the course
# availability override non-private.
actions.login(self.ADMIN_EMAIL, is_admin=True)
self._put_availability(self.group_id,
[self.STUDENT_EMAIL], course_availability=self.TEST_AVAIL)
self._check_no_students_registered()
self._check_student_in_group(self.STUDENT_EMAIL, self.group_id)
initial_env = self.app_context.get_environ()
# First, confirm there are no start_date or end_date values in the
# course settings or the student group.
self.check_course_start_end_dates(None, None, initial_env)
self._check_group_start_end_dates(None, None, self.group_id)
# Remains logged in as student after this call.
self._log_in_and_register_student(self.STUDENT_EMAIL, self.group_id)
# Confirm that, once student is in the student group, start_date
# and end_date values are still not set by either student group or
# course settings.
with common_utils.Namespace(self.NAMESPACE):
in_group_ctx = sites.get_app_context_for_namespace(self.NAMESPACE)
in_group_env = courses.Course.get_environ(in_group_ctx)
self.check_course_start_end_dates(None, None, in_group_env)
# Set a course_start override trigger for the student group.
actions.login(self.ADMIN_EMAIL, is_admin=True)
self._put_availability(self.group_id,
[self.STUDENT_EMAIL], course_triggers=self.only_course_start)
# Just one course start override trigger was POSTed into student group.
with common_utils.Namespace(self.NAMESPACE):
start_dto = student_groups.StudentGroupDAO.load(self.group_id)
self.assertEquals(
len(self.SGCOT.copy_from_settings(start_dto)), 1)
self.assertEquals(self.only_course_start, self.SGCOT.for_form(
start_dto, course=self.course))
# Check then remove start_date property, so the act() side-effects
# can also be confirmed, after run_availability_jobs.
logs = self.get_log()
self.check_and_clear_milestone_course_setting('course_start',
self.past_start_text, start_dto, self.SGCOT)
# Start override trigger is now in the student group DTO, so act on
# it in cron job.
self.run_availability_jobs(self.app_context)
# Confirm that update_start_date_from_start_override_when() was run,
# but that course settings were not affected (only student group
# properties should have changed).
with common_utils.Namespace(self.NAMESPACE):
after_run_ctx = sites.get_app_context_for_namespace(self.NAMESPACE)
after_run_env = courses.Course.get_environ(after_run_ctx)
self.check_course_start_end_dates(None, None, after_run_env)
# POSTed course_start `when` ended up as the 'start_date' in the
# course settings. 'end_date' should still be undefined.
self._check_group_start_end_dates(
self.past_start_text, None, self.group_id)
# All course start/end override triggers were acted on and consumed.
with common_utils.Namespace(self.NAMESPACE):
none_start_dto = student_groups.StudentGroupDAO.load(self.group_id)
# Now that the course_start override trigger should have been
# acted on, and thus the value of 'start_date' stored in the
# student group will have changed, provide the 'when' value for
# the expected default course_start override trigger in
# only_course_end, only_early_end, and defaults_only.
when_start = self.SGCOT.encoded_defaults(
availability=self.SGCOT.NONE_SELECTED, course=self.course,
milestone='course_start', settings=none_start_dto)
logs = self.get_log()
self.retrieve_logged('course_start', 'start_date',
self.past_start_text, self.SGCOT, logs, where='student group')
self.assertEquals(self.past_start_text, when_start['when'])
self.defaults_start = when_start
self.defaults_only['course_start'][0] = when_start
self.only_course_end['course_start'][0] = when_start
self.only_early_end['course_start'][0] = when_start
self.assertEquals(
len(self.SGCOT.copy_from_settings(none_start_dto)), 0)
self.assertEquals(self.defaults_only, self.SGCOT.for_form(
none_start_dto, course=self.course))
# Confirm that, logged in as the student, the start_date visible to
# the student is the one updated by the ACT_HOOKS callback.
actions.login(self.STUDENT_EMAIL, is_admin=False)
with common_utils.Namespace(self.NAMESPACE):
new_start_ctx = sites.get_app_context_for_namespace(self.NAMESPACE)
new_start_env = courses.Course.get_environ(new_start_ctx)
self.check_course_start_end_dates(
self.past_start_text, None, new_start_env)
# No change in availability (setting course_end['availability'] to the
# same as course_start['availability']) should still invoke ACT_HOOKS.
actions.login(self.ADMIN_EMAIL, is_admin=True)
self._put_availability(self.group_id,
[self.STUDENT_EMAIL], course_triggers=self.only_early_end)
# Just the one course_end trigger was POSTed into the student group.
with common_utils.Namespace(self.NAMESPACE):
early_dto = student_groups.StudentGroupDAO.load(self.group_id)
self.assertEquals(
len(self.SGCOT.copy_from_settings(early_dto)), 1)
self.assertEquals(self.only_early_end, self.SGCOT.for_form(
early_dto, course=self.course))
# Check then remove end_date property, so the act() side-effects
# can also be confirmed, after run_availability_jobs.
logs = self.get_log()
self.check_and_clear_milestone_course_setting('course_end',
self.an_earlier_end_hour_text, early_dto, self.SGCOT)
# End trigger is now in course settings, so act on it in cron job.
self.run_availability_jobs(self.app_context)
# Confirm that update_end_date_from_end_override_when() was run, but
# that course settings were not affected (only student group
# properties should have changed).
with common_utils.Namespace(self.NAMESPACE):
after_end_ctx = sites.get_app_context_for_namespace(self.NAMESPACE)
after_end_env = courses.Course.get_environ(after_end_ctx)
self.check_course_start_end_dates(None, None, after_end_env)
# All course start/end override triggers were acted on and consumed.
with common_utils.Namespace(self.NAMESPACE):
none_end_dto = student_groups.StudentGroupDAO.load(self.group_id)
# Now that the course_end override trigger should have been
# acted on, and thus the value of 'end_date' stored in the
# student group will have changed, provide the 'when' value for
# the expected default course_end override trigger in
# only_course_start and defaults_only.
when_end = self.SGCOT.encoded_defaults(
availability=self.SGCOT.NONE_SELECTED, course=self.course,
milestone='course_end', settings=none_end_dto)
logs = self.get_log()
self.retrieve_logged('course_end', 'end_date',
self.an_earlier_end_hour_text, self.SGCOT, logs,
where='student group')
self.assertEquals(self.an_earlier_end_hour_text, when_end['when'])
self.defaults_end = when_end
self.defaults_only['course_end'][0] = when_end
self.only_course_start['course_end'][0] = when_end
self.assertEquals(
len(self.SGCOT.copy_from_settings(none_end_dto)), 0)
self.assertEquals(self.defaults_only, self.SGCOT.for_form(
none_end_dto, course=self.course))
# A different end_date value should now be present in the student
# group. Previously-POSTed start_date should be unchanged.
self._check_group_start_end_dates(self.past_start_text,
self.an_earlier_end_hour_text, self.group_id)
# Confirm that, logged in as the student, the end_date visible to
# the student is the one updated by the ACT_HOOKS callback.
actions.login(self.STUDENT_EMAIL, is_admin=False)
with common_utils.Namespace(self.NAMESPACE):
new_end_ctx = sites.get_app_context_for_namespace(self.NAMESPACE)
new_end_env = courses.Course.get_environ(new_end_ctx)
self.check_course_start_end_dates(self.past_start_text,
self.an_earlier_end_hour_text, new_end_env)
|
achavan51/googleads-java-lib-master | modules/ads_lib/src/main/java/com/google/api/ads/common/lib/utils/AdsUtility.java | <reponame>achavan51/googleads-java-lib-master<gh_stars>0
// Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.api.ads.common.lib.utils;
/**
* Enumeration of utilities available in the client library.
*/
public enum AdsUtility {
/** AdWords SelectorBuilder */
SELECTOR_BUILDER("SelectorBuilder"),
/** AdWords ReportDownloader */
REPORT_DOWNLOADER("ReportDownloader"),
/** AdWords SelectorField enums */
SELECTOR_FIELD("SelectorField"),
/** AdWords shopping utility for managing shopping product partition trees */
PRODUCT_PARTITION_TREE("ProductPartitionTree"),
/** AdWords BatchJobService upload/download utility */
BATCH_JOB_HELPER("BatchJobHelper");
private final String userAgentIdentifier;
private AdsUtility(String userAgentIdentifier) {
this.userAgentIdentifier = userAgentIdentifier;
}
/**
* Returns the String to use for this utility when generating the user agent of a request.
*/
public String getUserAgentIdentifier() {
return userAgentIdentifier;
}
}
|
MyJobGlasses/graphql_devise | spec/requests/mutations/register_spec.rb | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe 'Registration process' do
include_context 'with graphql query request'
let(:name) { Faker::Name.name }
let(:password) { <PASSWORD> }
let(:email) { Faker::Internet.email }
let(:redirect) { 'https://google.com' }
context 'when using the user model' do
let(:query) do
<<-GRAPHQL
mutation {
userRegister(
email: "#{email}"
name: "#{name}"
password: "#{password}"
passwordConfirmation: "#{password}"
confirmUrl: "#{redirect}"
) {
credentials { accessToken }
user {
email
name
}
}
}
GRAPHQL
end
context 'when redirect_url is not whitelisted' do
let(:redirect) { 'https://not-safe.com' }
it 'returns a not whitelisted redirect url error' do
expect { post_request }.to(
not_change(User, :count)
.and(not_change(ActionMailer::Base.deliveries, :count))
)
expect(json_response[:errors]).to containing_exactly(
hash_including(
message: "Redirect to '#{redirect}' not allowed.",
extensions: { code: 'USER_ERROR' }
)
)
end
end
context 'when params are correct' do
it 'creates a new resource that requires confirmation' do
expect { post_request }.to(
change(User, :count).by(1)
.and(change(ActionMailer::Base.deliveries, :count).by(1))
)
user = User.last
expect(user).not_to be_active_for_authentication
expect(user.confirmed_at).to be_nil
expect(user).to be_valid_password(password)
expect(json_response[:data][:userRegister]).to include(
credentials: nil,
user: {
email: email,
name: name
}
)
email = Nokogiri::HTML(ActionMailer::Base.deliveries.last.body.encoded)
confirm_link = email.css('a').first['href']
confirm_token = confirm_link.match(/\?confirmationToken\=(?<token>.+)\z/)[:token]
expect(User.confirm_by_token(confirm_token)).to eq(user)
end
context 'when email address uses different casing' do
let(:email) { '<EMAIL>' }
it 'honors devise configuration for case insensitive fields' do
expect { post_request }.to change(ActionMailer::Base.deliveries, :count).by(1)
expect(User.last.email).to eq('<EMAIL>')
expect(json_response[:data][:userRegister]).to include(user: { email: '<EMAIL>', name: name })
end
end
end
context 'when required params are missing' do
let(:email) { '' }
it 'does *NOT* create resource a resource nor send an email' do
expect { post_request }.to(
not_change(User, :count)
.and(not_change(ActionMailer::Base.deliveries, :count))
)
expect(json_response[:data][:userRegister]).to be_nil
expect(json_response[:errors]).to containing_exactly(
hash_including(
message: "User couldn't be registered",
extensions: { code: 'USER_ERROR', detailed_errors: ["Email can't be blank"] }
)
)
end
end
end
context 'when using the admin model' do
let(:query) do
<<-GRAPHQL
mutation {
adminRegister(
email: "#{email}"
password: <PASSWORD>}"
passwordConfirmation: "#{password}"
) {
authenticatable {
email
}
}
}
GRAPHQL
end
before { post_request }
it 'skips the register mutation' do
expect(json_response[:errors]).to contain_exactly(
hash_including(message: "Field 'adminRegister' doesn't exist on type 'Mutation'")
)
end
end
context 'when using the guest model' do
let(:query) do
<<-GRAPHQL
mutation {
guestRegister(
email: "#{email}"
password: <PASSWORD>}"
passwordConfirmation: "#{password}"
) {
credentials { accessToken client uid }
authenticatable {
email
}
}
}
GRAPHQL
end
it 'returns credentials as no confirmation is required' do
expect { post_request }.to change(Guest, :count).from(0).to(1)
expect(json_response[:data][:guestRegister]).to include(
authenticatable: { email: email },
credentials: hash_including(
uid: email,
client: Guest.last.tokens.keys.first
)
)
end
end
end
|
DronM/osbe | build/templates/js20.proj-tmpl/controls/GridAjxMaster.js | <reponame>DronM/osbe<gh_stars>0
/* Copyright (c) 2016-2017
<NAME>, Katren ltd.
*/
/*
Description
*/
/** Requirements
* @requires
* @requires core/extend.js
*/
/* constructor
@param string id
@param object options{
}
*/
function GridAjxMaster(id,options){
options = options || {};
this.setDetailControl(options.detailControl);
this.setDetailKeyIds(options.detailkeyIds);
GridAjxMaster.superclass.constructor.call(this,id,options);
}
extend(GridAjxMaster,GridAjx);
/* Constants */
/* private members */
GridAjxMaster.prototype.m_detailControl;
GridAjxMaster.prototype.m_detailKeyIds;
/* protected*/
/*Selects newNode and unselects oldNode*/
GridAjxMaster.prototype.selectNode = function(newNode,oldNode){
GridAjxMaster.superclass.selectNode.call(this,newNode,oldNode);
if (this.m_detailControl){
//set new keys && refresh
var keys = this.getSelectedNodeKeys();
var pm = this.m_detailControl.getReadPublicMethod();
if (pm){
var pm_upd,pm_ins;
if (this.m_detailControl.getUpdatePublicMethod){
pm_upd = this.m_detailControl.getUpdatePublicMethod();
}
if (this.m_detailControl.getInsertPublicMethod){
pm_ins = this.m_detailControl.getInsertPublicMethod();
}
var det_ids = this.getDetailKeyIds();
var ind = 0;
var contr = pm.getController();
/*
var fields = "";
var signs = "";
var vals = "";
var icase = "";
*/
for (var kid in keys){
this.m_detailControl.setFilter({
"field":det_ids[ind],
"sign":contr.PARAM_SGN_EQUAL,
"val":keys[kid],
"icase":"0"
});
/*
fields+= (fields=="")? "":contr.PARAM_FIELD_SEP;
fields+= det_ids[ind];
signs+= (signs=="")? "":contr.PARAM_FIELD_SEP;
signs+= contr.PARAM_SGN_EQUAL;
vals+= (vals=="")? "":contr.PARAM_FIELD_SEP;
vals+= keys[kid];
icase+= (icase=="")? "":contr.PARAM_FIELD_SEP;
*/
if (pm_upd){
pm_upd.setFieldValue(det_ids[ind],keys[kid]);
}
if (pm_ins){
pm_ins.setFieldValue(det_ids[ind],keys[kid]);
}
ind++;
}
/*
pm.setFieldValue(contr.PARAM_COND_FIELDS, fields);
pm.setFieldValue(contr.PARAM_COND_SGNS, signs);
pm.setFieldValue(contr.PARAM_COND_VALS, vals);
pm.setFieldValue(contr.PARAM_COND_ICASE, icase);
*/
this.m_detailControl.onRefresh();
}
}
}
/* public methods */
GridAjxMaster.prototype.getDetailControl = function(){
return this.m_detailControl;
}
GridAjxMaster.prototype.setDetailControl = function(v){
this.m_detailControl = v;
}
GridAjxMaster.prototype.getDetailKeyIds = function(){
return this.m_detailKeyIds;
}
GridAjxMaster.prototype.setDetailKeyIds = function(v){
this.m_detailKeyIds = v;
}
|
JoseAVallejo12/holbertonschool-web_react | 0x09-react_redux_reducer_selector/task_5/dashboard/src/schema/courses.js | <gh_stars>0
import { normalize, schema } from 'normalizr';
const courses = new schema.Entity('courses');
export const coursesNormalizer = (data) => {
return normalize(data, [courses]).entities.courses;
};
|
tirth1/JavaScript | logicalOps.js | <reponame>tirth1/JavaScript
let a1 = true
let a2 = false
let a3 = true
if(a1 && a2){
console.log('a1 and a2 are true')
}
else if(a2 || a3){
console.log('a2 or a3 are true')
}
else{
console.log('Nothing...')
} |
vladimirtkach/yesjob | src/sales/migrations/0006_auto_20190905_0857.py | <filename>src/sales/migrations/0006_auto_20190905_0857.py
# Generated by Django 2.2 on 2019-09-05 08:57
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sales', '0005_auto_20190904_1720'),
]
operations = [
migrations.AlterField(
model_name='contactsource',
name='description',
field=models.CharField(max_length=250, verbose_name='Описание с примерами'),
),
migrations.AlterField(
model_name='contactsource',
name='name',
field=models.CharField(max_length=50, verbose_name='Название источника'),
),
migrations.AlterField(
model_name='contactsource',
name='potential_profiles',
field=models.ManyToManyField(blank=True, default=None, to='sales.SkillProfile', verbose_name='Возможные профили'),
),
migrations.AlterField(
model_name='skillprofile',
name='name',
field=models.CharField(blank=True, max_length=50, verbose_name='Название профиля'),
),
]
|
paulevsGitch/BetterNetherBeta | src/main/java/paulevs/bnb/world/biome/CorruptedLandsBiome.java | <filename>src/main/java/paulevs/bnb/world/biome/CorruptedLandsBiome.java
package paulevs.bnb.world.biome;
import paulevs.bnb.block.types.NetherTerrainType;
import paulevs.bnb.listeners.BlockListener;
import paulevs.bnb.util.BlockState;
import paulevs.bnb.world.structures.NetherStructures;
import java.util.Random;
public class CorruptedLandsBiome extends NetherBiome {
public CorruptedLandsBiome(String name) {
super(name);
this.setFogColor("1c1323");
this.setTopBlock(new BlockState(BlockListener.getBlock("nether_terrain"), NetherTerrainType.CORRUPTED_NYLIUM));
this.addStructure(NetherStructures.BULBINE, 0.1F, 8);
this.addStructure(NetherStructures.VIOLEUM, 0.3F, 8);
this.addStructure(NetherStructures.SHATTERED_GRASS, 1F, 8);
this.setFire(false);
}
@Override
public float getParticleChance() {
return 0.3F;
}
@Override
public int getParticleID(Random random) {
return random.nextInt(3) + 9;
}
}
|
jacksonsr45/TravelAndCashControl | domain/use_cases/user_manager/address/UpdateAddress.java | package domain.use_cases.user_manager.address;
import domain.entity.UserAddressEntity;
import domain.gateway.UserAddressInterface;
import domain.presenter.UserAddressPresenterInterface;
import domain.requests.UpdateAddressRequest;
public class UpdateAddress extends UpdateAddressFactory {
public UpdateAddress(UserAddressInterface repository, UpdateAddressRequest request) {
super(repository, request);
}
@Override
public void execute(UserAddressPresenterInterface presenter) {
UserAddressEntity entity = new UserAddressEntity(this.getId(), this.getUserID(), this.getCountry(),
this.getState(), this.getCity(), this.getNeighborhood(), this.getStreet(), this.getNumber(), this.getCommit());
presenter.present(this.repository.updateAddress(entity));
}
}
|
ReCursia/Sonic | core/src/com/studentsteam/sonic/Main.java | <reponame>ReCursia/Sonic
package com.studentsteam.sonic;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.Game;
import com.studentsteam.sonic.screens.StartScreen;
public class Main extends Game {
public static int SAVE_INDEX; //Save index
public static final int WIDTH = 1024;
public static final int HEIGHT = 720;
//Box2D Collision Bits
public static final short NOTHING_BIT = 0;
public static final short GROUND_BIT = 1;
public static final short SONIC_BIT = 2;
public static final short BRICK_BIT = 4;
public static final short COIN_BIT = 8;
public static final short DESTROYED_BIT = 16;
public static final short OBJECT_BIT = 32;
public static final short ENEMY_BIT = 64;
public static final short ENEMY_HEAD_BIT = 128;
public static final short ITEM_BIT = 256;
public static final short MARIO_HEAD_BIT = 512;
public static final short FIREBALL_BIT = 1024;
public SpriteBatch batch; //Only one for game (delegating game object to each screen)
@Override
public void create () {
batch = new SpriteBatch();
setScreen(new StartScreen(this));
}
}
|
slaveuser/blade20170127 | blade-core/src/main/java/blade/BladeFilter.java | /**
* Copyright (c) 2015, biezhi 王爵 (<EMAIL>)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package blade;
import java.io.IOException;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import blade.kit.log.Logger;
import blade.route.RouteMatcherBuilder;
/**
 * Blade core filter — the MVC bus.
 * Intercepts every request and dispatches it through blade's route matcher.
 *
 * @author <a href="mailto:<EMAIL>" target="_blank">biezhi</a>
 * @since 1.0
 */
public class BladeFilter implements Filter {

    private static final Logger LOGGER = Logger.getLogger(BladeFilter.class);

    /**
     * Name of the init-param holding the global blade bootstrap class.
     */
    private static final String APPLCATION_CLASS = "applicationClass";

    @Override
    public void init(FilterConfig filterConfig) throws ServletException {
        // Guard against double initialization (e.g. when the container re-inits the filter).
        if(!Blade.IS_INIT){
            BladeBase.webRoot(filterConfig.getServletContext().getRealPath("/"));
            BladeWebContext.servletContext(filterConfig.getServletContext());
            final BladeApplication application = getApplication(filterConfig);
            application.init();
            Blade.app(application);
            // Build all routes up front.
            RequestHandler.routeMatcher = RouteMatcherBuilder.building();
            // Global IoC container initialization.
            IocApplication.init();
            application.contextInitialized(BladeWebContext.servletContext());
            LOGGER.info("blade init complete!");
            BladeBase.init();
        }
    }

    /**
     * Resolves the global bootstrap object used to initialize the application.
     *
     * @param filterConfig the filter configuration
     * @return the application bootstrap instance
     * @throws ServletException if the configured class cannot be loaded or instantiated
     */
    private BladeApplication getApplication(FilterConfig filterConfig) throws ServletException {
        try {
            String applicationClassName = filterConfig.getInitParameter(APPLCATION_CLASS);
            // Outside embedded Jetty, instantiate the bootstrap class named in web.xml.
            if(!BladeBase.runJetty && null != applicationClassName){
                Class<?> applicationClass = Class.forName(applicationClassName);
                return (BladeApplication) applicationClass.newInstance();
            }
            return BladeBase.bladeApplication;
        } catch (Exception e) {
            throw new ServletException(e);
        }
    }

    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException{
        HttpServletRequest httpRequest = (HttpServletRequest) request;
        HttpServletResponse httpResponse = (HttpServletResponse) response;
        httpRequest.setCharacterEncoding(BladeBase.encoding());
        httpResponse.setCharacterEncoding(BladeBase.encoding());

        /**
         * Whether the request was handled by blade's RequestHandler.
         */
        boolean isHandler = RequestHandler.single().handler(httpRequest, httpResponse);
        // Not handled by blade and response still open: pass it down the filter chain.
        if(!isHandler && !httpResponse.isCommitted()){
            chain.doFilter(httpRequest, httpResponse);
        }
    }

    @Override
    public void destroy() {
        IocApplication.destroy();
        LOGGER.info("blade destroy!");
    }
}
|
HakuSen/report | src/components/icons/simple/images.js | import Icon from '../Icon';
// SVG path data for the "images" icon (Font Awesome style, 576x512 viewBox).
const imagesIconPath =
  'M480 416V432C480 458.5 458.5 480 432 480H48C21.5 480 0 458.5 0 432V176C0 149.5 21.5 128 48 128H64V336C64 380.1 99.9 416 144 416H480zM576 336V80C576 53.5 554.5 32 528 32H144C117.5 32 96 53.5 96 80V336C96 362.5 117.5 384 144 384H528C554.5 384 576 362.5 576 336zM256 128C256 154.5 234.5 176 208 176S160 154.5 160 128 181.5 80 208 80 256 101.5 256 128zM160 272L215.5 216.5C220.2 211.8 227.8 211.8 232.5 216.5L272 256 407.5 120.5C412.2 115.8 419.8 115.8 424.5 120.5L512 208V320H160V272z';

// Register the icon under the name "images".
Icon.register({
  images: {
    width: 576,
    height: 512,
    paths: [{ d: imagesIconPath }]
  }
});
|
harshp8l/deep-learning-lang-detection | data/test/cpp/37c879f732bb5301ed0114a2778626c2b966577aChunkHandler.cpp | <filename>data/test/cpp/37c879f732bb5301ed0114a2778626c2b966577aChunkHandler.cpp
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, version 3.0.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License 3.0 for more details.
// Copyright (C) 2012-2013 filfat, xerpi, JoostinOnline
#include "ChunkHandler.hpp"
//Constructor
// Builds the WORLD_SIZE^3 grid of chunks anchored at |pos|, then wires up
// neighbour pointers and generates the initial chunk meshes.
ChunkHandler::ChunkHandler(Vertex32 *pos)
{
    position = pos;
    int chunk_x, chunk_y, chunk_z;
    for(int z = 0; z < WORLD_SIZE; z++)
    {
        for(int y = 0; y < WORLD_SIZE; y++)
        {
            for(int x = 0; x < WORLD_SIZE; x++)
            {
                // World-space origin of the chunk at grid cell (x, y, z).
                chunk_x = position->x + x * CHUNK_SIZE * BLOCK_SIZE;
                chunk_y = position->y + y * CHUNK_SIZE * BLOCK_SIZE;
                chunk_z = position->z + z * CHUNK_SIZE * BLOCK_SIZE;
                // The two lowest layers get `false`, the rest `true` for the
                // last Chunk argument — presumably an "empty" flag; TODO
                // confirm its meaning against the Chunk constructor.
                if(y < 2)
                    chunkList.push_back(new Chunk(chunk_x, chunk_y, chunk_z, x, y, z, false));
                else
                    chunkList.push_back(new Chunk(chunk_x, chunk_y, chunk_z, x, y, z, true));
            }
        }
    }
    generateNeighbours();
    updateChunks();
}
//Destructor
// Frees every chunk owned by this handler.
ChunkHandler::~ChunkHandler()
{
    clearChunkList();
}
//Methods
// Returns the block containing world coordinate (x, y, z).
// NOTE(review): the arguments are ints, so `x / CHUNK_TOTAL_SIZE` already
// truncates before floor() is applied; for negative coordinates this
// truncates toward zero instead of flooring — confirm callers only pass
// non-negative positions.
Block *ChunkHandler::getBlockAtPosition(int x, int y, int z)
{
    // Grid cell of the enclosing chunk.
    int chunk_x = floor(x / CHUNK_TOTAL_SIZE);
    int chunk_y = floor(y / CHUNK_TOTAL_SIZE);
    int chunk_z = floor(z / CHUNK_TOTAL_SIZE);
    // Block index within that chunk.
    int block_x = floor((x - (chunk_x * CHUNK_TOTAL_SIZE)) / BLOCK_SIZE);
    int block_y = floor((y - (chunk_y * CHUNK_TOTAL_SIZE)) / BLOCK_SIZE);
    int block_z = floor((z - (chunk_z * CHUNK_TOTAL_SIZE)) / BLOCK_SIZE);
    return chunkList[getWorldIndex(chunk_x, chunk_y, chunk_z)]->blockList[block_z][block_y][block_x];
}
void ChunkHandler::clearChunkList()
{
for(uint32_t i = 0; i < chunkList.size(); i++)
{
delete chunkList[i];
}
chunkList.clear();
}
// Regenerates the mesh of every chunk flagged as needing an update.
void ChunkHandler::updateChunks()
{
    std::vector<Chunk *>::iterator it;
    for(it = chunkList.begin(); it != chunkList.end(); it++)
    {
        if((*it)->needsUpdate)
            (*it)->generateMesh();
    }
}
// Links every chunk to its six face neighbours; chunks on a world boundary
// get NULL for the neighbour that would fall outside the grid.
void ChunkHandler::generateNeighbours()
{
    Chunk *chunkP;
    for(int z = 0; z < WORLD_SIZE; z++)
    {
        for(int y = 0; y < WORLD_SIZE; y++)
        {
            for(int x = 0; x < WORLD_SIZE; x++)
            {
                chunkP = chunkList[getWorldIndex(x, y, z)];
                //Left (negative x)
                if(x == 0)
                    chunkP->setLeftNeighbour(NULL);
                else
                    chunkP->setLeftNeighbour(chunkList[getWorldIndex(x - 1, y, z)]);
                //Right (positive x)
                if(x == (WORLD_SIZE-1))
                    chunkP->setRightNeighbour(NULL);
                else
                    chunkP->setRightNeighbour(chunkList[getWorldIndex(x + 1, y, z)]);
                //Up (positive y)
                if(y == (WORLD_SIZE-1))
                    chunkP->setUpNeighbour(NULL);
                else
                    chunkP->setUpNeighbour(chunkList[getWorldIndex(x, y + 1, z)]);
                //Down (negative y)
                if(y == 0)
                    chunkP->setDownNeighbour(NULL);
                else
                    chunkP->setDownNeighbour(chunkList[getWorldIndex(x, y - 1, z)]);
                //Front (positive z)
                if(z == (WORLD_SIZE-1))
                    chunkP->setFrontNeighbour(NULL);
                else
                    chunkP->setFrontNeighbour(chunkList[getWorldIndex(x, y, z + 1)]);
                //Back (negative z)
                if(z == 0)
                    chunkP->setBackNeighbour(NULL);
                else
                    chunkP->setBackNeighbour(chunkList[getWorldIndex(x, y, z - 1)]);
            }
        }
    }
}
// Returns true when the chunk exists and all of its grid indices fall
// inside [0, WORLD_SIZE).
bool ChunkHandler::chunkInBounds(Chunk *chunkPointer)
{
    if(chunkPointer == NULL)
        return false;

    return chunkPointer->index.x >= 0 && chunkPointer->index.x < WORLD_SIZE &&
           chunkPointer->index.y >= 0 && chunkPointer->index.y < WORLD_SIZE &&
           chunkPointer->index.z >= 0 && chunkPointer->index.z < WORLD_SIZE;
}
// Draws every chunk in the world grid.
void ChunkHandler::draw()
{
    std::vector<Chunk *>::iterator it;
    for(it = chunkList.begin(); it != chunkList.end(); it++)
    {
        (*it)->draw();
    }
}
|
krahman/ember-bootstrap | packages/ember-bootstrap/lib/views/breadcrumb.js | require("ember-bootstrap/mixins/item_view_title_support");
require("ember-bootstrap/mixins/first_last_view_support");
// NOTE(review): `get` appears unused in this view — confirm before removing.
var get = Ember.get;
var Bootstrap = window.Bootstrap;

// Twitter Bootstrap breadcrumb rendered as <ul class="breadcrumb">.
// Every item links to "#" followed by the divider; the last item is
// rendered as plain text with the "active" class.
Bootstrap.Breadcrumb = Ember.CollectionView.extend(Bootstrap.FirstLastViewSupport, {
  tagName: "ul",
  classNames: "breadcrumb",
  // Separator shown after each non-terminal crumb.
  divider: "/",
  itemViewClass: Ember.View.extend(Bootstrap.ItemViewTitleSupport, {
    template: Ember.Handlebars.compile('<a href="#">{{title}}</a><span class="divider">{{view.parentView.divider}}</span>')
  }),
  lastItemViewClass: Ember.View.extend(Bootstrap.ItemViewTitleSupport, {
    classNames: "active",
    template: Ember.Handlebars.compile("{{title}}")
  })
});
|
rajeshnaroth/curator | curator-web/src/js/actions/newVideo.js | <reponame>rajeshnaroth/curator
import { fetchVideoDetails } from '../api/youtube'
export const FETCHED_NEW_VIDEO = 'FETCHED_NEW_VIDEO'

// Thunk: look up the metadata of a single YouTube video and store the
// result; fetch failures are logged and swallowed.
export const fetchNewVideoFromYouTube = (videoId) => (dispatch) =>
	fetchVideoDetails(videoId)
		.then((videoData) => {
			dispatch({ type: FETCHED_NEW_VIDEO, result: videoData })
		})
		.catch((err) => console.log(err))
|
claytonjwong/leetcode-js | contest204.js | <filename>contest204.js
/*
* Weekly Contest 204
*
* Rank Name Score Finish Time Q1 (3) Q2 (4) Q3 (6) Q4 (7)
* 2025 / 13949 claytonjwong 7 0:50:49 0:17:53 0:40:49 *2
*
* Ranking: https://leetcode.com/contest/weekly-contest-204/ranking/81/
* Screenshare: https://www.youtube.com/watch?v=fPmeK1rsSiY&feature=youtu.be
*/
/*
* 1566. Detect Pattern of Length M Repeated K or More Times
*
* Q: https://leetcode.com/problems/detect-pattern-of-length-m-repeated-k-or-more-times/
* A: https://leetcode.com/problems/detect-pattern-of-length-m-repeated-k-or-more-times/discuss/819276/Javascript-Python3-C%2B%2B-T-Pieces-Whole
*/
/**
 * Returns true if A contains a pattern of length K repeated T (or more)
 * times consecutively.
 *
 * Fix: the original joined number slices into strings and compared text,
 * which is O(N*K*T) and can yield false positives when numbers of different
 * digit counts concatenate identically (e.g. [1, 21, 12, 1] with K=2, T=2:
 * "1"+"21" repeated equals "1"+"21"+"12"+"1"). Compare element-wise
 * instead: a period-K repetition covering K*T elements exists iff some run
 * of K*(T-1) consecutive positions j satisfies A[j] === A[j + K].
 *
 * @param {number[]} A - input array
 * @param {number} K - pattern length (m)
 * @param {number} T - required number of consecutive repetitions (k)
 * @returns {boolean}
 */
let containsPattern = (A, K, T) => {
    // Degenerate repetition counts: any window of K elements trivially
    // repeats once, and zero repetitions always "fit".
    if (T <= 1)
        return K * T <= A.length;
    const need = K * (T - 1); // matching positions required for one full run
    let streak = 0;
    for (let j = 0; j + K < A.length; ++j) {
        streak = A[j] === A[j + K] ? streak + 1 : 0;
        if (streak >= need)
            return true;
    }
    return false;
};
/*
* 1567. Maximum Length of Subarray With Positive Product
*
* Q: https://leetcode.com/problems/maximum-length-of-subarray-with-positive-product/
* A: https://leetcode.com/problems/maximum-length-of-subarray-with-positive-product/discuss/822464/Javascript-Python3-C%2B%2B-Sliding-Window
*/
/**
 * 1567. Maximum Length of Subarray With Positive Product.
 * Returns the length of the longest subarray whose product is positive.
 *
 * Fix: the original mutated its argument by pushing a sentinel 0 onto the
 * caller's array; this version works on a local copy. The signature keeps
 * the original accumulator defaults so existing callers are unaffected.
 *
 * @param {number[]} A - input array (no longer modified)
 * @param {number} [cnt=0] - running count of negatives in the window
 * @param {number} [max=0] - best length seen so far
 * @returns {number}
 */
let getMaxLen = (A, cnt = 0, max = 0) => {
    const B = [...A, 0]; // ⭐️ sentinel 0 terminates the final window
    const N = B.length;
    let i = 0;
    let j = 0;
    while (i !== N) {
        // case 1: ➖ collapse window [i 👉 ..j) at a zero boundary
        while (j < N && !B[j]) {
            while (i < j) {
                cnt = B[i++] < 0 ? cnt - 1 : cnt;
                // product is positive only with an even count of negatives
                max = cnt & 1 ? max : Math.max(max, j - i);
            }
            i = ++j;
        }
        // case 2: ➕ expand window [i..j 👉 ) over non-zero values
        while (j < N && B[j]) {
            cnt = B[j++] < 0 ? cnt + 1 : cnt;
            max = cnt & 1 ? max : Math.max(max, j - i);
        }
    }
    return max;
};
|
k-czajka/fh | fhdp/fhdp-commons/fhdp-commons-transport/src/main/java/pl/fhframework/dp/transport/endpoints/IEndpointCfgService.java | package pl.fhframework.dp.transport.endpoints;
import pl.fhframework.dp.commons.base.model.IEndpointCfgDefinition;
/**
* @author <a href="mailto:<EMAIL>"><NAME></a>
* @version : $, : $
* @created 10/02/2021
*/
/**
 * Service contract for reading and persisting endpoint configurations.
 */
public interface IEndpointCfgService {

    /**
     * Loads the configuration matching the given endpoint definition.
     *
     * @param endpointCfgDefinition definition identifying the endpoint
     * @return the matching configuration DTO
     */
    EndpointCfgDto getEndpointCfg(IEndpointCfgDefinition endpointCfgDefinition);

    /**
     * Persists the given endpoint configuration.
     *
     * @param def configuration to store
     * @return the stored configuration DTO
     */
    EndpointCfgDto storeEndpointCfg(EndpointCfgDto def);
}
|
xiaohua9/project1 | src/com/u1/day5/ChengFaBiao.java | <reponame>xiaohua9/project1
package com.u1.day5;
/*
 * Author: Long Xiaohua (龙小华)
 * Date: 2019-06-28
 * Prints the 9x9 multiplication table.
 */
public class ChengFaBiao {
    public static void main(String[] args) {
        // Row i contains products 1*i through i*i, tab-separated.
        for (int i=1;i<=9;i++){
            for (int j=1;j<=i;j++){
                System.out.print(j+"*"+i+"="+(i*j)+"\t");
            }
            System.out.println(" ");
        }
    }
}
|
thewtex/scikit-image | skimage/io/_plugins/q_color_mixer.py | <gh_stars>1-10
# the module for the qt color_mixer plugin
from qtpy import QtCore
from qtpy.QtWidgets import (QWidget, QStackedWidget, QSlider, QGridLayout,
QLabel, QFrame, QComboBox, QRadioButton,
QPushButton)
from .util import ColorMixer
class IntelligentSlider(QWidget):
    ''' A slider that adds a 'name' attribute and calls a callback
    with 'name' as an argument to the registered callback.

    This allows you to create large groups of sliders in a loop,
    but still keep track of the individual events

    It also prints a label below the slider.

    The range of the slider is hardcoded from zero - 1000,
    but it supports a conversion factor so you can scale the results'''

    def __init__(self, name, a, b, callback):
        QWidget.__init__(self)
        self.name = name
        self.callback = callback
        # Linear mapping from raw slider position to reported value:
        # value = raw * a + b.
        self.a = a
        self.b = b
        # True while set_value() moves the slider programmatically, so
        # slider_changed() can skip the user callback.
        self.manually_triggered = False

        self.slider = QSlider()
        self.slider.setRange(0, 1000)
        self.slider.setValue(500)
        self.slider.valueChanged.connect(self.slider_changed)

        self.name_label = QLabel()
        self.name_label.setText(self.name)
        self.name_label.setAlignment(QtCore.Qt.AlignCenter)

        self.value_label = QLabel()
        self.value_label.setText('%2.2f' % (self.slider.value() * self.a
                                            + self.b))
        self.value_label.setAlignment(QtCore.Qt.AlignCenter)

        self.layout = QGridLayout(self)
        self.layout.addWidget(self.name_label, 0, 0)
        self.layout.addWidget(self.slider, 1, 0, QtCore.Qt.AlignHCenter)
        self.layout.addWidget(self.value_label, 2, 0)

    # bind this to the valueChanged signal of the slider
    def slider_changed(self, val):
        # `val` is the raw slider position; replace it with the scaled value.
        val = self.val()
        self.value_label.setText(str(val)[:4])
        if not self.manually_triggered:
            self.callback(self.name, val)

    def set_conv_fac(self, a, b):
        # Update the linear conversion factors (value = raw * a + b).
        self.a = a
        self.b = b

    def set_value(self, val):
        # Move the slider to the given scaled value without firing the
        # user callback.
        self.manually_triggered = True
        self.slider.setValue(int((val - self.b) / self.a))
        self.value_label.setText('%2.2f' % val)
        self.manually_triggered = False

    def val(self):
        # Scaled value currently represented by the slider.
        return self.slider.value() * self.a + self.b
class MixerPanel(QFrame):
    '''A color mixer to hook up to an image.

    You pass the image you want the panel to operate on
    and it operates on that image in place. You also
    pass a callback to be called to trigger a refresh.
    This callback is called every time the mixer modifies
    your image.

    Fix: ``hsv_mul.toggled`` was connected twice while ``hsv_add.toggled``
    was never connected (copy/paste slip — compare the RGB section), so
    selecting the additive HSV radio button did not reset the sliders.
    '''
    def __init__(self, img):
        QFrame.__init__(self)
        # self.setFrameStyle(QFrame.Box | QFrame.Sunken)

        self.img = img
        self.mixer = ColorMixer(self.img)
        self.callback = None

        #---------------------------------------------------------------
        # ComboBox
        #---------------------------------------------------------------

        self.combo_box_entries = ['RGB Color', 'HSV Color',
                                  'Brightness/Contrast',
                                  'Gamma',
                                  'Gamma (Sigmoidal)']
        self.combo_box = QComboBox()
        for entry in self.combo_box_entries:
            self.combo_box.addItem(entry)
        self.combo_box.currentIndexChanged.connect(self.combo_box_changed)

        #---------------------------------------------------------------
        # RGB color sliders
        #---------------------------------------------------------------

        # radio buttons
        self.rgb_add = QRadioButton('Additive')
        self.rgb_mul = QRadioButton('Multiplicative')
        self.rgb_mul.toggled.connect(self.rgb_radio_changed)
        self.rgb_add.toggled.connect(self.rgb_radio_changed)

        # sliders
        rs = IntelligentSlider('R', 0.51, -255, self.rgb_changed)
        gs = IntelligentSlider('G', 0.51, -255, self.rgb_changed)
        bs = IntelligentSlider('B', 0.51, -255, self.rgb_changed)
        self.rs = rs
        self.gs = gs
        self.bs = bs

        self.rgb_widget = QWidget()
        self.rgb_widget.layout = QGridLayout(self.rgb_widget)
        self.rgb_widget.layout.addWidget(self.rgb_add, 0, 0, 1, 3)
        self.rgb_widget.layout.addWidget(self.rgb_mul, 1, 0, 1, 3)
        self.rgb_widget.layout.addWidget(self.rs, 2, 0)
        self.rgb_widget.layout.addWidget(self.gs, 2, 1)
        self.rgb_widget.layout.addWidget(self.bs, 2, 2)

        #---------------------------------------------------------------
        # HSV sliders
        #---------------------------------------------------------------

        # radio buttons
        self.hsv_add = QRadioButton('Additive')
        self.hsv_mul = QRadioButton('Multiplicative')
        self.hsv_mul.toggled.connect(self.hsv_radio_changed)
        # BUG FIX: previously hsv_mul was connected a second time here and
        # hsv_add was left unconnected.
        self.hsv_add.toggled.connect(self.hsv_radio_changed)

        # sliders
        hs = IntelligentSlider('H', 0.36, -180, self.hsv_changed)
        ss = IntelligentSlider('S', 0.002, 0, self.hsv_changed)
        vs = IntelligentSlider('V', 0.002, 0, self.hsv_changed)
        self.hs = hs
        self.ss = ss
        self.vs = vs

        self.hsv_widget = QWidget()
        self.hsv_widget.layout = QGridLayout(self.hsv_widget)
        self.hsv_widget.layout.addWidget(self.hsv_add, 0, 0, 1, 3)
        self.hsv_widget.layout.addWidget(self.hsv_mul, 1, 0, 1, 3)
        self.hsv_widget.layout.addWidget(self.hs, 2, 0)
        self.hsv_widget.layout.addWidget(self.ss, 2, 1)
        self.hsv_widget.layout.addWidget(self.vs, 2, 2)

        #---------------------------------------------------------------
        # Brightness/Contrast sliders
        #---------------------------------------------------------------

        # sliders
        cont = IntelligentSlider('x', 0.002, 0, self.bright_changed)
        bright = IntelligentSlider('+', 0.51, -255, self.bright_changed)
        self.cont = cont
        self.bright = bright

        # layout
        self.bright_widget = QWidget()
        self.bright_widget.layout = QGridLayout(self.bright_widget)
        self.bright_widget.layout.addWidget(self.cont, 0, 0)
        self.bright_widget.layout.addWidget(self.bright, 0, 1)

        #----------------------------------------------------------------------
        # Gamma Slider
        #----------------------------------------------------------------------

        gamma = IntelligentSlider('gamma', 0.005, 0, self.gamma_changed)
        self.gamma = gamma

        # layout
        self.gamma_widget = QWidget()
        self.gamma_widget.layout = QGridLayout(self.gamma_widget)
        self.gamma_widget.layout.addWidget(self.gamma, 0, 0)

        #---------------------------------------------------------------
        # Sigmoid Gamma sliders
        #---------------------------------------------------------------

        # sliders
        alpha = IntelligentSlider('alpha', 0.011, 1, self.sig_gamma_changed)
        beta = IntelligentSlider('beta', 0.012, 0, self.sig_gamma_changed)
        self.a_gamma = alpha
        self.b_gamma = beta

        # layout
        self.sig_gamma_widget = QWidget()
        self.sig_gamma_widget.layout = QGridLayout(self.sig_gamma_widget)
        self.sig_gamma_widget.layout.addWidget(self.a_gamma, 0, 0)
        self.sig_gamma_widget.layout.addWidget(self.b_gamma, 0, 1)

        #---------------------------------------------------------------
        # Buttons
        #---------------------------------------------------------------

        self.commit_button = QPushButton('Commit')
        self.commit_button.clicked.connect(self.commit_changes)
        self.revert_button = QPushButton('Revert')
        self.revert_button.clicked.connect(self.revert_changes)

        #---------------------------------------------------------------
        # Mixer Layout
        #---------------------------------------------------------------

        self.sliders = QStackedWidget()
        self.sliders.addWidget(self.rgb_widget)
        self.sliders.addWidget(self.hsv_widget)
        self.sliders.addWidget(self.bright_widget)
        self.sliders.addWidget(self.gamma_widget)
        self.sliders.addWidget(self.sig_gamma_widget)

        self.layout = QGridLayout(self)
        self.layout.addWidget(self.combo_box, 0, 0)
        self.layout.addWidget(self.sliders, 1, 0)
        self.layout.addWidget(self.commit_button, 2, 0)
        self.layout.addWidget(self.revert_button, 3, 0)

        #---------------------------------------------------------------
        # State Initialization
        #---------------------------------------------------------------

        self.combo_box.setCurrentIndex(0)
        self.rgb_mul.setChecked(True)
        self.hsv_mul.setChecked(True)

    def set_callback(self, callback):
        # Callback invoked after every modification of the image.
        self.callback = callback

    def combo_box_changed(self, index):
        # Show the slider page for the selected mixer mode and reset state.
        self.sliders.setCurrentIndex(index)
        self.reset()

    def rgb_radio_changed(self):
        self.reset()

    def hsv_radio_changed(self):
        self.reset()

    def reset(self):
        # Restore sliders and image to their unmodified state.
        self.reset_sliders()
        self.mixer.set_to_stateimg()
        if self.callback:
            self.callback()

    def reset_sliders(self):
        # handle changing the conversion factors necessary
        if self.rgb_add.isChecked():
            self.rs.set_conv_fac(0.51, -255)
            self.rs.set_value(0)
            self.gs.set_conv_fac(0.51, -255)
            self.gs.set_value(0)
            self.bs.set_conv_fac(0.51, -255)
            self.bs.set_value(0)
        else:
            self.rs.set_conv_fac(0.002, 0)
            self.rs.set_value(1.)
            self.gs.set_conv_fac(0.002, 0)
            self.gs.set_value(1.)
            self.bs.set_conv_fac(0.002, 0)
            self.bs.set_value(1.)

        self.hs.set_value(0)
        if self.hsv_add.isChecked():
            self.ss.set_conv_fac(0.002, -1)
            self.ss.set_value(0)
            self.vs.set_conv_fac(0.002, -1)
            self.vs.set_value(0)
        else:
            self.ss.set_conv_fac(0.002, 0)
            self.ss.set_value(1.)
            self.vs.set_conv_fac(0.002, 0)
            self.vs.set_value(1.)

        self.bright.set_value(0)
        self.cont.set_value(1.)

        self.gamma.set_value(1)
        self.a_gamma.set_value(1)
        self.b_gamma.set_value(0.5)

    def rgb_changed(self, name, val):
        # `name` identifies the slider ('R', 'G' or 'B').
        if name == 'R':
            channel = self.mixer.RED
        elif name == 'G':
            channel = self.mixer.GREEN
        else:
            channel = self.mixer.BLUE

        if self.rgb_mul.isChecked():
            self.mixer.multiply(channel, val)
        elif self.rgb_add.isChecked():
            self.mixer.add(channel, val)
        else:
            pass

        if self.callback:
            self.callback()

    def hsv_changed(self, name, val):
        # All three HSV values are applied together on every change.
        h = self.hs.val()
        s = self.ss.val()
        v = self.vs.val()

        if self.hsv_mul.isChecked():
            self.mixer.hsv_multiply(h, s, v)
        elif self.hsv_add.isChecked():
            self.mixer.hsv_add(h, s, v)
        else:
            pass

        if self.callback:
            self.callback()

    def bright_changed(self, name, val):
        # Brightness and contrast are applied together from both sliders.
        b = self.bright.val()
        c = self.cont.val()
        self.mixer.brightness(c, b)
        if self.callback:
            self.callback()

    def gamma_changed(self, name, val):
        self.mixer.gamma(val)
        if self.callback:
            self.callback()

    def sig_gamma_changed(self, name, val):
        ag = self.a_gamma.val()
        bg = self.b_gamma.val()
        self.mixer.sigmoid_gamma(ag, bg)
        if self.callback:
            self.callback()

    def commit_changes(self):
        # Bake the current adjustments into the base image.
        self.mixer.commit_changes()
        self.reset_sliders()

    def revert_changes(self):
        # Discard all adjustments and restore the original image.
        self.mixer.revert()
        self.reset_sliders()
        if self.callback:
            self.callback()
|
Trydamere/shardingsphere | shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/datanode/DataNodes.java | <gh_stars>1000+
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.infra.datanode;
import lombok.RequiredArgsConstructor;
import org.apache.shardingsphere.infra.rule.ShardingSphereRule;
import org.apache.shardingsphere.infra.rule.identifier.type.DataNodeContainedRule;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
/**
* Data nodes.
*/
@RequiredArgsConstructor
public final class DataNodes {

    // All rules of the current schema; scanned for the one that owns a table.
    private final Collection<ShardingSphereRule> rules;

    // One builder per rule, used to decorate the raw data nodes of a table.
    @SuppressWarnings("rawtypes")
    private final Map<ShardingSphereRule, DataNodeBuilder> dataNodeBuilders;

    public DataNodes(final Collection<ShardingSphereRule> rules) {
        this.rules = rules;
        dataNodeBuilders = DataNodeBuilderFactory.getInstances(rules);
    }

    /**
     * Get data nodes.
     *
     * @param tableName table name
     * @return data nodes, or an empty collection when no rule owns the table
     */
    @SuppressWarnings({"unchecked", "rawtypes"})
    public Collection<DataNode> getDataNodes(final String tableName) {
        Optional<DataNodeContainedRule> dataNodeContainedRule = findDataNodeContainedRule(tableName);
        if (!dataNodeContainedRule.isPresent()) {
            return Collections.emptyList();
        }
        Collection<DataNode> result = new LinkedList<>(dataNodeContainedRule.get().getDataNodesByTableName(tableName));
        // Let every rule's builder post-process the data nodes in turn.
        for (Entry<ShardingSphereRule, DataNodeBuilder> entry : dataNodeBuilders.entrySet()) {
            result = entry.getValue().build(result, entry.getKey());
        }
        return result;
    }

    /**
     * Finds the first data-node-contained rule that has data nodes for the given table.
     */
    private Optional<DataNodeContainedRule> findDataNodeContainedRule(final String tableName) {
        return rules.stream().filter(each -> isDataNodeContainedRuleContainsTable(each, tableName)).findFirst().map(optional -> (DataNodeContainedRule) optional);
    }

    /**
     * Whether the rule is a {@link DataNodeContainedRule} with at least one data node for the table.
     */
    private boolean isDataNodeContainedRuleContainsTable(final ShardingSphereRule each, final String tableName) {
        return each instanceof DataNodeContainedRule && !((DataNodeContainedRule) each).getDataNodesByTableName(tableName).isEmpty();
    }
}
|
nowkoai/test | ee/spec/controllers/sitemap_controller_spec.rb | <reponame>nowkoai/test
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe SitemapController do
  describe '#show' do
    subject { get :show, format: :xml }

    before do
      # The sitemap endpoint is only served on GitLab.com.
      allow(Gitlab).to receive(:com?).and_return(dot_com)
    end

    context 'when not Gitlab.com?' do
      let(:dot_com) { false }

      it 'returns :not_found' do
        subject

        expect(response).to have_gitlab_http_status(:not_found)
      end
    end

    context 'when Gitlab.com?' do
      let(:dot_com) { true }

      context 'with an authenticated user' do
        before do
          # Stub the generation service so no real sitemap is built.
          allow(Sitemap::CreateService).to receive_message_chain(:new, :execute).and_return(result)

          subject
        end

        context 'when the sitemap generation raises an error' do
          let(:result) { ServiceResponse.error(message: 'foo') }

          it 'returns an xml error' do
            expect(response).to have_gitlab_http_status(:ok)
            expect(response.body).to include('<error>foo</error>')
          end
        end

        context 'when the sitemap was created suscessfully' do
          let(:result) { ServiceResponse.success(payload: { sitemap: 'foo' }) }

          it 'returns sitemap' do
            expect(response).to have_gitlab_http_status(:ok)
            expect(response.body).to eq('foo')
          end
        end
      end
    end
  end
end
|
layered-pieces/objc-pieces | xcode-templates/rib.xctemplate/ViewControllerpresenterinteractorDelegate/___FILEBASENAME___ViewController.h | //
// ___FILENAME___
// ___PROJECTNAME___
//
// Created by ___FULLUSERNAME___ on ___DATE___.
// Copyright ___YEAR___ ___ORGANIZATIONNAME___. All rights reserved.
//
#import <UIKit/UIKit.h>
NS_ASSUME_NONNULL_BEGIN

@class ___VARIABLE_productName___ViewController;

/// Delegate through which the view controller reports events to its RIB.
@protocol ___VARIABLE_productName___ViewControllerDelegate <NSObject>
@end

/// RIB view controller; marked non-subclassable and only constructible
/// through the parameterless designated initializer.
__attribute__((objc_subclassing_restricted))
@interface ___VARIABLE_productName___ViewController : UIViewController

@property (nonatomic, weak) id<___VARIABLE_productName___ViewControllerDelegate> delegate;

/// Designated initializer.
- (instancetype)init NS_DESIGNATED_INITIALIZER;
/// Nib/storyboard initializers are unavailable by design.
- (instancetype)initWithNibName:(nullable NSString *)nibNameOrNil bundle:(nullable NSBundle *)nibBundleOrNil NS_DESIGNATED_INITIALIZER NS_UNAVAILABLE;
- (instancetype)initWithCoder:(NSCoder *)aDecoder NS_DESIGNATED_INITIALIZER NS_UNAVAILABLE;

@end

NS_ASSUME_NONNULL_END
|
ch-jin/bundle-app | client/test/components/splash/session_form_test.js | import sinon from "sinon";
import {
renderComponent,
renderAndUnmountComponent,
expect,
} from "../../test_helper";
import * as actions from "../../../actions/session_actions";
import SessionFormContainer from "../../../components/splash/SessionFormContainer";
// Tests for SessionFormContainer: rendering, lifecycle cleanup, and the
// dispatching of signup/login actions on form submit.
describe("SessionFormContainer", () => {
  let component;

  beforeEach(() => {
    component = renderComponent(SessionFormContainer);
  });

  it("renders component", () => {
    expect(component).to.exist;
  });

  it("has a form element", () => {
    expect(component.find("form")).to.exist;
  });

  it("renders errors", () => {
    // Re-render with an error already present in the session state.
    component = renderComponent(SessionFormContainer, null, {
      session: { errors: ["error"] },
    });
    expect(component.find("ul")).to.exist;
  });

  describe("lifecycle hooks", () => {
    beforeEach(() => {
      sinon.stub(actions, "clearErrors").returns({ type: "" });
    });

    afterEach(() => {
      actions.clearErrors.restore();
    });

    it("clears errors when unmounted", () => {
      renderAndUnmountComponent(SessionFormContainer);
      expect(actions.clearErrors.calledOnce).to.be.true;
    });
  });

  describe("input-group", () => {
    let inputGroups, input;

    beforeEach(() => {
      inputGroups = component.find(".input-group");
      input = component.find("input");
    });

    it("renders onto the DOM", () => {
      expect(inputGroups).to.exist;
    });

    it("renders 2 input groups", () => {
      expect(inputGroups.length).to.equal(2);
    });

    it("shows text in the input area", () => {
      input.simulate("change", "new input value");
      expect(input).to.have.value("new input value");
    });
  });

  describe("signup form", () => {
    beforeEach(() => {
      // Stub the thunk so submitting does not hit the network.
      sinon.stub(actions, "signup").returns(() => Promise.resolve({}));
      component = renderComponent(SessionFormContainer, { formType: "signup" });
    });

    afterEach(() => {
      actions.signup.restore();
    });

    it("renders site name", () => {
      expect(component).to.contain("BundleMe");
    });

    it("dispatches signup action on submit", () => {
      const form = component.find("form");
      form.simulate("submit");
      expect(actions.signup.calledOnce).to.be.true;
    });
  });

  describe("login form", () => {
    beforeEach(() => {
      // Stub the thunk so submitting does not hit the network.
      sinon.stub(actions, "login").returns(() => Promise.resolve({}));
      component = renderComponent(SessionFormContainer, { formType: "login" });
    });

    afterEach(() => {
      actions.login.restore();
    });

    it("renders site name", () => {
      expect(component).to.contain("Log in");
    });

    it("dispatches login action on submit", () => {
      const form = component.find("form");
      form.simulate("submit");
      expect(actions.login.calledOnce).to.be.true;
    });
  });
});
|
hicham1987/CapexApp | node_modules/ngx-charts/release/area-chart/index.js | "use strict";
function __export(m) {
for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p];
}
__export(require('./area-chart.module'));
__export(require('./area-chart.component'));
__export(require('./area-chart-normalized.component'));
__export(require('./area-chart-stacked.component'));
__export(require('./area-series.component'));
//# sourceMappingURL=index.js.map |
rien/nanoc | nanoc/spec/nanoc/regressions/gh_761_spec.rb | <gh_stars>1000+
# frozen_string_literal: true
# Regression test for GH-761: a layout can call @item.compiled_content
# directly instead of using yield.
describe 'GH-761', site: true do
  before do
    File.write('content/donkey.md', 'Compiled content donkey!')
    File.write('layouts/foo.erb', '[<%= @item.compiled_content %>]')
    # Heredoc below is the literal Rules file content; do not re-indent it.
    File.write('Rules', <<EOS)
compile '/**/*' do
layout '/foo.*'
write '/donkey.html'
end
layout '/foo.*', :erb
EOS
  end

  it 'supports #compiled_content instead of yield' do
    site = Nanoc::Core::SiteLoader.new.new_from_cwd
    Nanoc::Core::Compiler.compile(site)

    expect(File.read('output/donkey.html')).to eql('[Compiled content donkey!]')
  end
end
|
Ujjawalgupta42/Hacktoberfest2021-DSA | 04. Arrays/distribute_n_candies_k_kids.cpp | <reponame>Ujjawalgupta42/Hacktoberfest2021-DSA<gh_stars>100-1000
#include <bits/stdc++.h>
using namespace std;
// Function to find out the number of candies every person received when n
// candies are distributed to k people in increasing shares: person i gets i
// candies in the first turn, k + i in the second, and so on, until the
// candies run out (the last person served gets whatever remains).
//
// Fixes: the variable-length array `int arr[k]` is not standard C++ (GCC
// extension) and is replaced by a zero-initialized vector; the unused
// variable `ind` is removed.
void candies(int n, int k)
{
    // Number of complete turns (every person served once per turn).
    int count = 0;

    // Candies finally held by each of the k people.
    vector<int> arr(k, 0);

    int low = 0, high = n;

    // Binary search for the largest m with 1 + 2 + ... + m <= n.
    while (low <= high) {
        int mid = (low + high) >> 1;
        int sum = (mid * (mid + 1)) >> 1;

        if (sum <= n) {
            // mid shares fit; that is mid / k complete turns.
            count = mid / k;
            low = mid + 1;
        }
        else {
            high = mid - 1;
        }
    }

    // Last share handed out during the final complete turn.
    int last = (count * k);

    // Remove candies consumed by the complete turns.
    n -= (last * (last + 1)) / 2;

    int i = 0;

    // First share of the trailing, incomplete turn.
    int term = (count * k) + 1;

    while (n) {
        if (term <= n) {
            arr[i++] = term;
            n -= term;
            term++;
        }
        else {
            // Not enough left for a full share: next person takes the rest.
            arr[i] += n;
            n = 0;
        }
    }

    // Add each person's candies from the complete turns: person i received
    // (i+1), (k + i+1), (2k + i+1), ... across `count` turns.
    for (int i = 0; i < k; i++)
        arr[i] += (count * (i + 1))
                  + (k * (count * (count - 1)) / 2);

    // Print the final distribution, space-separated.
    for (int i = 0; i < k; i++)
        cout << arr[i] << " ";
}
// Driver Code: distribute 7 candies among 4 people.
int main()
{
    int n = 7, k = 4;
    candies(n, k);
    return 0;
}
|
ridi/chromium-aw | src/main/java/org/chromium/network/mojom/WebSandboxFlags.java | // WebSandboxFlags.java is auto generated by mojom_bindings_generator.py, do not edit
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// This file is autogenerated by:
// mojo/public/tools/bindings/mojom_bindings_generator.py
// For:
// services/network/public/mojom/web_sandbox_flags.mojom
//
package org.chromium.network.mojom;
import androidx.annotation.IntDef;
/**
 * Bit-flag constants mirroring the network.mojom WebSandboxFlags enum.
 *
 * NOTE: this file is generated by mojom_bindings_generator.py — change the
 * .mojom definition rather than editing the values here.
 */
public final class WebSandboxFlags {
    // The mojom enum is [Extensible]: values outside the known set are tolerated.
    private static final boolean IS_EXTENSIBLE = true;

    @IntDef({
            WebSandboxFlags.NONE,
            WebSandboxFlags.NAVIGATION,
            WebSandboxFlags.PLUGINS,
            WebSandboxFlags.ORIGIN,
            WebSandboxFlags.FORMS,
            WebSandboxFlags.SCRIPTS,
            WebSandboxFlags.TOP_NAVIGATION,
            WebSandboxFlags.POPUPS,
            WebSandboxFlags.AUTOMATIC_FEATURES,
            WebSandboxFlags.POINTER_LOCK,
            WebSandboxFlags.DOCUMENT_DOMAIN,
            WebSandboxFlags.ORIENTATION_LOCK,
            WebSandboxFlags.PROPAGATES_TO_AUXILIARY_BROWSING_CONTEXTS,
            WebSandboxFlags.MODALS,
            WebSandboxFlags.PRESENTATION_CONTROLLER,
            WebSandboxFlags.TOP_NAVIGATION_BY_USER_ACTIVATION,
            WebSandboxFlags.DOWNLOADS,
            WebSandboxFlags.STORAGE_ACCESS_BY_USER_ACTIVATION,
            WebSandboxFlags.ALL})
    public @interface EnumType {}

    // Each constant is a distinct bit; ALL (-1) sets every bit.
    public static final int NONE = 0;

    public static final int NAVIGATION = 1;

    public static final int PLUGINS = 2;

    public static final int ORIGIN = 4;

    public static final int FORMS = 8;

    public static final int SCRIPTS = 16;

    public static final int TOP_NAVIGATION = 32;

    public static final int POPUPS = 64;

    public static final int AUTOMATIC_FEATURES = 128;

    public static final int POINTER_LOCK = 256;

    public static final int DOCUMENT_DOMAIN = 512;

    public static final int ORIENTATION_LOCK = 1024;

    public static final int PROPAGATES_TO_AUXILIARY_BROWSING_CONTEXTS = 2048;

    public static final int MODALS = 4096;

    public static final int PRESENTATION_CONTROLLER = 8192;

    public static final int TOP_NAVIGATION_BY_USER_ACTIVATION = 16384;

    public static final int DOWNLOADS = 32768;

    public static final int STORAGE_ACCESS_BY_USER_ACTIVATION = 65536;

    public static final int ALL = -1;

    public static final int MIN_VALUE = -1;

    public static final int MAX_VALUE = 65536;

    /** Returns whether {@code value} is one of the generated constants. */
    public static boolean isKnownValue(int value) {
        switch (value) {
            case -1:
            case 0:
            case 1:
            case 2:
            case 4:
            case 8:
            case 16:
            case 32:
            case 64:
            case 128:
            case 256:
            case 512:
            case 1024:
            case 2048:
            case 4096:
            case 8192:
            case 16384:
            case 32768:
            case 65536:
                return true;
        }
        return false;
    }

    /**
     * Validates a deserialized value. Because this enum is extensible, every
     * value is accepted and this never throws.
     */
    public static void validate(int value) {
        if (IS_EXTENSIBLE || isKnownValue(value)) return;
        throw new org.chromium.mojo.bindings.DeserializationException("Invalid enum value.");
    }

    /** Identity mapping, kept for parity with non-extensible enums. */
    public static int toKnownValue(int value) {
        return value;
    }

    /** Not instantiable: constants only. */
    private WebSandboxFlags() {}
}
Acidburn0zzz/peridot | bin/ledger/tests/integration/merging_tests.cc | <reponame>Acidburn0zzz/peridot<filename>bin/ledger/tests/integration/merging_tests.cc<gh_stars>1-10
// Copyright 2016 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <map>
#include <utility>
#include <vector>
#include "gtest/gtest.h"
#include "lib/fidl/cpp/bindings/binding.h"
#include "lib/fsl/tasks/message_loop.h"
#include "lib/fsl/vmo/sized_vmo.h"
#include "lib/fsl/vmo/strings.h"
#include "lib/fxl/functional/make_copyable.h"
#include "lib/fxl/macros.h"
#include "lib/fxl/strings/string_printf.h"
#include "lib/ledger/fidl/ledger.fidl.h"
#include "peridot/bin/ledger/storage/public/types.h"
#include "peridot/bin/ledger/tests/integration/integration_test.h"
#include "peridot/bin/ledger/tests/integration/test_utils.h"
#include "peridot/lib/callback/capture.h"
#include "peridot/lib/convert/convert.h"
namespace test {
namespace integration {
namespace {
// Test fixture for the merging integration tests; adds nothing on top of
// IntegrationTest beyond disallowing copy and assignment.
class MergingIntegrationTest : public IntegrationTest {
 public:
  MergingIntegrationTest() = default;
  ~MergingIntegrationTest() override = default;

 private:
  FXL_DISALLOW_COPY_AND_ASSIGN(MergingIntegrationTest);
};
// Test ledger::PageWatcher that counts OnChange() notifications, keeps the
// most recent change and snapshot, and invokes |change_callback| after every
// notification.
class Watcher : public ledger::PageWatcher {
 public:
  Watcher(fidl::InterfaceRequest<ledger::PageWatcher> request,
          fxl::Closure change_callback)
      : binding_(this, std::move(request)),
        change_callback_(std::move(change_callback)) {}

  // Number of OnChange() calls received so far.
  uint changes_seen = 0;
  // Snapshot requested in the most recent OnChange() callback.
  ledger::PageSnapshotPtr last_snapshot_;
  // Page change delivered by the most recent OnChange() call.
  ledger::PageChangePtr last_page_change_;

 private:
  // PageWatcher:
  void OnChange(ledger::PageChangePtr page_change,
                ledger::ResultState result_state,
                const OnChangeCallback& callback) override {
    FXL_DCHECK(page_change);
    FXL_DCHECK(result_state == ledger::ResultState::COMPLETED)
        << "Handling OnChange pagination not implemented yet";
    changes_seen++;
    last_page_change_ = std::move(page_change);
    // Drop the previous snapshot binding before requesting a fresh one
    // through the callback.
    last_snapshot_.Unbind();
    callback(last_snapshot_.NewRequest());
    change_callback_();
  }

  fidl::Binding<PageWatcher> binding_;
  fxl::Closure change_callback_;
};
// How ConflictResolverImpl::ResolveRequest::Merge() sends its results.
enum class MergeType {
  // All merged values are sent in a single Merge() call.
  SIMPLE,
  // Merged values are split across two Merge() calls.
  MULTIPART,
};
// Test ledger::ConflictResolver that records every Resolve() request and lets
// the test drive the merge explicitly through the recorded ResolveRequest
// objects. Quits the message loop on each incoming request and on disconnect.
class ConflictResolverImpl : public ledger::ConflictResolver {
 public:
  explicit ConflictResolverImpl(
      fidl::InterfaceRequest<ConflictResolver> request)
      : binding_(this, std::move(request)) {
    binding_.set_error_handler([this] {
      this->disconnected = true;
      fsl::MessageLoop::GetCurrent()->PostQuitTask();
    });
  }
  ~ConflictResolverImpl() override {}

  // One recorded Resolve() call: the three page versions involved in the
  // conflict plus the provider used to send back merge results.
  struct ResolveRequest {
    fidl::InterfaceHandle<ledger::PageSnapshot> left_version;
    fidl::InterfaceHandle<ledger::PageSnapshot> right_version;
    fidl::InterfaceHandle<ledger::PageSnapshot> common_version;
    ledger::MergeResultProviderPtr result_provider;

    ResolveRequest(
        fidl::InterfaceHandle<ledger::PageSnapshot> left_version,
        fidl::InterfaceHandle<ledger::PageSnapshot> right_version,
        fidl::InterfaceHandle<ledger::PageSnapshot> common_version,
        fidl::InterfaceHandle<ledger::MergeResultProvider> result_provider)
        : left_version(std::move(left_version)),
          right_version(std::move(right_version)),
          common_version(std::move(common_version)),
          result_provider(result_provider.Bind()) {}

    // Returns the full list of changes between branches and makes sure that at
    // least |min_queries| of partial results are returned before retrieving the
    // complete result for the left and for the right changes.
    ::testing::AssertionResult GetFullDiff(
        fidl::Array<ledger::DiffEntryPtr>* entries,
        int min_queries = 0) {
      return GetDiff(
          nullptr,
          [this](fidl::Array<uint8_t> token,
                 std::function<void(ledger::Status,
                                    fidl::Array<ledger::DiffEntryPtr>,
                                    fidl::Array<uint8_t>)> callback) mutable {
            result_provider->GetFullDiff(std::move(token), callback);
          },
          entries, 0, min_queries);
    }

    // Same as GetFullDiff() but only retrieves the conflicting entries.
    ::testing::AssertionResult GetConflictingDiff(
        fidl::Array<ledger::DiffEntryPtr>* entries,
        int min_queries = 0) {
      return GetDiff(
          nullptr,
          [this](fidl::Array<uint8_t> token,
                 std::function<void(ledger::Status,
                                    fidl::Array<ledger::DiffEntryPtr>,
                                    fidl::Array<uint8_t>)> callback) mutable {
            result_provider->GetConflictingDiff(std::move(token), callback);
          },
          entries, 0, min_queries);
    }

    // Resolves the conflict by sending the given merge results. If
    // |merge_type| is MULTIPART, the merge will be send in two parts, each
    // sending half of |results|' elements.
    ::testing::AssertionResult Merge(
        fidl::Array<ledger::MergedValuePtr> results,
        MergeType merge_type = MergeType::SIMPLE) {
      FXL_DCHECK(merge_type == MergeType::SIMPLE || results.size() >= 2);

      if (merge_type == MergeType::SIMPLE) {
        ::testing::AssertionResult merge_status =
            PartialMerge(std::move(results));
        if (!merge_status) {
          return merge_status;
        }
      } else {
        // Split |results| in two halves and send each half separately.
        size_t part1_size = results.size() / 2;
        fidl::Array<ledger::MergedValuePtr> part2;
        for (size_t i = part1_size; i < results.size(); ++i) {
          part2.push_back(std::move(results[i]));
        }
        results.resize(part1_size);

        ::testing::AssertionResult merge_status =
            PartialMerge(std::move(results));
        if (!merge_status) {
          return merge_status;
        }
        merge_status = PartialMerge(std::move(part2));
        if (!merge_status) {
          return merge_status;
        }
      }

      // Finalize the merge.
      ledger::Status status;
      result_provider->Done([&status](ledger::Status s) { status = s; });
      if (!result_provider.WaitForResponse()) {
        return ::testing::AssertionFailure() << "Done failed.";
      }
      if (status != ledger::Status::OK) {
        return ::testing::AssertionFailure()
               << "Done failed with status " << status;
      }
      return ::testing::AssertionSuccess();
    }

    // Asks the result provider to merge all non-conflicting entries.
    ::testing::AssertionResult MergeNonConflictingEntries() {
      ledger::Status status;
      result_provider->MergeNonConflictingEntries(
          callback::Capture([] {}, &status));
      if (!result_provider.WaitForResponse()) {
        return ::testing::AssertionFailure()
               << "MergeNonConflictingEntries failed.";
      }
      if (status != ledger::Status::OK) {
        return ::testing::AssertionFailure()
               << "MergeNonConflictingEntries failed with status " << status
               << ".";
      }
      return ::testing::AssertionSuccess();
    }

   private:
    // Drives |get_diff| to completion, following pagination tokens, and
    // appends every received change to |entries|. Fails if fewer than
    // |min_queries| queries were needed in total.
    ::testing::AssertionResult GetDiff(
        fidl::Array<uint8_t> token,
        std::function<void(fidl::Array<uint8_t>,
                           std::function<void(ledger::Status,
                                              fidl::Array<ledger::DiffEntryPtr>,
                                              fidl::Array<uint8_t>)>)> get_diff,
        fidl::Array<ledger::DiffEntryPtr>* entries,
        int num_queries,
        int min_queries) {
      ledger::Status status;
      fidl::Array<uint8_t> next_token;
      do {
        get_diff(
            std::move(token),
            [&status, entries, &next_token](
                ledger::Status s, fidl::Array<ledger::DiffEntryPtr> changes,
                fidl::Array<uint8_t> next) {
              status = s;
              for (auto& change : changes) {
                entries->push_back(std::move(change));
              }
              next_token = std::move(next);
            });
        if (!result_provider.WaitForResponse()) {
          return ::testing::AssertionFailure() << "GetDiff failed.";
        }
        if (status != ledger::Status::OK &&
            status != ledger::Status::PARTIAL_RESULT) {
          return ::testing::AssertionFailure()
                 << "GetDiff failed with status " << status;
        }
        // A continuation token must be present exactly when the status is
        // PARTIAL_RESULT (i.e. not OK).
        if (!next_token != (status == ledger::Status::OK)) {
          return ::testing::AssertionFailure()
                 << "next_token is " << convert::ToString(next_token)
                 << ", but status is:" << status;
        }
        ++num_queries;
        token = std::move(next_token);
        next_token = nullptr;  // Suppress misc-use-after-move.
      } while (token);

      if (num_queries < min_queries) {
        return ::testing::AssertionFailure()
               << "Only " << num_queries
               << " partial results were found, but at least " << min_queries
               << " were expected";
      }
      return ::testing::AssertionSuccess();
    }

    // Sends one batch of merged values to the result provider.
    ::testing::AssertionResult PartialMerge(
        fidl::Array<ledger::MergedValuePtr> partial_result) {
      ledger::Status status;
      result_provider->Merge(std::move(partial_result),
                             [&status](ledger::Status s) { status = s; });
      if (!result_provider.WaitForResponse()) {
        return ::testing::AssertionFailure() << "Merge failed.";
      }
      if (status != ledger::Status::OK) {
        return ::testing::AssertionFailure()
               << "Merge failed with status " << status;
      }
      return ::testing::AssertionSuccess();
    }
  };

  // All Resolve() calls received so far, in order.
  std::vector<ResolveRequest> requests;
  // Set to true when the binding is disconnected.
  bool disconnected = false;

 private:
  // ConflictResolver:
  void Resolve(fidl::InterfaceHandle<ledger::PageSnapshot> left_version,
               fidl::InterfaceHandle<ledger::PageSnapshot> right_version,
               fidl::InterfaceHandle<ledger::PageSnapshot> common_version,
               fidl::InterfaceHandle<ledger::MergeResultProvider>
                   result_provider) override {
    requests.emplace_back(std::move(left_version), std::move(right_version),
                          std::move(common_version),
                          std::move(result_provider));
    fsl::MessageLoop::GetCurrent()->PostQuitTask();
  }

  fidl::Binding<ConflictResolver> binding_;
};
// Custom conflict resolver that doesn't resolve any conflicts: Resolve() is a
// no-op, so conflicts stay pending until the merge policy changes.
class DummyConflictResolver : public ledger::ConflictResolver {
 public:
  explicit DummyConflictResolver(
      fidl::InterfaceRequest<ConflictResolver> request)
      : binding_(this, std::move(request)) {}
  ~DummyConflictResolver() override {}

 private:
  // ledger::ConflictResolver:
  void Resolve(fidl::InterfaceHandle<ledger::PageSnapshot> /*left_version*/,
               fidl::InterfaceHandle<ledger::PageSnapshot> /*right_version*/,
               fidl::InterfaceHandle<ledger::PageSnapshot> /*common_version*/,
               fidl::InterfaceHandle<ledger::MergeResultProvider>
               /*result_provider*/) override {
    // Do nothing.
  }

  fidl::Binding<ConflictResolver> binding_;
};
// Test ledger::ConflictResolverFactory returning a fixed merge policy
// (optionally after |response_delay|) and creating one ConflictResolverImpl
// (or DummyConflictResolver) per page.
class TestConflictResolverFactory : public ledger::ConflictResolverFactory {
 public:
  TestConflictResolverFactory(
      ledger::MergePolicy policy,
      fidl::InterfaceRequest<ledger::ConflictResolverFactory> request,
      fxl::Closure on_get_policy_called_callback,
      fxl::TimeDelta response_delay = fxl::TimeDelta::FromMilliseconds(0))
      : policy_(policy),
        binding_(this, std::move(request)),
        callback_(std::move(on_get_policy_called_callback)),
        response_delay_(response_delay) {}

  // Number of GetPolicy() calls received so far.
  uint get_policy_calls = 0;
  // Resolvers created so far, keyed by page id.
  std::map<storage::PageId, ConflictResolverImpl> resolvers;

  // When true, NewConflictResolver() creates DummyConflictResolvers (which
  // never resolve anything) instead of ConflictResolverImpls.
  void set_use_dummy_resolver(bool use_dummy_resolver) {
    use_dummy_resolver_ = use_dummy_resolver;
  }

 private:
  // ConflictResolverFactory:
  void GetPolicy(fidl::Array<uint8_t> /*page_id*/,
                 const GetPolicyCallback& callback) override {
    get_policy_calls++;
    // Answer asynchronously, after |response_delay_|.
    fsl::MessageLoop::GetCurrent()->task_runner()->PostDelayedTask(
        [this, callback] {
          callback(policy_);
          if (callback_) {
            callback_();
          }
        },
        response_delay_);
  }

  void NewConflictResolver(
      fidl::Array<uint8_t> page_id,
      fidl::InterfaceRequest<ledger::ConflictResolver> resolver) override {
    if (use_dummy_resolver_) {
      dummy_resolvers_.emplace(
          std::piecewise_construct,
          std::forward_as_tuple(convert::ToString(page_id)),
          std::forward_as_tuple(std::move(resolver)));
      return;
    }
    resolvers.emplace(std::piecewise_construct,
                      std::forward_as_tuple(convert::ToString(page_id)),
                      std::forward_as_tuple(std::move(resolver)));
  }

  ledger::MergePolicy policy_;
  bool use_dummy_resolver_ = false;
  std::map<storage::PageId, DummyConflictResolver> dummy_resolvers_;
  fidl::Binding<ConflictResolverFactory> binding_;
  fxl::Closure callback_;
  fxl::TimeDelta response_delay_;
};
// Optional is an object that optionally contains another object. A
// default-constructed Optional is empty; one constructed from a value is
// engaged. Dereferencing an empty Optional yields a default-constructed T.
template <typename T>
class Optional {
 public:
  Optional() : value_() {}
  explicit Optional(T value) : engaged_(true), value_(std::move(value)) {}

  // Accessors for the contained value.
  constexpr const T& operator*() const& { return value_; }
  constexpr const T* operator->() const { return &value_; }
  // True iff a value is present.
  constexpr explicit operator bool() const { return engaged_; }

 private:
  bool const engaged_ = false;
  T const value_;
};
// Checks that |value| matches |expected|: when |expected| is engaged the value
// must be present and equal to it; when |expected| is empty the value must be
// absent. |type| labels the failure message (e.g. "Base", "Left", "Right").
::testing::AssertionResult ValueMatch(const std::string& type,
                                      const ledger::ValuePtr& value,
                                      const Optional<std::string>& expected) {
  if (expected) {
    if (!value) {
      return ::testing::AssertionFailure()
             << type << " has no value but expected \"" << *expected << "\".";
    }
    if (ToString(value->value) != *expected) {
      return ::testing::AssertionFailure()
             << type << " has value \"" << ToString(value->value)
             << "\" but expected \"" << *expected << "\".";
    }
  } else if (value) {
    // |expected| is necessarily empty in this branch, so the extra
    // "!expected &&" test of the original was redundant.
    return ::testing::AssertionFailure()
           << type << " has value \"" << ToString(value->value)
           << "\" but expected no value.";
  }
  return ::testing::AssertionSuccess();
}
// Checks that |entry| describes a diff for |expected_key| with the given
// base/left/right values; an empty Optional means "no value expected" on that
// side.
::testing::AssertionResult ChangeMatch(std::string expected_key,
                                       Optional<std::string> expected_base,
                                       Optional<std::string> expected_left,
                                       Optional<std::string> expected_right,
                                       const ledger::DiffEntryPtr& entry) {
  convert::ExtendedStringView found_key(entry->key);
  // |found_key| is already an ExtendedStringView; compare it directly instead
  // of wrapping it in a second, redundant ExtendedStringView as before.
  if (expected_key != found_key) {
    return ::testing::AssertionFailure()
           << "Expected key \"" << expected_key << "\" but found \""
           << found_key << "\"";
  }
  ::testing::AssertionResult result =
      ValueMatch("Base", entry->base, expected_base);
  if (!result) {
    return result;
  }
  result = ValueMatch("Left", entry->left, expected_left);
  if (!result) {
    return result;
  }
  return ValueMatch("Right", entry->right, expected_right);
}
// Two connections to the same page make conflicting commits; each watcher
// first sees only its own connection's commit, then — after the (default)
// merge — the changes coming from the other branch.
TEST_F(MergingIntegrationTest, Merging) {
  auto instance = NewLedgerAppInstance();
  ledger::PagePtr page1 = instance->GetTestPage();
  fidl::Array<uint8_t> test_page_id;
  page1->GetId([&test_page_id](fidl::Array<uint8_t> page_id) {
    test_page_id = std::move(page_id);
  });
  EXPECT_TRUE(page1.WaitForResponse());
  // Second connection to the same page.
  ledger::PagePtr page2 = instance->GetPage(test_page_id, ledger::Status::OK);

  // Attach one watcher to each connection.
  ledger::PageWatcherPtr watcher1_ptr;
  Watcher watcher1(watcher1_ptr.NewRequest(),
                   [] { fsl::MessageLoop::GetCurrent()->PostQuitTask(); });
  ledger::PageSnapshotPtr snapshot1;
  page1->GetSnapshot(
      snapshot1.NewRequest(), nullptr, std::move(watcher1_ptr),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  ledger::PageWatcherPtr watcher2_ptr;
  Watcher watcher2(watcher2_ptr.NewRequest(),
                   [] { fsl::MessageLoop::GetCurrent()->PostQuitTask(); });
  ledger::PageSnapshotPtr snapshot2;
  page2->GetSnapshot(
      snapshot2.NewRequest(), nullptr, std::move(watcher2_ptr),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());

  // Make concurrent, conflicting changes inside two transactions.
  page1->StartTransaction(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page1->Put(
      convert::ToArray("name"), convert::ToArray("Alice"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page1->Put(
      convert::ToArray("city"), convert::ToArray("Paris"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page2->StartTransaction(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page2->Put(
      convert::ToArray("name"), convert::ToArray("Bob"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page2->Put(
      convert::ToArray("phone"), convert::ToArray("0123456789"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());

  // Verify that each change is seen by the right watcher.
  page1->Commit(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  ASSERT_FALSE(RunLoopWithTimeout());
  EXPECT_EQ(1u, watcher1.changes_seen);
  ledger::PageChangePtr change = std::move(watcher1.last_page_change_);
  ASSERT_EQ(2u, change->changes.size());
  EXPECT_EQ("city", convert::ToString(change->changes[0]->key));
  EXPECT_EQ("Paris", ToString(change->changes[0]->value));
  EXPECT_EQ("name", convert::ToString(change->changes[1]->key));
  EXPECT_EQ("Alice", ToString(change->changes[1]->value));

  page2->Commit(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  ASSERT_FALSE(RunLoopWithTimeout());
  EXPECT_EQ(1u, watcher2.changes_seen);
  change = std::move(watcher2.last_page_change_);
  ASSERT_EQ(2u, change->changes.size());
  EXPECT_EQ("name", convert::ToString(change->changes[0]->key));
  EXPECT_EQ("Bob", ToString(change->changes[0]->value));
  EXPECT_EQ("phone", convert::ToString(change->changes[1]->key));
  EXPECT_EQ("0123456789", ToString(change->changes[1]->value));

  // Wait for the merge notifications to arrive on both watchers.
  ASSERT_FALSE(RunLoopWithTimeout());
  ASSERT_FALSE(RunLoopWithTimeout());

  // Each change is seen once, and by the correct watcher only.
  EXPECT_EQ(2u, watcher1.changes_seen);
  change = std::move(watcher1.last_page_change_);
  ASSERT_EQ(2u, change->changes.size());
  EXPECT_EQ("name", convert::ToString(change->changes[0]->key));
  EXPECT_EQ("Bob", ToString(change->changes[0]->value));
  EXPECT_EQ("phone", convert::ToString(change->changes[1]->key));
  EXPECT_EQ("0123456789", ToString(change->changes[1]->value));

  EXPECT_EQ(2u, watcher2.changes_seen);
  change = std::move(watcher2.last_page_change_);
  ASSERT_EQ(1u, change->changes.size());
  EXPECT_EQ("city", convert::ToString(change->changes[0]->key));
  EXPECT_EQ("Paris", ToString(change->changes[0]->value));
}
// With a CUSTOM policy backed by a resolver that never resolves anything, a
// conflict stays pending; switching the factory to LAST_ONE_WINS then
// triggers the merge and both watchers see the other branch's changes.
TEST_F(MergingIntegrationTest, MergingWithConflictResolutionFactory) {
  auto instance = NewLedgerAppInstance();
  ledger::PagePtr page1 = instance->GetTestPage();
  fidl::Array<uint8_t> test_page_id;
  page1->GetId([&test_page_id](fidl::Array<uint8_t> page_id) {
    test_page_id = std::move(page_id);
  });
  EXPECT_TRUE(page1.WaitForResponse());

  // Set up a resolver configured not to resolve any conflicts.
  ledger::ConflictResolverFactoryPtr resolver_factory_ptr;
  auto resolver_factory = std::make_unique<TestConflictResolverFactory>(
      ledger::MergePolicy::CUSTOM, resolver_factory_ptr.NewRequest(),
      [] { fsl::MessageLoop::GetCurrent()->PostQuitTask(); });
  resolver_factory->set_use_dummy_resolver(true);
  ledger::LedgerPtr ledger_ptr = instance->GetTestLedger();
  ledger_ptr->SetConflictResolverFactory(
      std::move(resolver_factory_ptr),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(ledger_ptr.WaitForResponse());
  EXPECT_FALSE(RunLoopWithTimeout());

  ledger::PagePtr page2 = instance->GetPage(test_page_id, ledger::Status::OK);

  // Attach one watcher to each connection.
  ledger::PageWatcherPtr watcher1_ptr;
  Watcher watcher1(watcher1_ptr.NewRequest(),
                   [] { fsl::MessageLoop::GetCurrent()->PostQuitTask(); });
  ledger::PageSnapshotPtr snapshot1;
  page1->GetSnapshot(
      snapshot1.NewRequest(), nullptr, std::move(watcher1_ptr),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  ledger::PageWatcherPtr watcher2_ptr;
  Watcher watcher2(watcher2_ptr.NewRequest(),
                   [] { fsl::MessageLoop::GetCurrent()->PostQuitTask(); });
  ledger::PageSnapshotPtr snapshot2;
  page2->GetSnapshot(
      snapshot2.NewRequest(), nullptr, std::move(watcher2_ptr),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());

  // Make concurrent, conflicting changes inside two transactions.
  page1->StartTransaction(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page1->Put(
      convert::ToArray("name"), convert::ToArray("Alice"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page1->Put(
      convert::ToArray("city"), convert::ToArray("Paris"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page2->StartTransaction(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page2->Put(
      convert::ToArray("name"), convert::ToArray("Bob"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page2->Put(
      convert::ToArray("phone"), convert::ToArray("0123456789"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());

  // Verify that each change is seen by the right watcher.
  page1->Commit(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  EXPECT_FALSE(RunLoopWithTimeout());
  EXPECT_EQ(1u, watcher1.changes_seen);
  ledger::PageChangePtr change = std::move(watcher1.last_page_change_);
  ASSERT_EQ(2u, change->changes.size());
  EXPECT_EQ("city", convert::ToString(change->changes[0]->key));
  EXPECT_EQ("Paris", ToString(change->changes[0]->value));
  EXPECT_EQ("name", convert::ToString(change->changes[1]->key));
  EXPECT_EQ("Alice", ToString(change->changes[1]->value));

  page2->Commit(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  EXPECT_FALSE(RunLoopWithTimeout());
  EXPECT_EQ(1u, watcher2.changes_seen);
  change = std::move(watcher2.last_page_change_);
  ASSERT_EQ(2u, change->changes.size());
  EXPECT_EQ("name", convert::ToString(change->changes[0]->key));
  EXPECT_EQ("Bob", ToString(change->changes[0]->value));
  EXPECT_EQ("phone", convert::ToString(change->changes[1]->key));
  EXPECT_EQ("0123456789", ToString(change->changes[1]->value));

  // The dummy resolver never resolves, so nothing more happens: the loop is
  // expected to time out here.
  EXPECT_TRUE(RunLoopWithTimeout());
  EXPECT_EQ(1u, resolver_factory->get_policy_calls);

  // Change the merge strategy, triggering resolution of the conflicts.
  resolver_factory_ptr = nullptr;  // Suppress misc-use-after-move.
  resolver_factory = std::make_unique<TestConflictResolverFactory>(
      ledger::MergePolicy::LAST_ONE_WINS, resolver_factory_ptr.NewRequest(),
      [] { fsl::MessageLoop::GetCurrent()->PostQuitTask(); });
  ledger_ptr->SetConflictResolverFactory(
      std::move(resolver_factory_ptr),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(ledger_ptr.WaitForResponse());
  EXPECT_FALSE(RunLoopWithTimeout());

  // Wait for both watchers to see the merge.
  EXPECT_FALSE(RunLoopWithTimeout());
  EXPECT_FALSE(RunLoopWithTimeout());

  // Each change is seen once, and by the correct watcher only.
  EXPECT_EQ(2u, watcher1.changes_seen);
  change = std::move(watcher1.last_page_change_);
  ASSERT_EQ(2u, change->changes.size());
  EXPECT_EQ("name", convert::ToString(change->changes[0]->key));
  EXPECT_EQ("Bob", ToString(change->changes[0]->value));
  EXPECT_EQ("phone", convert::ToString(change->changes[1]->key));
  EXPECT_EQ("0123456789", ToString(change->changes[1]->value));

  EXPECT_EQ(2u, watcher2.changes_seen);
  change = std::move(watcher2.last_page_change_);
  ASSERT_EQ(1u, change->changes.size());
  EXPECT_EQ("city", convert::ToString(change->changes[0]->key));
  EXPECT_EQ("Paris", ToString(change->changes[0]->value));

  EXPECT_EQ(1u, resolver_factory->get_policy_calls);
}
// Two branches touch disjoint keys; the custom resolver receives the full
// diff against an empty common ancestor and produces a merge that keeps the
// right "name", deletes "email" and introduces a new "pager" key.
TEST_F(MergingIntegrationTest, CustomConflictResolutionNoConflict) {
  auto instance = NewLedgerAppInstance();
  ledger::ConflictResolverFactoryPtr resolver_factory_ptr;
  std::unique_ptr<TestConflictResolverFactory> resolver_factory =
      std::make_unique<TestConflictResolverFactory>(
          ledger::MergePolicy::CUSTOM, resolver_factory_ptr.NewRequest(),
          nullptr);
  ledger::LedgerPtr ledger_ptr = instance->GetTestLedger();
  ledger_ptr->SetConflictResolverFactory(
      std::move(resolver_factory_ptr),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(ledger_ptr.WaitForResponse());

  ledger::PagePtr page1 = instance->GetTestPage();
  fidl::Array<uint8_t> test_page_id;
  page1->GetId([&test_page_id](fidl::Array<uint8_t> page_id) {
    test_page_id = std::move(page_id);
  });
  EXPECT_TRUE(page1.WaitForResponse());
  ledger::PagePtr page2 = instance->GetPage(test_page_id, ledger::Status::OK);

  // Put disjoint key sets on the two connections, inside transactions so the
  // commits diverge.
  page1->StartTransaction(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page1->Put(
      convert::ToArray("name"), convert::ToArray("Alice"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page1->Put(
      convert::ToArray("city"), convert::ToArray("Paris"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page2->StartTransaction(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page2->Put(
      convert::ToArray("phone"), convert::ToArray("0123456789"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page2->Put(
      convert::ToArray("email"), convert::ToArray("<EMAIL>"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());

  page1->Commit(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page2->Commit(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());

  EXPECT_FALSE(RunLoopWithTimeout());

  // We now have a conflict.
  EXPECT_EQ(1u, resolver_factory->resolvers.size());
  EXPECT_NE(resolver_factory->resolvers.end(),
            resolver_factory->resolvers.find(convert::ToString(test_page_id)));
  ConflictResolverImpl* resolver_impl =
      &(resolver_factory->resolvers.find(convert::ToString(test_page_id))
            ->second);
  ASSERT_EQ(1u, resolver_impl->requests.size());

  // The diff lists all four keys (both sides), in key order.
  fidl::Array<ledger::DiffEntryPtr> changes;
  ASSERT_TRUE(resolver_impl->requests[0].GetFullDiff(&changes));

  EXPECT_EQ(4u, changes.size());
  EXPECT_TRUE(ChangeMatch("city", Optional<std::string>(),
                          Optional<std::string>(),
                          Optional<std::string>("Paris"), changes[0]));
  EXPECT_TRUE(ChangeMatch("email", Optional<std::string>(),
                          Optional<std::string>("<EMAIL>"),
                          Optional<std::string>(), changes[1]));
  EXPECT_TRUE(ChangeMatch("name", Optional<std::string>(),
                          Optional<std::string>(),
                          Optional<std::string>("Alice"), changes[2]));
  EXPECT_TRUE(ChangeMatch("phone", Optional<std::string>(),
                          Optional<std::string>("0123456789"),
                          Optional<std::string>(), changes[3]));

  // Common ancestor is empty.
  ledger::PageSnapshotPtr snapshot =
      resolver_impl->requests[0].common_version.Bind();
  fidl::Array<ledger::EntryPtr> entries =
      SnapshotGetEntries(&snapshot, fidl::Array<uint8_t>());
  EXPECT_EQ(0u, entries.size());

  // Prepare the merged values
  fidl::Array<ledger::MergedValuePtr> merged_values =
      fidl::Array<ledger::MergedValuePtr>::New(0);
  {
    // Keep the right-side value for "name".
    ledger::MergedValuePtr merged_value = ledger::MergedValue::New();
    merged_value->key = convert::ToArray("name");
    merged_value->source = ledger::ValueSource::RIGHT;
    merged_values.push_back(std::move(merged_value));
  }
  {
    // Remove "email" from the merged result.
    ledger::MergedValuePtr merged_value = ledger::MergedValue::New();
    merged_value->key = convert::ToArray("email");
    merged_value->source = ledger::ValueSource::DELETE;
    merged_values.push_back(std::move(merged_value));
  }
  {
    // Introduce a brand-new key "pager" with an inline value.
    ledger::MergedValuePtr merged_value = ledger::MergedValue::New();
    merged_value->key = convert::ToArray("pager");
    merged_value->source = ledger::ValueSource::NEW;
    ledger::BytesOrReferencePtr value = ledger::BytesOrReference::New();
    value->set_bytes(convert::ToArray("<EMAIL>"));
    merged_value->new_value = std::move(value);
    merged_values.push_back(std::move(merged_value));
  }

  // Watch for the change.
  ledger::PageWatcherPtr watcher_ptr;
  Watcher watcher(watcher_ptr.NewRequest(),
                  [] { fsl::MessageLoop::GetCurrent()->PostQuitTask(); });
  ledger::PageSnapshotPtr snapshot2;
  page1->GetSnapshot(
      snapshot2.NewRequest(), nullptr, std::move(watcher_ptr),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());

  EXPECT_TRUE(resolver_impl->requests[0].Merge(std::move(merged_values)));

  // Wait for the watcher to be called.
  EXPECT_FALSE(RunLoopWithTimeout());

  // Merged state: "name" (kept), "pager" (new), "phone" (untouched).
  fidl::Array<ledger::EntryPtr> final_entries =
      SnapshotGetEntries(&watcher.last_snapshot_, fidl::Array<uint8_t>());
  ASSERT_EQ(3u, final_entries.size());
  EXPECT_EQ("name", convert::ExtendedStringView(final_entries[0]->key));
  EXPECT_EQ("pager", convert::ExtendedStringView(final_entries[1]->key));
  EXPECT_EQ("phone", convert::ExtendedStringView(final_entries[2]->key));
}
// Creates enough entries on both branches that GetFullDiff() must paginate
// (at least one partial result), and checks the combined diff is complete
// and in key order.
TEST_F(MergingIntegrationTest, CustomConflictResolutionGetDiffMultiPart) {
  auto instance = NewLedgerAppInstance();
  ledger::ConflictResolverFactoryPtr resolver_factory_ptr;
  auto resolver_factory = std::make_unique<TestConflictResolverFactory>(
      ledger::MergePolicy::CUSTOM, resolver_factory_ptr.NewRequest(), nullptr);
  ledger::LedgerPtr ledger_ptr = instance->GetTestLedger();
  // Shared status-checking callback to keep the Put/commit calls short.
  std::function<void(ledger::Status)> status_ok_callback =
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); };
  ledger_ptr->SetConflictResolverFactory(std::move(resolver_factory_ptr),
                                         status_ok_callback);
  EXPECT_TRUE(ledger_ptr.WaitForResponse());

  ledger::PagePtr page1 = instance->GetTestPage();
  fidl::Array<uint8_t> test_page_id;
  page1->GetId([&test_page_id](fidl::Array<uint8_t> page_id) {
    test_page_id = std::move(page_id);
  });
  EXPECT_TRUE(page1.WaitForResponse());
  ledger::PagePtr page2 = instance->GetPage(test_page_id, ledger::Status::OK);

  // Write N distinct keys on each branch inside separate transactions.
  page1->StartTransaction(status_ok_callback);
  EXPECT_TRUE(page1.WaitForResponse());
  int N = 50;
  std::vector<std::string> page1_keys;
  for (int i = 0; i < N; ++i) {
    page1_keys.push_back(fxl::StringPrintf("page1_key_%02d", i));
    page1->Put(convert::ToArray(page1_keys.back()), convert::ToArray("value"),
               status_ok_callback);
    EXPECT_TRUE(page1.WaitForResponse());
  }

  page2->StartTransaction(status_ok_callback);
  EXPECT_TRUE(page2.WaitForResponse());
  std::vector<std::string> page2_keys;
  for (int i = 0; i < N; ++i) {
    page2_keys.push_back(fxl::StringPrintf("page2_key_%02d", i));
    page2->Put(convert::ToArray(page2_keys.back()), convert::ToArray("value"),
               status_ok_callback);
    EXPECT_TRUE(page2.WaitForResponse());
  }

  page1->Commit(status_ok_callback);
  EXPECT_TRUE(page1.WaitForResponse());
  page2->Commit(status_ok_callback);
  EXPECT_TRUE(page2.WaitForResponse());

  EXPECT_FALSE(RunLoopWithTimeout());

  // We now have a conflict.
  EXPECT_EQ(1u, resolver_factory->resolvers.size());
  EXPECT_NE(resolver_factory->resolvers.end(),
            resolver_factory->resolvers.find(convert::ToString(test_page_id)));
  ConflictResolverImpl* resolver_impl =
      &(resolver_factory->resolvers.find(convert::ToString(test_page_id))
            ->second);
  ASSERT_EQ(1u, resolver_impl->requests.size());

  // Require at least one paginated (partial) response.
  fidl::Array<ledger::DiffEntryPtr> changes;
  ASSERT_TRUE(resolver_impl->requests[0].GetFullDiff(&changes, 1));

  EXPECT_EQ(2u * N, changes.size());
  // Keys are in order, so we expect to have all the page1_key_* keys before the
  // page2_key_* keys.
  for (int i = 0; i < N; ++i) {
    // Left change is the most recent, so the one made on |page2|; right change
    // comes from |page1|.
    EXPECT_TRUE(ChangeMatch(page1_keys[i], Optional<std::string>(),
                            Optional<std::string>(),
                            Optional<std::string>("value"), changes[i]));
    EXPECT_TRUE(ChangeMatch(page2_keys[i], Optional<std::string>(),
                            Optional<std::string>("value"),
                            Optional<std::string>(), changes[N + i]));
  }
}
// Killing the resolver mid-conflict makes the Ledger request resolution
// again from a fresh resolver; resolving after all page connections are
// closed must not crash the Ledger.
TEST_F(MergingIntegrationTest, CustomConflictResolutionClosingPipe) {
  auto instance = NewLedgerAppInstance();
  ledger::ConflictResolverFactoryPtr resolver_factory_ptr;
  std::unique_ptr<TestConflictResolverFactory> resolver_factory =
      std::make_unique<TestConflictResolverFactory>(
          ledger::MergePolicy::CUSTOM, resolver_factory_ptr.NewRequest(),
          nullptr);
  ledger::LedgerPtr ledger_ptr = instance->GetTestLedger();
  ledger_ptr->SetConflictResolverFactory(
      std::move(resolver_factory_ptr),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(ledger_ptr.WaitForResponse());

  ledger::PagePtr page1 = instance->GetTestPage();
  fidl::Array<uint8_t> test_page_id;
  page1->GetId([&test_page_id](fidl::Array<uint8_t> page_id) {
    test_page_id = std::move(page_id);
  });
  EXPECT_TRUE(page1.WaitForResponse());
  ledger::PagePtr page2 = instance->GetPage(test_page_id, ledger::Status::OK);

  // Create a conflict on the "name" key.
  page1->StartTransaction(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page1->Put(
      convert::ToArray("name"), convert::ToArray("Alice"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page2->StartTransaction(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page2->Put(
      convert::ToArray("name"), convert::ToArray("Bob"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());

  page1->Commit(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page2->Commit(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());

  EXPECT_FALSE(RunLoopWithTimeout());

  // We now have a conflict.
  EXPECT_EQ(1u, resolver_factory->resolvers.size());
  EXPECT_NE(resolver_factory->resolvers.end(),
            resolver_factory->resolvers.find(convert::ToString(test_page_id)));
  ConflictResolverImpl* resolver_impl =
      &(resolver_factory->resolvers.find(convert::ToString(test_page_id))
            ->second);
  EXPECT_EQ(1u, resolver_impl->requests.size());

  // Kill the resolver
  resolver_factory->resolvers.clear();
  EXPECT_EQ(0u, resolver_factory->resolvers.size());

  EXPECT_FALSE(RunLoopWithTimeout());

  // We should ask again for a resolution.
  EXPECT_EQ(1u, resolver_factory->resolvers.size());
  EXPECT_NE(resolver_factory->resolvers.end(),
            resolver_factory->resolvers.find(convert::ToString(test_page_id)));
  resolver_impl =
      &(resolver_factory->resolvers.find(convert::ToString(test_page_id))
            ->second);
  ASSERT_EQ(1u, resolver_impl->requests.size());

  // Remove all references to a page:
  page1 = nullptr;
  page2 = nullptr;
  // With the pages closed, nothing should wake the loop: expect a timeout.
  EXPECT_TRUE(RunLoopWithTimeout(fxl::TimeDelta::FromMilliseconds(500)));

  // Resolution should not crash the Ledger
  fidl::Array<ledger::MergedValuePtr> merged_values =
      fidl::Array<ledger::MergedValuePtr>::New(0);
  EXPECT_TRUE(resolver_impl->requests[0].Merge(std::move(merged_values)));
  EXPECT_TRUE(RunLoopWithTimeout(fxl::TimeDelta::FromMilliseconds(200)));
}
// Verifies that calling SetConflictResolverFactory a second time disconnects
// resolvers created by the first factory and re-routes the still-pending
// conflict to a resolver obtained from the new factory.
TEST_F(MergingIntegrationTest, CustomConflictResolutionResetFactory) {
  auto instance = NewLedgerAppInstance();
  ledger::ConflictResolverFactoryPtr resolver_factory_ptr;
  std::unique_ptr<TestConflictResolverFactory> resolver_factory =
      std::make_unique<TestConflictResolverFactory>(
          ledger::MergePolicy::CUSTOM, resolver_factory_ptr.NewRequest(),
          nullptr);
  ledger::LedgerPtr ledger_ptr = instance->GetTestLedger();
  ledger_ptr->SetConflictResolverFactory(
      std::move(resolver_factory_ptr),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(ledger_ptr.WaitForResponse());

  // Two connections to the same page, so concurrent transactions can conflict.
  ledger::PagePtr page1 = instance->GetTestPage();
  fidl::Array<uint8_t> test_page_id;
  page1->GetId([&test_page_id](fidl::Array<uint8_t> page_id) {
    test_page_id = std::move(page_id);
  });
  EXPECT_TRUE(page1.WaitForResponse());
  ledger::PagePtr page2 = instance->GetPage(test_page_id, ledger::Status::OK);

  // Concurrent transactions writing the same key create a conflict on commit.
  page1->StartTransaction(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page1->Put(
      convert::ToArray("name"), convert::ToArray("Alice"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page2->StartTransaction(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page2->Put(
      convert::ToArray("name"), convert::ToArray("Bob"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page1->Commit(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page2->Commit(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());

  EXPECT_FALSE(RunLoopWithTimeout());

  // We now have a conflict; the first factory should have created a resolver
  // for this page.
  EXPECT_EQ(1u, resolver_factory->resolvers.size());
  // ASSERT (not EXPECT): dereferencing the iterator below would be undefined
  // behavior if the lookup returned end().
  ASSERT_NE(resolver_factory->resolvers.end(),
            resolver_factory->resolvers.find(convert::ToString(test_page_id)));
  ConflictResolverImpl* resolver_impl =
      &(resolver_factory->resolvers.find(convert::ToString(test_page_id))
            ->second);
  EXPECT_FALSE(resolver_impl->disconnected);
  EXPECT_EQ(1u, resolver_impl->requests.size());

  // Change the factory.
  ledger::ConflictResolverFactoryPtr resolver_factory_ptr2;
  std::unique_ptr<TestConflictResolverFactory> resolver_factory2 =
      std::make_unique<TestConflictResolverFactory>(
          ledger::MergePolicy::CUSTOM, resolver_factory_ptr2.NewRequest(),
          nullptr);
  ledger_ptr->SetConflictResolverFactory(
      std::move(resolver_factory_ptr2),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(ledger_ptr.WaitForResponse());

  // Two runs of the loop: one for the conflict resolution request, one for the
  // disconnect.
  EXPECT_FALSE(RunLoopWithTimeout());
  EXPECT_FALSE(RunLoopWithTimeout());

  // The previous resolver should have been disconnected.
  EXPECT_TRUE(resolver_impl->disconnected);
  // It shouldn't have been called again.
  EXPECT_EQ(1u, resolver_impl->requests.size());
  // We should ask again for a resolution on a new resolver.
  EXPECT_EQ(1u, resolver_factory2->resolvers.size());
  ASSERT_NE(resolver_factory2->resolvers.end(),
            resolver_factory2->resolvers.find(convert::ToString(test_page_id)));
  ConflictResolverImpl* resolver_impl2 =
      &(resolver_factory2->resolvers.find(convert::ToString(test_page_id))
            ->second);
  ASSERT_EQ(1u, resolver_impl2->requests.size());

  // Remove all references to a page:
  page1 = nullptr;
  page2 = nullptr;
  EXPECT_TRUE(RunLoopWithTimeout(fxl::TimeDelta::FromMilliseconds(500)));

  // Resolution should not crash the Ledger, even though the page bindings are
  // gone.
  fidl::Array<ledger::MergedValuePtr> merged_values =
      fidl::Array<ledger::MergedValuePtr>::New(0);
  EXPECT_TRUE(resolver_impl2->requests[0].Merge(std::move(merged_values)));
  EXPECT_TRUE(RunLoopWithTimeout(fxl::TimeDelta::FromMilliseconds(200)));
}
// Tests for a race between setting the new conflict resolver and sending the
// resolution request. Specifically, the resolution request must be sent to the
// new resolver, not the old one. The new factory is constructed with a 250ms
// response delay to widen the race window.
TEST_F(MergingIntegrationTest,
       CustomConflictResolutionResetFactory_FactoryRace) {
  auto instance = NewLedgerAppInstance();
  ledger::ConflictResolverFactoryPtr resolver_factory_ptr;
  std::unique_ptr<TestConflictResolverFactory> resolver_factory =
      std::make_unique<TestConflictResolverFactory>(
          ledger::MergePolicy::CUSTOM, resolver_factory_ptr.NewRequest(),
          nullptr);
  ledger::LedgerPtr ledger_ptr = instance->GetTestLedger();
  ledger_ptr->SetConflictResolverFactory(
      std::move(resolver_factory_ptr),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(ledger_ptr.WaitForResponse());

  // Two connections to the same page, so concurrent transactions can conflict.
  ledger::PagePtr page1 = instance->GetTestPage();
  fidl::Array<uint8_t> test_page_id;
  page1->GetId([&test_page_id](fidl::Array<uint8_t> page_id) {
    test_page_id = std::move(page_id);
  });
  EXPECT_TRUE(page1.WaitForResponse());
  ledger::PagePtr page2 = instance->GetPage(test_page_id, ledger::Status::OK);

  // Concurrent transactions writing the same key create a conflict on commit.
  page1->StartTransaction(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page1->Put(
      convert::ToArray("name"), convert::ToArray("Alice"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page2->StartTransaction(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page2->Put(
      convert::ToArray("name"), convert::ToArray("Bob"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page1->Commit(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page2->Commit(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());

  EXPECT_FALSE(RunLoopWithTimeout());

  // We now have a conflict.
  EXPECT_EQ(1u, resolver_factory->resolvers.size());
  // ASSERT (not EXPECT): dereferencing the iterator below would be undefined
  // behavior if the lookup returned end().
  ASSERT_NE(resolver_factory->resolvers.end(),
            resolver_factory->resolvers.find(convert::ToString(test_page_id)));
  ConflictResolverImpl* resolver_impl =
      &(resolver_factory->resolvers.find(convert::ToString(test_page_id))
            ->second);
  EXPECT_FALSE(resolver_impl->disconnected);
  EXPECT_EQ(1u, resolver_impl->requests.size());

  // Change the factory. The 250ms delay keeps the new factory from answering
  // immediately, exercising the race with the in-flight resolution request.
  ledger::ConflictResolverFactoryPtr resolver_factory_ptr2;
  std::unique_ptr<TestConflictResolverFactory> resolver_factory2 =
      std::make_unique<TestConflictResolverFactory>(
          ledger::MergePolicy::CUSTOM, resolver_factory_ptr2.NewRequest(),
          nullptr, fxl::TimeDelta::FromMilliseconds(250));
  ledger_ptr->SetConflictResolverFactory(
      std::move(resolver_factory_ptr2),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(ledger_ptr.WaitForResponse());

  // Two runs of the loop: one for the conflict resolution request, one for the
  // disconnect.
  EXPECT_FALSE(RunLoopWithTimeout());
  EXPECT_FALSE(RunLoopWithTimeout());

  // The previous resolver should have been disconnected.
  EXPECT_TRUE(resolver_impl->disconnected);
  // It shouldn't have been called again.
  EXPECT_EQ(1u, resolver_impl->requests.size());
  // We should ask again for a resolution on a new resolver.
  EXPECT_EQ(1u, resolver_factory2->resolvers.size());
  ASSERT_NE(resolver_factory2->resolvers.end(),
            resolver_factory2->resolvers.find(convert::ToString(test_page_id)));
  ConflictResolverImpl* resolver_impl2 =
      &(resolver_factory2->resolvers.find(convert::ToString(test_page_id))
            ->second);
  ASSERT_EQ(1u, resolver_impl2->requests.size());
}
// Verifies a custom merge delivered in multiple parts (MergeType::MULTIPART):
// the final state keeps the RIGHT value for "name", deletes "email", and adds
// a NEW value for "pager".
TEST_F(MergingIntegrationTest, CustomConflictResolutionMultipartMerge) {
  auto instance = NewLedgerAppInstance();
  ledger::ConflictResolverFactoryPtr resolver_factory_ptr;
  std::unique_ptr<TestConflictResolverFactory> resolver_factory =
      std::make_unique<TestConflictResolverFactory>(
          ledger::MergePolicy::CUSTOM, resolver_factory_ptr.NewRequest(),
          nullptr);
  ledger::LedgerPtr ledger_ptr = instance->GetTestLedger();
  ledger_ptr->SetConflictResolverFactory(
      std::move(resolver_factory_ptr),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(ledger_ptr.WaitForResponse());

  // Two connections to the same page, so concurrent transactions can conflict.
  ledger::PagePtr page1 = instance->GetTestPage();
  fidl::Array<uint8_t> test_page_id;
  page1->GetId([&test_page_id](fidl::Array<uint8_t> page_id) {
    test_page_id = std::move(page_id);
  });
  EXPECT_TRUE(page1.WaitForResponse());
  ledger::PagePtr page2 = instance->GetPage(test_page_id, ledger::Status::OK);

  // Concurrent transactions on different keys still conflict at commit time.
  page1->StartTransaction(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page1->Put(
      convert::ToArray("name"), convert::ToArray("Alice"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page2->StartTransaction(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page2->Put(
      convert::ToArray("email"), convert::ToArray("<EMAIL>"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page1->Commit(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page2->Commit(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());

  EXPECT_FALSE(RunLoopWithTimeout());

  // We now have a conflict.
  EXPECT_EQ(1u, resolver_factory->resolvers.size());
  // ASSERT (not EXPECT): dereferencing the iterator below would be undefined
  // behavior if the lookup returned end().
  ASSERT_NE(resolver_factory->resolvers.end(),
            resolver_factory->resolvers.find(convert::ToString(test_page_id)));
  ConflictResolverImpl* resolver_impl =
      &(resolver_factory->resolvers.find(convert::ToString(test_page_id))
            ->second);
  ASSERT_EQ(1u, resolver_impl->requests.size());

  // Prepare the merged values: keep RIGHT "name", DELETE "email", add NEW
  // "pager".
  fidl::Array<ledger::MergedValuePtr> merged_values =
      fidl::Array<ledger::MergedValuePtr>::New(0);
  {
    ledger::MergedValuePtr merged_value = ledger::MergedValue::New();
    merged_value->key = convert::ToArray("name");
    merged_value->source = ledger::ValueSource::RIGHT;
    merged_values.push_back(std::move(merged_value));
  }
  {
    ledger::MergedValuePtr merged_value = ledger::MergedValue::New();
    merged_value->key = convert::ToArray("email");
    merged_value->source = ledger::ValueSource::DELETE;
    merged_values.push_back(std::move(merged_value));
  }
  {
    ledger::MergedValuePtr merged_value = ledger::MergedValue::New();
    merged_value->key = convert::ToArray("pager");
    merged_value->source = ledger::ValueSource::NEW;
    ledger::BytesOrReferencePtr value = ledger::BytesOrReference::New();
    value->set_bytes(convert::ToArray("<EMAIL>"));
    merged_value->new_value = std::move(value);
    merged_values.push_back(std::move(merged_value));
  }

  // Watch for the change.
  ledger::PageWatcherPtr watcher_ptr;
  Watcher watcher(watcher_ptr.NewRequest(),
                  [] { fsl::MessageLoop::GetCurrent()->PostQuitTask(); });
  ledger::PageSnapshotPtr snapshot;
  page1->GetSnapshot(
      snapshot.NewRequest(), nullptr, std::move(watcher_ptr),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());

  EXPECT_TRUE(resolver_impl->requests[0].Merge(std::move(merged_values),
                                               MergeType::MULTIPART));

  // Wait for the watcher to be called.
  EXPECT_FALSE(RunLoopWithTimeout());

  // Only "name" and "pager" survive the merge; "email" was deleted.
  fidl::Array<ledger::EntryPtr> final_entries =
      SnapshotGetEntries(&watcher.last_snapshot_, fidl::Array<uint8_t>());
  ASSERT_EQ(2u, final_entries.size());
  EXPECT_EQ("name", convert::ExtendedStringView(final_entries[0]->key));
  EXPECT_EQ("pager", convert::ExtendedStringView(final_entries[1]->key));
}
// Verifies that with AUTOMATIC_WITH_FALLBACK policy, commits touching disjoint
// key sets are merged automatically: the resolver is created but never asked
// to resolve anything, and the final page contains the union of both commits.
TEST_F(MergingIntegrationTest, AutoConflictResolutionNoConflict) {
  auto instance = NewLedgerAppInstance();
  ledger::ConflictResolverFactoryPtr resolver_factory_ptr;
  std::unique_ptr<TestConflictResolverFactory> resolver_factory =
      std::make_unique<TestConflictResolverFactory>(
          ledger::MergePolicy::AUTOMATIC_WITH_FALLBACK,
          resolver_factory_ptr.NewRequest(), nullptr);
  ledger::LedgerPtr ledger_ptr = instance->GetTestLedger();
  ledger_ptr->SetConflictResolverFactory(
      std::move(resolver_factory_ptr),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(ledger_ptr.WaitForResponse());

  // Two connections to the same page.
  ledger::PagePtr page1 = instance->GetTestPage();
  fidl::Array<uint8_t> test_page_id;
  page1->GetId([&test_page_id](fidl::Array<uint8_t> page_id) {
    test_page_id = std::move(page_id);
  });
  EXPECT_TRUE(page1.WaitForResponse());
  ledger::PagePtr page2 = instance->GetPage(test_page_id, ledger::Status::OK);

  // Watch for changes.
  ledger::PageWatcherPtr watcher_ptr;
  Watcher watcher(watcher_ptr.NewRequest(),
                  []() { fsl::MessageLoop::GetCurrent()->PostQuitTask(); });
  ledger::PageSnapshotPtr snapshot2;
  page1->GetSnapshot(
      snapshot2.NewRequest(), nullptr, std::move(watcher_ptr),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());

  // Concurrent transactions on disjoint keys: no actual conflict to resolve.
  page1->StartTransaction(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page1->Put(
      convert::ToArray("name"), convert::ToArray("Alice"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page1->Put(
      convert::ToArray("city"), convert::ToArray("Paris"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page2->StartTransaction(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page2->Put(
      convert::ToArray("email"), convert::ToArray("<EMAIL>"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page2->Put(
      convert::ToArray("phone"), convert::ToArray("0123456789"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page1->Commit(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  EXPECT_FALSE(RunLoopWithTimeout());

  // We should have seen the first commit at this point.
  EXPECT_EQ(1u, watcher.changes_seen);

  page2->Commit(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  EXPECT_FALSE(RunLoopWithTimeout());

  // We now have an automatically-resolved conflict.
  EXPECT_EQ(1u, resolver_factory->resolvers.size());
  // ASSERT (not EXPECT): dereferencing the iterator below would be undefined
  // behavior if the lookup returned end().
  ASSERT_NE(resolver_factory->resolvers.end(),
            resolver_factory->resolvers.find(convert::ToString(test_page_id)));
  ConflictResolverImpl* resolver_impl =
      &(resolver_factory->resolvers.find(convert::ToString(test_page_id))
            ->second);
  // We haven't been asked to resolve anything.
  EXPECT_EQ(0u, resolver_impl->requests.size());

  // The watcher saw the merge commit; the final state is the union of the two
  // transactions, with entries in key order.
  EXPECT_EQ(2u, watcher.changes_seen);
  fidl::Array<ledger::EntryPtr> final_entries =
      SnapshotGetEntries(&watcher.last_snapshot_, fidl::Array<uint8_t>());
  ASSERT_EQ(4u, final_entries.size());
  EXPECT_EQ("city", convert::ExtendedStringView(final_entries[0]->key));
  EXPECT_EQ("email", convert::ExtendedStringView(final_entries[1]->key));
  EXPECT_EQ("name", convert::ExtendedStringView(final_entries[2]->key));
  EXPECT_EQ("phone", convert::ExtendedStringView(final_entries[3]->key));
}
// Verifies that with AUTOMATIC_WITH_FALLBACK policy, commits that touch the
// same key fall back to the custom resolver: the full diff is reported, the
// common ancestor is exposed, and the chosen merge is applied.
TEST_F(MergingIntegrationTest, AutoConflictResolutionWithConflict) {
  auto instance = NewLedgerAppInstance();
  ledger::ConflictResolverFactoryPtr resolver_factory_ptr;
  std::unique_ptr<TestConflictResolverFactory> resolver_factory =
      std::make_unique<TestConflictResolverFactory>(
          ledger::MergePolicy::AUTOMATIC_WITH_FALLBACK,
          resolver_factory_ptr.NewRequest(), nullptr);
  ledger::LedgerPtr ledger_ptr = instance->GetTestLedger();
  ledger_ptr->SetConflictResolverFactory(
      std::move(resolver_factory_ptr),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(ledger_ptr.WaitForResponse());

  // Two connections to the same page.
  ledger::PagePtr page1 = instance->GetTestPage();
  fidl::Array<uint8_t> test_page_id;
  page1->GetId([&test_page_id](fidl::Array<uint8_t> page_id) {
    test_page_id = std::move(page_id);
  });
  EXPECT_TRUE(page1.WaitForResponse());
  ledger::PagePtr page2 = instance->GetPage(test_page_id, ledger::Status::OK);

  // Concurrent transactions both write "city": a real conflict.
  page1->StartTransaction(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page1->Put(
      convert::ToArray("city"), convert::ToArray("Paris"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page2->StartTransaction(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page2->Put(
      convert::ToArray("name"), convert::ToArray("Alice"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page2->Put(
      convert::ToArray("city"), convert::ToArray("San Francisco"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page1->Commit(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page2->Commit(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());

  EXPECT_FALSE(RunLoopWithTimeout());

  // We now have a conflict.
  EXPECT_EQ(1u, resolver_factory->resolvers.size());
  // ASSERT (not EXPECT): dereferencing the iterator below would be undefined
  // behavior if the lookup returned end().
  ASSERT_NE(resolver_factory->resolvers.end(),
            resolver_factory->resolvers.find(convert::ToString(test_page_id)));
  ConflictResolverImpl* resolver_impl =
      &(resolver_factory->resolvers.find(convert::ToString(test_page_id))
            ->second);
  ASSERT_EQ(1u, resolver_impl->requests.size());

  fidl::Array<ledger::DiffEntryPtr> changes;
  ASSERT_TRUE(resolver_impl->requests[0].GetFullDiff(&changes));

  EXPECT_EQ(2u, changes.size());
  // Left change is the most recent, so the one made on |page2|.
  EXPECT_TRUE(ChangeMatch("city", Optional<std::string>(),
                          Optional<std::string>("San Francisco"),
                          Optional<std::string>("Paris"), changes[0]));
  EXPECT_TRUE(ChangeMatch("name", Optional<std::string>(),
                          Optional<std::string>("Alice"),
                          Optional<std::string>(), changes[1]));

  // Common ancestor is empty.
  ledger::PageSnapshotPtr snapshot =
      resolver_impl->requests[0].common_version.Bind();
  fidl::Array<ledger::EntryPtr> entries =
      SnapshotGetEntries(&snapshot, fidl::Array<uint8_t>());
  EXPECT_EQ(0u, entries.size());

  // Prepare the merged values: keep the RIGHT (page1) value for "city".
  fidl::Array<ledger::MergedValuePtr> merged_values =
      fidl::Array<ledger::MergedValuePtr>::New(0);
  {
    ledger::MergedValuePtr merged_value = ledger::MergedValue::New();
    merged_value->key = convert::ToArray("city");
    merged_value->source = ledger::ValueSource::RIGHT;
    merged_values.push_back(std::move(merged_value));
  }

  // Watch for the change.
  ledger::PageWatcherPtr watcher_ptr;
  Watcher watcher(watcher_ptr.NewRequest(),
                  []() { fsl::MessageLoop::GetCurrent()->PostQuitTask(); });
  ledger::PageSnapshotPtr snapshot2;
  page1->GetSnapshot(
      snapshot2.NewRequest(), nullptr, std::move(watcher_ptr),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());

  EXPECT_TRUE(resolver_impl->requests[0].Merge(std::move(merged_values)));

  // Wait for the watcher to be called.
  EXPECT_FALSE(RunLoopWithTimeout());

  fidl::Array<ledger::EntryPtr> final_entries =
      SnapshotGetEntries(&watcher.last_snapshot_, fidl::Array<uint8_t>());
  ASSERT_EQ(2u, final_entries.size());
  EXPECT_EQ("city", convert::ExtendedStringView(final_entries[0]->key));
  EXPECT_EQ("name", convert::ExtendedStringView(final_entries[1]->key));
}
// Verifies a multipart merge under the AUTOMATIC_WITH_FALLBACK policy: the
// fallback resolver keeps the RIGHT "city" and adds a NEW "previous_city".
TEST_F(MergingIntegrationTest, AutoConflictResolutionMultipartMerge) {
  auto instance = NewLedgerAppInstance();
  ledger::ConflictResolverFactoryPtr resolver_factory_ptr;
  std::unique_ptr<TestConflictResolverFactory> resolver_factory =
      std::make_unique<TestConflictResolverFactory>(
          ledger::MergePolicy::AUTOMATIC_WITH_FALLBACK,
          resolver_factory_ptr.NewRequest(), nullptr);
  ledger::LedgerPtr ledger_ptr = instance->GetTestLedger();
  ledger_ptr->SetConflictResolverFactory(
      std::move(resolver_factory_ptr),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(ledger_ptr.WaitForResponse());

  // Two connections to the same page.
  ledger::PagePtr page1 = instance->GetTestPage();
  fidl::Array<uint8_t> test_page_id;
  page1->GetId([&test_page_id](fidl::Array<uint8_t> page_id) {
    test_page_id = std::move(page_id);
  });
  EXPECT_TRUE(page1.WaitForResponse());
  ledger::PagePtr page2 = instance->GetPage(test_page_id, ledger::Status::OK);

  // Concurrent transactions both write "city": a real conflict, so the
  // automatic policy falls back to the custom resolver.
  page1->StartTransaction(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page1->Put(
      convert::ToArray("city"), convert::ToArray("Paris"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page2->StartTransaction(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page2->Put(
      convert::ToArray("name"), convert::ToArray("Alice"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page2->Put(
      convert::ToArray("city"), convert::ToArray("San Francisco"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page1->Commit(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page2->Commit(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());

  EXPECT_FALSE(RunLoopWithTimeout());

  // We now have a conflict.
  EXPECT_EQ(1u, resolver_factory->resolvers.size());
  // ASSERT (not EXPECT): dereferencing the iterator below would be undefined
  // behavior if the lookup returned end().
  ASSERT_NE(resolver_factory->resolvers.end(),
            resolver_factory->resolvers.find(convert::ToString(test_page_id)));
  ConflictResolverImpl* resolver_impl =
      &(resolver_factory->resolvers.find(convert::ToString(test_page_id))
            ->second);
  ASSERT_EQ(1u, resolver_impl->requests.size());

  // Prepare the merged values: keep RIGHT "city", add NEW "previous_city".
  fidl::Array<ledger::MergedValuePtr> merged_values =
      fidl::Array<ledger::MergedValuePtr>::New(0);
  {
    ledger::MergedValuePtr merged_value = ledger::MergedValue::New();
    merged_value->key = convert::ToArray("city");
    merged_value->source = ledger::ValueSource::RIGHT;
    merged_values.push_back(std::move(merged_value));
  }
  {
    ledger::MergedValuePtr merged_value = ledger::MergedValue::New();
    merged_value->key = convert::ToArray("previous_city");
    merged_value->source = ledger::ValueSource::NEW;
    merged_value->new_value = ledger::BytesOrReference::New();
    merged_value->new_value->set_bytes(convert::ToArray("San Francisco"));
    merged_values.push_back(std::move(merged_value));
  }

  // Watch for the change.
  ledger::PageWatcherPtr watcher_ptr;
  Watcher watcher(watcher_ptr.NewRequest(),
                  []() { fsl::MessageLoop::GetCurrent()->PostQuitTask(); });
  ledger::PageSnapshotPtr snapshot;
  page1->GetSnapshot(
      snapshot.NewRequest(), nullptr, std::move(watcher_ptr),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());

  EXPECT_TRUE(resolver_impl->requests[0].Merge(std::move(merged_values),
                                               MergeType::MULTIPART));

  // Wait for the watcher to be called.
  EXPECT_FALSE(RunLoopWithTimeout());

  fidl::Array<ledger::EntryPtr> final_entries =
      SnapshotGetEntries(&watcher.last_snapshot_, fidl::Array<uint8_t>());
  ASSERT_EQ(3u, final_entries.size());
  EXPECT_EQ("city", convert::ExtendedStringView(final_entries[0]->key));
  EXPECT_EQ("name", convert::ExtendedStringView(final_entries[1]->key));
  EXPECT_EQ("previous_city",
            convert::ExtendedStringView(final_entries[2]->key));
}
// Tests a merge in which the right side contains no change (e.g. a change was
// made in a commit, then reverted in another commit).
// NOTE: the exact sequence of RunLoopWithTimeout() pumps below is load-bearing:
// each Commit needs two pumps (one for the commit callback, one for the page
// watcher notification), so do not reorder or collapse them.
TEST_F(MergingIntegrationTest, AutoConflictResolutionNoRightChange) {
  auto instance = NewLedgerAppInstance();
  ledger::ConflictResolverFactoryPtr resolver_factory_ptr;
  std::unique_ptr<TestConflictResolverFactory> resolver_factory =
      std::make_unique<TestConflictResolverFactory>(
          ledger::MergePolicy::AUTOMATIC_WITH_FALLBACK,
          resolver_factory_ptr.NewRequest(), nullptr);
  ledger::LedgerPtr ledger_ptr = instance->GetTestLedger();
  ledger::Status status;
  ledger_ptr->SetConflictResolverFactory(
      std::move(resolver_factory_ptr),
      callback::Capture(MakeQuitTask(), &status));
  EXPECT_FALSE(RunLoopWithTimeout());
  EXPECT_EQ(ledger::Status::OK, status);
  // Two connections to the same page.
  ledger::PagePtr page1 = instance->GetTestPage();
  fidl::Array<uint8_t> test_page_id;
  page1->GetId(callback::Capture(MakeQuitTask(), &test_page_id));
  EXPECT_FALSE(RunLoopWithTimeout());
  ledger::PagePtr page2 = instance->GetPage(test_page_id, ledger::Status::OK);
  // Watch for changes.
  ledger::PageWatcherPtr watcher_ptr;
  Watcher watcher(watcher_ptr.NewRequest(), MakeQuitTask());
  ledger::PageSnapshotPtr snapshot1;
  page1->GetSnapshot(snapshot1.NewRequest(), nullptr, std::move(watcher_ptr),
                     callback::Capture(MakeQuitTask(), &status));
  EXPECT_FALSE(RunLoopWithTimeout());
  EXPECT_EQ(ledger::Status::OK, status);
  // Open both transactions before committing anything, so page2's commit will
  // diverge from page1's history.
  page1->StartTransaction(callback::Capture(MakeQuitTask(), &status));
  EXPECT_FALSE(RunLoopWithTimeout());
  EXPECT_EQ(ledger::Status::OK, status);
  page2->StartTransaction(callback::Capture(MakeQuitTask(), &status));
  EXPECT_FALSE(RunLoopWithTimeout());
  EXPECT_EQ(ledger::Status::OK, status);
  // First commit on page1: add "name".
  page1->Put(convert::ToArray("name"), convert::ToArray("Alice"),
             callback::Capture(MakeQuitTask(), &status));
  EXPECT_FALSE(RunLoopWithTimeout());
  EXPECT_EQ(ledger::Status::OK, status);
  page1->Commit(callback::Capture(MakeQuitTask(), &status));
  // Two pumps: commit callback, then watcher notification.
  EXPECT_FALSE(RunLoopWithTimeout());
  EXPECT_FALSE(RunLoopWithTimeout());
  EXPECT_EQ(ledger::Status::OK, status);
  // We should have seen the first commit of page 1.
  EXPECT_EQ(1u, watcher.changes_seen);
  // Second commit on page1: revert the "name" addition, so the net change on
  // this side (the "right" side of the merge) is empty.
  page1->StartTransaction(callback::Capture(MakeQuitTask(), &status));
  EXPECT_FALSE(RunLoopWithTimeout());
  EXPECT_EQ(ledger::Status::OK, status);
  page1->Delete(convert::ToArray("name"),
                callback::Capture(MakeQuitTask(), &status));
  EXPECT_FALSE(RunLoopWithTimeout());
  EXPECT_EQ(ledger::Status::OK, status);
  page1->Commit(callback::Capture(MakeQuitTask(), &status));
  EXPECT_FALSE(RunLoopWithTimeout());
  EXPECT_FALSE(RunLoopWithTimeout());
  EXPECT_EQ(ledger::Status::OK, status);
  // We should have seen the second commit of page 1.
  EXPECT_EQ(2u, watcher.changes_seen);
  // Meanwhile page2's transaction adds "email" and commits, creating the
  // divergence that triggers the (automatic) merge.
  page2->Put(convert::ToArray("email"), convert::ToArray("<EMAIL>"),
             callback::Capture(MakeQuitTask(), &status));
  EXPECT_FALSE(RunLoopWithTimeout());
  EXPECT_EQ(ledger::Status::OK, status);
  page2->Commit(callback::Capture(MakeQuitTask(), &status));
  EXPECT_FALSE(RunLoopWithTimeout());
  EXPECT_EQ(ledger::Status::OK, status);
  EXPECT_FALSE(RunLoopWithTimeout());
  // We now have an automatically-resolved conflict.
  EXPECT_EQ(1u, resolver_factory->resolvers.size());
  ASSERT_NE(resolver_factory->resolvers.end(),
            resolver_factory->resolvers.find(convert::ToString(test_page_id)));
  ConflictResolverImpl* resolver_impl =
      &(resolver_factory->resolvers.find(convert::ToString(test_page_id))
            ->second);
  // We haven't been asked to resolve anything.
  EXPECT_EQ(0u, resolver_impl->requests.size());
  // Third change seen by the watcher is the merge commit; only "email"
  // survives ("name" was added then reverted on page1).
  EXPECT_EQ(3u, watcher.changes_seen);
  fidl::Array<ledger::EntryPtr> final_entries =
      SnapshotGetEntries(&watcher.last_snapshot_, fidl::Array<uint8_t>());
  ASSERT_EQ(1u, final_entries.size());
  EXPECT_EQ("email", convert::ExtendedStringView(final_entries[0]->key));
}
// Verifies that deleting a page while a custom conflict resolution is pending
// makes the pending Merge() call fail instead of crashing the Ledger.
TEST_F(MergingIntegrationTest, DeleteDuringConflictResolution) {
  auto instance = NewLedgerAppInstance();
  ledger::ConflictResolverFactoryPtr resolver_factory_ptr;
  std::unique_ptr<TestConflictResolverFactory> resolver_factory =
      std::make_unique<TestConflictResolverFactory>(
          ledger::MergePolicy::CUSTOM, resolver_factory_ptr.NewRequest(),
          nullptr);
  ledger::LedgerPtr ledger_ptr = instance->GetTestLedger();
  ledger_ptr->SetConflictResolverFactory(
      std::move(resolver_factory_ptr),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(ledger_ptr.WaitForResponse());

  // Two connections to the same page.
  ledger::PagePtr page1 = instance->GetTestPage();
  fidl::Array<uint8_t> test_page_id;
  page1->GetId(callback::Capture(MakeQuitTask(), &test_page_id));
  EXPECT_FALSE(RunLoopWithTimeout());
  ledger::PagePtr page2 = instance->GetPage(test_page_id, ledger::Status::OK);

  // Concurrent transactions writing the same key create a conflict on commit.
  ledger::Status status = ledger::Status::UNKNOWN_ERROR;
  page1->StartTransaction(callback::Capture(MakeQuitTask(), &status));
  EXPECT_FALSE(RunLoopWithTimeout());
  EXPECT_EQ(ledger::Status::OK, status);
  page1->Put(convert::ToArray("name"), convert::ToArray("Alice"),
             callback::Capture(MakeQuitTask(), &status));
  EXPECT_FALSE(RunLoopWithTimeout());
  EXPECT_EQ(ledger::Status::OK, status);
  page2->StartTransaction(callback::Capture(MakeQuitTask(), &status));
  EXPECT_FALSE(RunLoopWithTimeout());
  EXPECT_EQ(ledger::Status::OK, status);
  page2->Put(convert::ToArray("name"), convert::ToArray("Bob"),
             callback::Capture(MakeQuitTask(), &status));
  EXPECT_FALSE(RunLoopWithTimeout());
  EXPECT_EQ(ledger::Status::OK, status);
  page1->Commit(callback::Capture(MakeQuitTask(), &status));
  EXPECT_FALSE(RunLoopWithTimeout());
  EXPECT_EQ(ledger::Status::OK, status);
  page2->Commit(callback::Capture(MakeQuitTask(), &status));
  EXPECT_FALSE(RunLoopWithTimeout());
  EXPECT_EQ(ledger::Status::OK, status);

  EXPECT_FALSE(RunLoopWithTimeout());

  // We now have a conflict.
  EXPECT_EQ(1u, resolver_factory->resolvers.size());
  // ASSERT (not EXPECT): dereferencing the iterator below would be undefined
  // behavior if the lookup returned end().
  ASSERT_NE(resolver_factory->resolvers.end(),
            resolver_factory->resolvers.find(convert::ToString(test_page_id)));
  ConflictResolverImpl* resolver_impl =
      &(resolver_factory->resolvers.find(convert::ToString(test_page_id))
            ->second);
  ASSERT_EQ(1u, resolver_impl->requests.size());

  // Delete the page while the resolution request is still outstanding; the
  // pending merge must then fail gracefully.
  instance->DeletePage(test_page_id, ledger::Status::OK);
  EXPECT_FALSE(resolver_impl->requests[0].Merge(
      fidl::Array<ledger::MergedValuePtr>::New(0)));
}
// Verifies WaitForConflictResolution semantics with a CUSTOM policy: the
// callback must not fire while a merge request is pending, and must fire with
// CONFLICTS_RESOLVED once the custom merge completes.
TEST_F(MergingIntegrationTest, WaitForCustomMerge) {
  auto instance = NewLedgerAppInstance();
  ledger::ConflictResolverFactoryPtr resolver_factory_ptr;
  std::unique_ptr<TestConflictResolverFactory> resolver_factory =
      std::make_unique<TestConflictResolverFactory>(
          ledger::MergePolicy::CUSTOM, resolver_factory_ptr.NewRequest(),
          nullptr);
  ledger::LedgerPtr ledger_ptr = instance->GetTestLedger();
  ledger_ptr->SetConflictResolverFactory(
      std::move(resolver_factory_ptr),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(ledger_ptr.WaitForResponse());

  // Create a conflict: two pointers to the same page.
  ledger::PagePtr page1 = instance->GetTestPage();
  fidl::Array<uint8_t> test_page_id;
  page1->GetId([&test_page_id](fidl::Array<uint8_t> page_id) {
    test_page_id = std::move(page_id);
  });
  EXPECT_TRUE(page1.WaitForResponse());
  ledger::PagePtr page2 = instance->GetPage(test_page_id, ledger::Status::OK);

  // Parallel put in transactions.
  page1->StartTransaction(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page1->Put(
      convert::ToArray("name"), convert::ToArray("Alice"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page2->StartTransaction(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page2->Put(
      convert::ToArray("email"), convert::ToArray("<EMAIL>"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page1->Commit(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page2->Commit(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());

  EXPECT_FALSE(RunLoopWithTimeout());

  // Check that we have a resolver and pending conflict resolution request.
  EXPECT_EQ(1u, resolver_factory->resolvers.size());
  // ASSERT (not EXPECT): dereferencing the iterator below would be undefined
  // behavior if the lookup returned end().
  ASSERT_NE(resolver_factory->resolvers.end(),
            resolver_factory->resolvers.find(convert::ToString(test_page_id)));
  ConflictResolverImpl* resolver_impl =
      &(resolver_factory->resolvers.find(convert::ToString(test_page_id))
            ->second);
  ASSERT_EQ(1u, resolver_impl->requests.size());

  // Try to wait for conflicts resolution. |merged| tracks whether the manual
  // merge has been submitted yet, so the callback can verify it fired only
  // after the merge.
  bool merged = false;
  bool conflicts_resolved_callback_called = false;
  ledger::ConflictResolutionWaitStatus wait_status;
  auto conflicts_resolved_callback = [&merged,
                                      &conflicts_resolved_callback_called]() {
    conflicts_resolved_callback_called = true;
    EXPECT_TRUE(merged);
    fsl::MessageLoop::GetCurrent()->PostQuitTask();
  };
  page1->WaitForConflictResolution(
      callback::Capture(conflicts_resolved_callback, &wait_status));

  // Check that conflicts_resolved_callback is not called, as there are merge
  // requests pending.
  EXPECT_TRUE(RunLoopWithTimeout(fxl::TimeDelta::FromMilliseconds(250)));
  EXPECT_FALSE(conflicts_resolved_callback_called);

  // Merge manually.
  fidl::Array<ledger::MergedValuePtr> merged_values =
      fidl::Array<ledger::MergedValuePtr>::New(0);
  EXPECT_TRUE(resolver_impl->requests[0].Merge(std::move(merged_values),
                                               MergeType::SIMPLE));
  merged = true;

  // Now conflict_resolved_callback can run.
  EXPECT_FALSE(RunLoopWithTimeout());
  EXPECT_TRUE(conflicts_resolved_callback_called);
  EXPECT_EQ(ledger::ConflictResolutionWaitStatus::CONFLICTS_RESOLVED,
            wait_status);
}
// Verifies custom conflict resolution when two transactions both touch the
// same key: non-conflicting entries ("city", "phone") are merged
// automatically via MergeNonConflictingEntries(), while the conflicting key
// ("name") is resolved explicitly by picking the RIGHT-side value.
TEST_F(MergingIntegrationTest, CustomConflictResolutionConflictingMerge) {
  auto instance = NewLedgerAppInstance();
  // Install a CUSTOM merge policy so conflicts are routed to our test
  // resolver factory instead of being merged automatically by the ledger.
  ledger::ConflictResolverFactoryPtr resolver_factory_ptr;
  std::unique_ptr<TestConflictResolverFactory> resolver_factory =
      std::make_unique<TestConflictResolverFactory>(
          ledger::MergePolicy::CUSTOM, resolver_factory_ptr.NewRequest(),
          nullptr);
  ledger::LedgerPtr ledger_ptr = instance->GetTestLedger();
  ledger_ptr->SetConflictResolverFactory(
      std::move(resolver_factory_ptr),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(ledger_ptr.WaitForResponse());

  // Two connections to the same page so we can build diverging commits.
  ledger::PagePtr page1 = instance->GetTestPage();
  fidl::Array<uint8_t> test_page_id;
  page1->GetId([&test_page_id](fidl::Array<uint8_t> page_id) {
    test_page_id = std::move(page_id);
  });
  EXPECT_TRUE(page1.WaitForResponse());
  ledger::PagePtr page2 = instance->GetPage(test_page_id, ledger::Status::OK);

  // Transaction on page1: name=Alice, city=Paris.
  page1->StartTransaction(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page1->Put(
      convert::ToArray("name"), convert::ToArray("Alice"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page1->Put(
      convert::ToArray("city"), convert::ToArray("Paris"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());

  // Concurrent transaction on page2: name=Bob (conflicts), phone (does not).
  page2->StartTransaction(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page2->Put(
      convert::ToArray("name"), convert::ToArray("Bob"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());
  page2->Put(
      convert::ToArray("phone"), convert::ToArray("0123456789"),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());

  // Committing both transactions produces the conflict.
  page1->Commit(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  page2->Commit(
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page2.WaitForResponse());

  EXPECT_FALSE(RunLoopWithTimeout());

  // We now have a conflict; exactly one resolver for this page, with one
  // pending resolution request.
  EXPECT_EQ(1u, resolver_factory->resolvers.size());
  EXPECT_NE(resolver_factory->resolvers.end(),
            resolver_factory->resolvers.find(convert::ToString(test_page_id)));
  ConflictResolverImpl* resolver_impl =
      &(resolver_factory->resolvers.find(convert::ToString(test_page_id))
            ->second);
  ASSERT_EQ(1u, resolver_impl->requests.size());

  // Only "name" is reported as conflicting (base: absent, left: Bob,
  // right: Alice).
  fidl::Array<ledger::DiffEntryPtr> changes;
  ASSERT_TRUE(resolver_impl->requests[0].GetConflictingDiff(&changes));
  EXPECT_EQ(1u, changes.size());
  EXPECT_TRUE(ChangeMatch("name", Optional<std::string>(),
                          Optional<std::string>("Bob"),
                          Optional<std::string>("Alice"), changes[0]));

  // Prepare the merged values: resolve "name" by taking the RIGHT side
  // ("Alice").
  fidl::Array<ledger::MergedValuePtr> merged_values =
      fidl::Array<ledger::MergedValuePtr>::New(0);
  {
    ledger::MergedValuePtr merged_value = ledger::MergedValue::New();
    merged_value->key = convert::ToArray("name");
    merged_value->source = ledger::ValueSource::RIGHT;
    merged_values.push_back(std::move(merged_value));
  }
  // Non-conflicting keys ("city", "phone") are merged automatically.
  ASSERT_TRUE(resolver_impl->requests[0].MergeNonConflictingEntries());

  // Watch the page so we know when the merge commit lands.
  ledger::PageWatcherPtr watcher_ptr;
  Watcher watcher(watcher_ptr.NewRequest(),
                  [] { fsl::MessageLoop::GetCurrent()->PostQuitTask(); });
  ledger::PageSnapshotPtr snapshot2;
  page1->GetSnapshot(
      snapshot2.NewRequest(), nullptr, std::move(watcher_ptr),
      [](ledger::Status status) { EXPECT_EQ(ledger::Status::OK, status); });
  EXPECT_TRUE(page1.WaitForResponse());
  EXPECT_TRUE(resolver_impl->requests[0].Merge(std::move(merged_values)));

  // Wait for the watcher to be called, then verify the merged state.
  EXPECT_FALSE(RunLoopWithTimeout());

  fidl::Array<ledger::EntryPtr> final_entries =
      SnapshotGetEntries(&watcher.last_snapshot_, fidl::Array<uint8_t>());
  ASSERT_EQ(3u, final_entries.size());
  EXPECT_EQ("city", convert::ExtendedStringView(final_entries[0]->key));
  EXPECT_EQ("Paris", ToString(final_entries[0]->value));
  EXPECT_EQ("name", convert::ExtendedStringView(final_entries[1]->key));
  EXPECT_EQ("Alice", ToString(final_entries[1]->value));
  EXPECT_EQ("phone", convert::ExtendedStringView(final_entries[2]->key));
  EXPECT_EQ("0123456789", ToString(final_entries[2]->value));
}
} // namespace
} // namespace integration
} // namespace test
|
lilsweetcaligula/MIT6.00.1x | Puzzles/checkio/Home/Median/mySolution.py | <filename>Puzzles/checkio/Home/Median/mySolution.py<gh_stars>0
def findAv(*args):
    """Return the arithmetic mean of the given numbers."""
    total = 0
    for value in args:
        total += value
    return total / len(args)
def checkio(data):
    """Return the median of a non-empty list of numbers.

    For an odd-length list this is the middle value of the sorted data;
    for an even-length list it is the mean of the two middle values.

    Fixes two defects in the original:
    - ``len(data) / 2`` is a float under Python 3 and raises ``TypeError``
      when used as a list index; integer division ``//`` is required.
    - ``data.sort()`` mutated the caller's list; ``sorted`` works on a copy.
    """
    ordered = sorted(data)  # copy: leave the caller's list untouched
    mid = len(ordered) // 2  # integer division so indexing works on Python 3
    if len(ordered) % 2 == 0:
        # Even count: average the two central elements (true division).
        return (ordered[mid - 1] + ordered[mid]) / 2
    return ordered[mid]
|
SenthilKumarGS/TizenRT | external/include/libcxx/wctype.h | /****************************************************************************
*
* Copyright 2018 Samsung Electronics All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*
****************************************************************************/
// -*- C++ -*-
//===--------------------------- wctype.h ---------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is dual licensed under the MIT and the University of Illinois Open
// Source Licenses. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
#ifndef _LIBCPP_WCTYPE_H
#define _LIBCPP_WCTYPE_H

/*
    wctype.h synopsis

Macros:
    WEOF

Types:
    wint_t
    wctrans_t
    wctype_t

int iswalnum(wint_t wc);
int iswalpha(wint_t wc);
int iswblank(wint_t wc);  // C99
int iswcntrl(wint_t wc);
int iswdigit(wint_t wc);
int iswgraph(wint_t wc);
int iswlower(wint_t wc);
int iswprint(wint_t wc);
int iswpunct(wint_t wc);
int iswspace(wint_t wc);
int iswupper(wint_t wc);
int iswxdigit(wint_t wc);
int iswctype(wint_t wc, wctype_t desc);
wctype_t wctype(const char* property);
wint_t towlower(wint_t wc);
wint_t towupper(wint_t wc);
wint_t towctrans(wint_t wc, wctrans_t desc);
wctrans_t wctrans(const char* property);

*/

#include <__config>

#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
#pragma GCC system_header
#endif

// Delegate to the platform's C <wctype.h> for the actual declarations.
#include_next <wctype.h>

#ifdef __cplusplus
// The underlying C header is allowed to implement these facilities as
// function-like macros.  In C++ they must be usable as real functions
// (taken by address, found by overload resolution), so strip any macro
// versions after including the C header.
#undef iswalnum
#undef iswalpha
#undef iswblank
#undef iswcntrl
#undef iswdigit
#undef iswgraph
#undef iswlower
#undef iswprint
#undef iswpunct
#undef iswspace
#undef iswupper
#undef iswxdigit
#undef iswctype
#undef wctype
#undef towlower
#undef towupper
#undef towctrans
#undef wctrans
#endif  // __cplusplus

#endif  // _LIBCPP_WCTYPE_H
TheBlankArrays/Scrapinit | server/specs/addUrlSpecs.js | <reponame>TheBlankArrays/Scrapinit
var serverHost = 'http://localhost:3000';
var should = require('should');
var assert = require('assert');
var supertest = require('supertest');
var request = supertest(serverHost);
var Sequelize = require('sequelize');
var db = require('../dbConfig');
var bcrypt = require('bcrypt');
// Shared helpers for the "add url" integration specs: canned fixtures plus
// small wrappers around supertest for sign-up, login, logout and cleanup.
var utils = {
  // A throwaway account the specs create and destroy around the suite.
  testUser: {
    email: '<EMAIL>',
    password: '<PASSWORD>'
  },
  // A fully-populated, valid payload for POST /api/users/url.
  newUrl: {
    url: 'http://www.google.com',
    urlImg: '../client/test_assets/test/google_com.png',
    crop: {
      x: 2,
      y: 2,
      w: 1,
      h: 1
    },
    urlType: 'text',
    freq: '5 min',
    filter: 'greate than',
    compareVal: 'string',
    stopOnTrig: false
  },
  // Returns a supertest agent (cookie-preserving) for the given server,
  // defaulting to the shared serverHost.
  createAgent: function (server) {
    return supertest.agent(server || serverHost);
  },
  // Registers the given credentials; invokes callback(err) on failure and
  // callback() on success (callback is optional).
  signUpUser: function (credentials, callback) {
    request.post('/api/users/signup')
      .send(credentials)
      .end(function (err, res) {
        if (!callback) { return; }
        if (err) {
          callback(err);
        } else {
          callback();
        }
      });
  },
  // Removes the suite's test user if it exists.  NOTE(review): the
  // `credentials` argument is ignored — the lookup always uses
  // utils.testUser.email; confirm before relying on it.
  destroyUser: function (schema, credentials, callback) {
    schema.find({ where: { email: this.testUser.email } })
      .then(function (foundUser) {
        if (!foundUser) {
          callback();
          return;
        }
        foundUser.destroy().then(function () {
          callback();
        });
      });
  },
  // Logs the agent in; on success the callback receives the response body
  // (the user object), on failure it receives the error.
  logInAgent: function (agent, credentials, callback) {
    agent.post('/api/users/login')
      .send(credentials)
      .end(function (err, res) {
        if (!callback) { return; }
        if (err) {
          callback(err);
        } else {
          // user object can be accessed in callback
          callback(res.body);
        }
      });
  },
  // Logs the agent out; callback(err) on failure, callback() on success.
  logOutAgent: function (agent, callback) {
    agent.get('/api/users/logout')
      .end(function (err, res) {
        if (!callback) { return; }
        if (err) {
          callback(err);
        } else {
          callback();
        }
      });
  }
};
// Integration suite for POST /api/users/url.  Requires the API server to be
// running at serverHost and a writable sqlite database; a test user is
// created before the suite and destroyed afterwards.
describe('API add url', function () {
  var sequelize = db.connect('../db/db.sqlite');
  //pass the the second parameter as false so that the function does not execute sync()
  var schemas = db.createSchemas(sequelize,false);
  //schemas that will be used to execute queries
  var User = schemas.User;
  var Url = schemas.Url;
  var UserUrl = schemas.UserUrl;

  before(function (done) {
    utils.signUpUser(utils.testUser, function (err) {
      done();
    });
  });

  after(function (done) {
    utils.destroyUser(User, utils.testUser, function () {
      done();
    });
  });

  describe('POST URL', function () {
    // Unauthenticated requests must be rejected outright.
    it('Return 401 if user is not logged', function (done) {
      request.post('/api/users/url')
        .send(utils.newUrl)
        .end(function (err, res) {
          res.status.should.be.equal(401);
          done();
        });
    });

    // Each of the following cases omits one required field from the payload
    // and expects a 400 validation failure.
    it('Return 400 if user is logged but the URL parameters without url property', function (done) {
      var agent = utils.createAgent();
      var badUrl = {
        url: '',
        urlImg: '../client/assets/test/www.google.com.jpg',
        crop: {
          x: 2,
          y: 2,
          w: 1,
          h: 1
        }
      };
      utils.logInAgent(agent, utils.testUser, function (user) {
        agent.post('/api/users/url')
          .send(badUrl)
          .end(function (err, res) {
            res.status.should.be.equal(400);
            done();
          });
      });
    });

    it('Return 400 if user is logged but the URL parameters without urlImage property', function (done) {
      var agent = utils.createAgent();
      var badUrl = {
        url: 'http://www.google.com',
        urlImg: '',
        crop: {
          x: 2,
          y: 2,
          w: 1,
          h: 1
        }
      };
      utils.logInAgent(agent, utils.testUser, function (user) {
        agent.post('/api/users/url')
          .send(badUrl)
          .end(function (err, res) {
            res.status.should.be.equal(400);
            done();
          });
      });
    });

    it('Return 400 if user is logged but the URL parameters without crop property', function (done) {
      var agent = utils.createAgent();
      var badUrl = {
        url: 'http://www.google.com',
        urlImg: '../client/assets/test/www.google.com.jpg',
        crop: {}
      };
      utils.logInAgent(agent, utils.testUser, function (user) {
        agent.post('/api/users/url')
          .send(badUrl)
          .end(function (err, res) {
            res.status.should.be.equal(400);
            done();
          });
      });
    });

    it('Return 400 if user is logged but the URL parameters without freq', function (done) {
      var agent = utils.createAgent();
      var badUrl = {
        url: 'http://www.google.com',
        urlImg: '../client/test_assets/test/google_com.png',
        crop: {
          x: 2,
          y: 2,
          w: 1,
          h: 1
        },
        urlType: 'string',
        filter: 'greate than',
        compareVal: 'string',
        stopOnTrig: false
      };
      utils.logInAgent(agent, utils.testUser, function (user) {
        agent.post('/api/users/url')
          .send(badUrl)
          .end(function (err, res) {
            res.status.should.be.equal(400);
            done();
          });
      });
    });

    // Happy path: a valid payload from a logged-in agent yields 201 and a
    // URL record with the expected join-table (UserUrl) attributes.
    it('Return 201 if user is logged and the parameters is good', function (done) {
      var agent = utils.createAgent();
      utils.logInAgent(agent, utils.testUser, function (user) {
        agent.post('/api/users/url')
          .send(utils.newUrl)
          .end(function (err, res) {
            res.status.should.be.equal(201);
            var url = res.body;
            url.should.have.property('UserUrl').and.should.be.an.Object;
            url.should.have.property('url').and.should.be.a.String;
            url.should.have.property('id').and.should.be.an.Number;
            url.UserUrl.cropImage.should.be.a.String;
            url.UserUrl.should.have.property('status').and.should.be.a.Boolean;
            url.UserUrl.frequency.should.be.a.Number;
            done();
          });
      });
    });
  });
});
|
codyseibert/ef-cms | web-client/src/presenter/actions/setViewerCorrespondenceToDisplayAction.test.js | <filename>web-client/src/presenter/actions/setViewerCorrespondenceToDisplayAction.test.js
import { applicationContextForClient as applicationContext } from '../../../../shared/src/business/test/createTestApplicationContext';
import { presenter } from '../presenter-mock';
import { runAction } from 'cerebral/test';
import { setViewerCorrespondenceToDisplayAction } from './setViewerCorrespondenceToDisplayAction';
describe('setViewerCorrespondenceToDisplayAction', () => {
  // Runs the action with the given correspondence prop against a fixed
  // caseDetail/state fixture and resolves with the runAction result.
  const runWithCorrespondence = viewerCorrespondenceToDisplay =>
    runAction(setViewerCorrespondenceToDisplayAction, {
      modules: {
        presenter,
      },
      props: {
        viewerCorrespondenceToDisplay,
      },
      state: {
        caseDetail: {
          docketNumber: '123-45',
        },
        viewerCorrespondenceToDisplay: null,
      },
    });

  beforeAll(() => {
    // The action asks the use case for a download URL; stub it out.
    applicationContext
      .getUseCases()
      .getDocumentDownloadUrlInteractor.mockReturnValue({
        url: 'www.example.com',
      });

    presenter.providers.applicationContext = applicationContext;
  });

  it('sets the viewerCorrespondenceToDisplay from props on state and sets the iframeSrc url from the return from the use case', async () => {
    const { state } = await runWithCorrespondence({ correspondenceId: '1234' });

    expect(state.viewerCorrespondenceToDisplay).toEqual({
      correspondenceId: '1234',
    });
    expect(state.iframeSrc).toEqual('www.example.com');
  });

  it('does not set iframeSrc if props.viewerCorrespondenceToDisplay is null', async () => {
    const { state } = await runWithCorrespondence(null);

    expect(state.iframeSrc).toBeUndefined();
  });
});
|
Jiyong-GAL/NodeShopping | testSchema.js | <reponame>Jiyong-GAL/NodeShopping<filename>testSchema.js
var mongoose = require('mongoose');
// Schema recording which IP connected and when.
var testSchema = new mongoose.Schema({
  connectIP: {
    type: String,
    required: true
  },
  connectTime: String
});

// mongoose.model(name, schema) both registers the model and returns the
// compiled instance, so register-and-export collapses into one call
// (equivalent to the original register-then-lookup pair).
module.exports = mongoose.model('test2', testSchema);
hakandrmz/CampNotes | Hafta22OOP/src/oop2/Main.java | <reponame>hakandrmz/CampNotes
package oop2;
/** Demo entry point exercising the customer class hierarchy. */
public class Main {
	public static void main(String[] args) {
		// A CorporateCustomer held through the Customer base-class reference
		// (polymorphism); an IndividualCustomer held by its concrete type.
		Customer corporateCustomer1 = new CorporateCustomer(1,"123456","<EMAIL>","Turkcell","1232123");
		IndividualCustomer individualCustomer = new IndividualCustomer(1,"123456","<EMAIL>","Turkcell","1232123","123123");
		// NOTE(review): 'customers' is built but never read afterwards —
		// presumably left over from an earlier demo step; confirm before removing.
		Customer[] customers = {corporateCustomer1,individualCustomer};
		IndividualCustomerManager individualCustomerManager = new IndividualCustomerManager();
		individualCustomerManager.add(individualCustomer);
	}
}
|
SIIS-cloud/pileus | libvirt/pileus-libvirt-1.2.12/src/esx/esx_nwfilter_driver.h | /*
* esx_nwfilter_driver.h: nwfilter driver functions for managing VMware ESX
* firewall rules
*
* Copyright (C) 2010 <NAME> <<EMAIL>>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library. If not, see
* <http://www.gnu.org/licenses/>.
*
*/
#ifndef __ESX_NWFILTER_DRIVER_H__
# define __ESX_NWFILTER_DRIVER_H__

/* Registers the ESX nwfilter driver with libvirt.  Presumably returns 0 on
 * success and a negative value on failure (libvirt convention) — confirm
 * against the implementation in esx_nwfilter_driver.c. */
int esxNWFilterRegister(void);

#endif /* __ESX_NWFILTER_DRIVER_H__ */
|
yufan022/tidb | expression/distsql_builtin_serial_test.go | <filename>expression/distsql_builtin_serial_test.go<gh_stars>1-10
// Copyright 2021 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package expression
import (
"testing"
"github.com/pingcap/tidb/parser/mysql"
"github.com/pingcap/tidb/sessionctx/stmtctx"
"github.com/pingcap/tidb/types"
"github.com/pingcap/tidb/util/collate"
"github.com/pingcap/tipb/go-tipb"
"github.com/stretchr/testify/require"
)
// TestPBToExprWithNewCollation checks that collation IDs round-trip through
// the protobuf FieldType and back into a Constant's collation name, both with
// the legacy collation framework and with new collations enabled (where the
// protobuf carries the *negated* collation ID).
func TestPBToExprWithNewCollation(t *testing.T) {
	sc := new(stmtctx.StatementContext)
	fieldTps := make([]*types.FieldType, 1)

	// name: collation set on the field type (may be invalid/empty);
	// expName: collation expected on the decoded constant;
	// pbID: collation ID expected in the protobuf.
	cases := []struct {
		name    string
		expName string
		id      int32
		pbID    int32
	}{
		{"utf8_general_ci", "utf8_general_ci", 33, 33},
		{"UTF8MB4_BIN", "utf8mb4_bin", 46, 46},
		{"utf8mb4_bin", "utf8mb4_bin", 46, 46},
		{"utf8mb4_general_ci", "utf8mb4_general_ci", 45, 45},
		// Empty or unknown collations fall back to utf8mb4_bin.
		{"", "utf8mb4_bin", 46, 46},
		{"some_error_collation", "utf8mb4_bin", 46, 46},
		{"utf8_unicode_ci", "utf8_unicode_ci", 192, 192},
		{"utf8mb4_unicode_ci", "utf8mb4_unicode_ci", 224, 224},
		{"utf8mb4_zh_pinyin_tidb_as_cs", "utf8mb4_zh_pinyin_tidb_as_cs", 2048, 2048},
	}

	// Legacy collation: the protobuf carries the positive collation ID.
	for _, cs := range cases {
		ft := types.NewFieldType(mysql.TypeString)
		ft.Collate = cs.name
		expr := new(tipb.Expr)
		expr.Tp = tipb.ExprType_String
		expr.FieldType = toPBFieldType(ft)
		require.Equal(t, cs.pbID, expr.FieldType.Collate)

		e, err := PBToExpr(expr, fieldTps, sc)
		require.NoError(t, err)
		cons, ok := e.(*Constant)
		require.True(t, ok)
		require.Equal(t, cs.expName, cons.Value.Collation())
	}

	collate.SetNewCollationEnabledForTest(true)
	defer collate.SetNewCollationEnabledForTest(false)

	// New collation framework: the same cases, but the protobuf collation ID
	// is negated to signal new-collation semantics.
	for _, cs := range cases {
		ft := types.NewFieldType(mysql.TypeString)
		ft.Collate = cs.name
		expr := new(tipb.Expr)
		expr.Tp = tipb.ExprType_String
		expr.FieldType = toPBFieldType(ft)
		require.Equal(t, -cs.pbID, expr.FieldType.Collate)

		e, err := PBToExpr(expr, fieldTps, sc)
		require.NoError(t, err)
		cons, ok := e.(*Constant)
		require.True(t, ok)
		require.Equal(t, cs.expName, cons.Value.Collation())
	}
}
|
lieff/lvg | tests/swf/trace/netstream-onmetadata.c | /* gcc `pkg-config --libs --cflags libming` netstream-onmetadata.c -o netstream-onmetadata && ./netstream-onmetadata
*/
#include <ming.h>
/* Builds netstream-onmetadata.swf: a 200x150 SWF7 movie containing a video
 * placeholder named "video" and an ActionScript block that plays video.flv
 * through a NetStream and dumps every onMetaData property (sorted, with
 * nested objects expanded one level) via trace, then quits via FSCommand. */
int
main (int argc, char **argv)
{
  SWFMovie movie;
  SWFVideoStream video;
  SWFDisplayItem item;
  SWFAction action;

  if (Ming_init ())
    return 1;
  Ming_useSWFVersion (7);

  movie = newSWFMovie();
  SWFMovie_setRate (movie, 1);
  SWFMovie_setDimension (movie, 200, 150);

  /* The video stream is added to the stage and named so the ActionScript
   * below can attach the NetStream to it. */
  video = newSWFVideoStream ();
  SWFVideoStream_setDimension (video, 200, 150);
  item = SWFMovie_add (movie, (SWFBlock) video);
  SWFDisplayItem_setName (item, "video");

  action = compileSWFActionCode (""
      "trace (\"Test parsing of onMetaData\");"
      "nc = new NetConnection ();"
      "nc.connect (null);"
      "ns = new NetStream (nc);"
      "ns.onMetaData = function (info)"
      "{"
      "  trace ('onMetaData');"
      "  var props = [];"
      "  for (var prop in info) {"
      "    props.push (prop);"
      "  }"
      "  props.sort ();"
      "  for (var i = 0; i < props.length; i++) {"
      "    if (typeof (info[props[i]]) == 'object') {"
      "      var keys = [];"
      "      for (var key in info[props[i]]) {"
      "        keys.push (key);"
      "      }"
      "      keys.sort ();"
      "      trace (props[i] + ' = ');"
      "      for (var j = 0; j < keys.length; j++) {"
      "        trace ('  ' + keys[j] + ' = ' + typeof (info[props[i]][keys[j]]) + ': ' + info[props[i]][keys[j]]);"
      "      }"
      "    } else {"
      "      trace (props[i] + ' = ' + typeof (info[props[i]]) + ': ' + info[props[i]]);"
      "    }"
      "  }"
      "  loadMovie ('FSCommand:quit', '');"
      "};"
      "video.attachVideo (ns);"
      "ns.setBufferTime (5);"
      "trace (\"Calling play\");"
      "ns.play (\"video.flv\");"
      "trace (\"done calling play\");"
      "");
  SWFMovie_add (movie, (SWFBlock) action);
  SWFMovie_save (movie, "netstream-onmetadata.swf");

  return 0;
}
|
murilopolese/learning-platform-gatsby | src/utils/jsonToReact.js | import React from 'react'
import { Container, Grid, Box } from '@material-ui/core'
import Palette from '../components/palette'
import Typography from '../components/typography'
import Card from '../components/card'
import Button from '../components/button'
import Youtube from '../components/youtubedisplay'
import ImageDisplay from '../components/imagedisplay'
import Gallery from '../components/gallery'
import categoryColors from './categoryColors'
import createMarkup from '../utils/createMarkup'
import { querySelectorAll, querySelector, contains } from './jsonSelector'
// Maps a `prefix<name>` CSS class on `el` to the corresponding Palette
// color value, or `fallback` when the name is absent or not in Palette.
const resolveColorFromClass = (el, prefix, fallback = 'transparent') => {
  const name = getColorNameFromClass(el, prefix)
  return Palette[name] ? Palette[name] : fallback
}
// Returns the remainder of the first class on `el` that starts with
// `prefix` (e.g. 'bgcolor-red' with prefix 'bgcolor-' yields 'red'),
// or `fallback` when no class matches.
const getColorNameFromClass = (el, prefix, fallback) => {
  const classes = el.classList
  let name
  for (let i = 0; i < classes.length; i += 1) {
    if (classes[i].indexOf(prefix) === 0) {
      name = classes[i].replace(prefix, '')
      break
    }
  }
  return name || fallback
}
// Recursively converts one node of the parsed-HTML JSON tree (WordPress
// content) into React elements.  Container-ish tags (div/figure/iframe)
// are delegated to MySection; text tags are wrapped in TypographyWrap;
// unknown tags render nothing.
const jsonToReact = (el, posts, key) => {
  switch(el.tagName.toLowerCase()) {
    case 'body':
      // Root node: render children directly, index as React key.
      return el.children.map((child, i) => jsonToReact(child, posts, i))
    case 'iframe':
    case 'figure':
    case 'div':
      return <MySection el={el} posts={posts} key={key}/>
    case 'img':
      return (
        <ImageDisplay src={el.src} alt={el.alt} />
      )
    case 'a':
      return (
        <TypographyWrap el={el} key={key}>
          <a href={el.href} dangerouslySetInnerHTML={{__html:el.innerHTML}}></a>
        </TypographyWrap>
      )
    case 'h1':
      return (
        <TypographyWrap el={el} key={key}>
          <h1 dangerouslySetInnerHTML={{__html:el.innerHTML}}></h1>
        </TypographyWrap>
      )
    case 'h2':
      return (
        <TypographyWrap el={el} key={key}>
          <h2 dangerouslySetInnerHTML={{__html:el.innerHTML}}></h2>
        </TypographyWrap>
      )
    case 'h3':
      return (
        <TypographyWrap el={el} key={key}>
          <h3 dangerouslySetInnerHTML={{__html:el.innerHTML}}></h3>
        </TypographyWrap>
      )
    case 'h4':
      return (
        <TypographyWrap el={el} key={key}>
          <h4 dangerouslySetInnerHTML={{__html:el.innerHTML}}></h4>
        </TypographyWrap>
      )
    case 'h5':
      return (
        <TypographyWrap el={el} key={key}>
          <h5 dangerouslySetInnerHTML={{__html:el.innerHTML}}></h5>
        </TypographyWrap>
      )
    case 'p':
      // Empty paragraphs (no innerHTML) are dropped entirely.
      if (el.innerHTML) {
        return (
          <TypographyWrap el={el} key={key}>
            <p dangerouslySetInnerHTML={{__html:el.innerHTML}}></p>
          </TypographyWrap>
        )
      } else { return null }
    case 'ul':
      return (
        <TypographyWrap el={el} key={key}>
          <ul dangerouslySetInnerHTML={{__html:el.innerHTML}}></ul>
        </TypographyWrap>
      )
    case 'ol':
      return (
        <TypographyWrap el={el} key={key}>
          <ol dangerouslySetInnerHTML={{__html:el.innerHTML}}></ol>
        </TypographyWrap>
      )
    default:
      // Unhandled tags render nothing.
      // return <div>{el.tagName} {el.className} {el.innerHTML}</div>
      return null
  }
}
const TypographyWrap = ({children, el}) => {
let align = 'left'
if (contains(el.classList, 'has-text-align-center')) {
align = 'center'
} else if (contains(el.classList, 'has-text-align-right')) {
align = 'right'
}
const bgcolor = resolveColorFromClass(el, 'bgcolor-')
const textcolor = resolveColorFromClass(el, 'textcolor-', Palette.black)
return (
<Box bgcolor={bgcolor} color={textcolor}>
<Container maxWidth='md' align={align}>
<Typography>
{children}
</Typography>
</Container>
</Box>
)
}
// Renders one WordPress "block" container.  Dispatches on the element's
// class list (wp-block-*) to the matching layout component; unrecognized
// containers fall through to a plain div.
const MySection = ({ el, posts }) => {
  let bgcolor = resolveColorFromClass(el, 'bgcolor-', 'transparent')
  let textcolor = resolveColorFromClass(el, 'textcolor-', Palette.black)
  // Youtube display
  if (el.tagName.toLowerCase() === 'iframe') {
    return (
      <Box py={3} m={0} bgcolor={bgcolor}>
        <Container maxWidth="md">
          <Box>
            <Youtube url={el.src} />
          </Box>
        </Container>
      </Box>
    )
  }
  // Single image: caption text doubles as the alt text.
  if (contains(el.classList, 'wp-block-image')) {
    let image = querySelector(el, 'img')
    let caption = querySelector(el, 'figcaption')
    return (
      <Box py={3} m={0} bgcolor={bgcolor}>
        <Container maxWidth="md">
          <Box>
            <ImageDisplay src={image.src} alt={caption.innerHTML} />
          </Box>
        </Container>
      </Box>
    )
  }
  // Gallery: each blocks-gallery-item becomes an ImageDisplay.
  if (contains(el.classList, 'wp-block-gallery')) {
    let figures = querySelectorAll(el, '.blocks-gallery-item')
    return (
      <Box py={3} m={0} bgcolor={bgcolor}>
        <Container maxWidth="md">
          <Box>
            <Gallery>
              {figures.map((figure) => {
                let image = querySelector(figure, 'img')
                let caption = querySelector(figure, 'figcaption')
                return <ImageDisplay src={image.src} alt={caption.innerHTML} />
              })}
            </Gallery>
          </Box>
        </Container>
      </Box>
    )
  }
  // Related posts container: children are related-post items (below).
  if (contains(el.classList, 'wp-block-strawbees-learning-related')) {
    return (
      <Box py={3} m={0} bgcolor={bgcolor}>
        <Container maxWidth="lg">
          <Box>
            <Grid container spacing={3} direction="row" wrap="wrap" justify="center">
              {el.children.map((child, i) => jsonToReact(child, posts, i))}
            </Grid>
          </Box>
        </Container>
      </Box>
    )
  }
  // Related post item/card: resolves the post id embedded in the markup
  // against the `posts` prop; silently skips unknown ids.
  if (contains(el.classList, 'related-post')) {
    let id = querySelector(el, '.id')
    let post = posts.find(p => p.id === parseInt(id.innerText))
    if (!post) return null
    return (
      <Grid item xs={12} sm={6} md={4}>
        <a href={post.path}>
          <Card hover
            labelText={post.category}
            labelBgcolor={categoryColors[post.category]}
            image={post.thumbnail}
            tags={post.tags}>
            <Box px={3} py={3} pb={4}>
              <Typography variant="card-h1">
                {post.title}
              </Typography>
              <Box pb={1} />
              <Typography variant="card-body">
                <div dangerouslySetInnerHTML={createMarkup(post.description)} />
              </Typography>
            </Box>
          </Card>
        </a>
      </Grid>
    )
  }
  // Horizontal section: defaults to lightGrey unless an explicit
  // bgcolor- class is present.
  if (contains(el.classList, 'wp-block-strawbees-learning-horizontal')) {
    let resolvedBgcolor = Palette.lightGrey
    if (getColorNameFromClass(el, 'bgcolor-')) {
      resolvedBgcolor = bgcolor
    }
    return (
      <Box className="horizontal" py={3} m={0} bgcolor={resolvedBgcolor}>
        <Container maxWidth="md" align='center'>
          {el.children.map((child, i) => jsonToReact(child, posts, i))}
        </Container>
      </Box>
    )
  }
  // File block (Download): first child is the anchor to the file.
  if (contains(el.classList, 'wp-block-file')) {
    let download = el.children[0]
    bgcolor = resolveColorFromClass(el, 'bgcolor-', Palette.white)
    return (
      <Box p={1} display="inline-block">
        <a href={download.href} target="_blank" rel="noreferrer noopener">
          <Button
            icon="download"
            foregroundColor={textcolor}
            backgroundColor={bgcolor}>
            {download.innerText}
          </Button>
        </a>
      </Box>
    )
  }
  // Button group: alignment comes from WordPress align* classes.
  if (contains(el.classList, 'wp-block-buttons')) {
    let align = 'center'
    if (contains(el.classList, 'alignleft')) {
      align = 'left'
    } else if (contains(el.classList, 'alignright')) {
      align = 'right'
    }
    return (
      <Container maxWidth="md" align={align}>
        {el.children.map((child, i) => jsonToReact(child, posts, i))}
      </Container>
    )
  }
  // Single button: first child is the anchor; outline style maps to the
  // Button's outline prop.
  if (contains(el.classList, 'wp-block-button')) {
    let download = el.children[0]
    bgcolor = resolveColorFromClass(el, 'bgcolor-', Palette.white)
    return (
      <Box px={1} component="span">
        <a href={download.href} target={download.target} rel="noreferrer noopener">
          <Button
            outline={contains(el.classList, 'is-style-outline')}
            foregroundColor={textcolor}
            backgroundColor={bgcolor}>
            <span dangerouslySetInnerHTML={{__html: download.innerHTML}} />
          </Button>
        </a>
      </Box>
    )
  }
  // generic container
  if (el.children) {
    return <div className={el.className}>{el.children.map((child, i) => jsonToReact(child, null, i))}</div>
  }
  // In doubt, put in a div
  return <div className={el.className} dangerouslySetInnerHTML={{__html:el.innerHTML}}></div>
}
export {
jsonToReact
}
|
xxxmian/mediator-time | src/main/java/org/fmgroup/mediator/language/statement/PauseStatement.java | package org.fmgroup.mediator.language.statement;
import org.antlr.v4.runtime.ParserRuleContext;
import org.fmgroup.mediator.language.RawElement;
import org.fmgroup.mediator.language.ValidationException;
import org.fmgroup.mediator.language.generated.MediatorLangParser;
import org.fmgroup.mediator.language.term.NullValue;
import org.fmgroup.mediator.language.term.Term;
import org.fmgroup.mediator.language.type.Type;
import java.util.Map;
/**
 * AST node for a {@code pause <term>;} statement in the Mediator language.
 * Holds the term giving the pause duration and the usual parent link.
 */
public class PauseStatement implements Statement {

    private RawElement parent;
    // Term evaluating to the pause duration; its parent is kept pointing here.
    private Term pauseValue;

    /** @return the term giving the pause duration */
    public Term getPauseValue() {
        return pauseValue;
    }

    /**
     * Sets the pause duration term and re-parents it to this node.
     *
     * @param pauseValue the duration term (must not be null)
     * @return this node, for chaining
     */
    public PauseStatement setPauseValue(Term pauseValue) {
        this.pauseValue = pauseValue;
        pauseValue.setParent(this);
        return this;
    }

    /**
     * Populates this node from an ANTLR {@code PauseStatementContext}.
     *
     * @throws ValidationException if the context is of the wrong type
     */
    @Override
    public PauseStatement fromContext(ParserRuleContext context, RawElement parent) throws ValidationException {
        if (!(context instanceof MediatorLangParser.PauseStatementContext)) {
            throw ValidationException.IncompatibleContextType(this.getClass(), "PauseStatementContext", context.toString());
        }

        setParent(parent);
        setPauseValue(Term.parse(((MediatorLangParser.PauseStatementContext) context).term(), this));
        return this;
    }

    @Override
    public boolean equals(Object obj) {
        // Check the type BEFORE calling obj.toString(): the original called
        // toString() first, so equals(null) threw NullPointerException
        // instead of returning false as the Object.equals contract requires.
        return obj instanceof Statement && this.toString().equals(obj.toString());
    }

    @Override
    public int hashCode() {
        // Must accompany equals(): equal statements (same textual form)
        // must produce the same hash code.
        return this.toString().hashCode();
    }

    @Override
    public String toString() {
        return "pause " + pauseValue.toString() + ";";
    }

    @Override
    public RawElement getParent() {
        return parent;
    }

    @Override
    public RawElement setParent(RawElement parent) {
        this.parent = parent;
        return this;
    }

    /** Deep-copies this statement (and its duration term) under a new parent. */
    @Override
    public RawElement copy(RawElement parent) throws ValidationException {
        PauseStatement nrs = new PauseStatement();
        nrs.setParent(parent);
        nrs.setPauseValue(getPauseValue().copy(nrs));
        return nrs;
    }

    /** Rewrites the duration term in place using the given substitution maps. */
    @Override
    public Statement refactor(Map<String, Type> typeRewriteMap, Map<String, Term> termRewriteMap) throws ValidationException {
        setPauseValue(getPauseValue().refactor(typeRewriteMap, termRewriteMap));
        return this;
    }
}
|
MOPCON/App-Android | src/containers/MissionDetail/QRCode.js | <gh_stars>1-10
import React, { Component } from 'react';
import { Text } from 'react-native';
import I18n from '../../locales';
import { MISSION_STATUS, Consumer } from '../../store';
import iconCoinImg from '../../images/icon/iconCoin.png';
import * as Style from './style';
@Consumer('missionStore')
export default class QRCode extends Component {
handleSubmit = (task) => {
this.props.navigation.navigate('QRCode', { task });
}
render() {
const { id } = this.props;
const { quizs } = this.props.context.missionStore;
const task = quizs.find(o => o.id === id);
return (
<Style.QRCodeContainer>
<Style.CardView>
<Style.CardImg
source={{ uri: task.banner_url }}
/>
</Style.CardView>
<Style.QRCodeTitle>{task.title}</Style.QRCodeTitle>
<Style.QRCodeText>{task.description}</Style.QRCodeText>
{
// 未答題
(task.status === MISSION_STATUS.NOT_CHALLANGE) && (
<Style.Button onPress={() => this.handleSubmit(task)}>
<Style.ButtonText>{I18n.t('missionTable.scanQRCode')}</Style.ButtonText>
</Style.Button>
)
}
{
// 答對
(task.status === MISSION_STATUS.SUCCESS) && (
<Style.QuizSuccess>
<Style.QuizSuccessText>{I18n.t('missionTable.successMessage')}</Style.QuizSuccessText>
<Style.QuizReward>
<Style.QuizRewardCoin source={iconCoinImg} />
<Style.QuizRewardText>{task.reward}</Style.QuizRewardText>
</Style.QuizReward>
</Style.QuizSuccess>
)
}
</Style.QRCodeContainer>
);
}
}
|
asiboro/asiboro.github.io | vsdoc/search--/s_329.js | search_result['329']=["topic_00000000000000A7_vars--.html","AuthenticationScheme Fields",""]; |
karreypradeep/BlueSpaceTechEmailApp | EmailApp/src/main/java/com/bluespacetech/security/service/UserAuthenticationProvider.java | package com.bluespacetech.security.service;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.authentication.BadCredentialsException;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.authentication.dao.AbstractUserDetailsAuthenticationProvider;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.stereotype.Component;
/**
 * Spring Security authentication provider that validates a username/password
 * pair against user records loaded through {@link UserService}.
 */
@Component
public class UserAuthenticationProvider extends AbstractUserDetailsAuthenticationProvider {

    // Loads user records (including their encoded passwords) by username.
    @Autowired
    UserService userService;

    // Compares the raw submitted password against the stored encoded one.
    @Autowired
    PasswordEncoder passwordEncoder;

    /**
     * Verifies the submitted credentials against the stored encoded password.
     *
     * @throws BadCredentialsException if either side is null or the password
     *         does not match the stored hash
     */
    @Override
    protected void additionalAuthenticationChecks(UserDetails userDetails,
            UsernamePasswordAuthenticationToken authentication) throws AuthenticationException {
        if (authentication.getCredentials() == null || userDetails.getPassword() == null) {
            throw new BadCredentialsException("Credentials may not be null.");
        }
        if (!passwordEncoder.matches((String) authentication.getCredentials(), userDetails.getPassword())) {
            throw new BadCredentialsException("Invalid Credentials.");
        }
    }

    /**
     * Looks the user up by username; the framework then runs the password
     * checks via {@link #additionalAuthenticationChecks}.
     */
    @Override
    protected UserDetails retrieveUser(String username, UsernamePasswordAuthenticationToken authentication)
            throws AuthenticationException {
        final UserDetails userDetails = userService.loadUserByUsername(username);
        return userDetails;
    }
}
|
coderZsq/coderZsq.practice.server | study-notes/j2ee-collection/architecture/02-分布式事务/mysql-demo/src/main/java/com/sq/demo/mysqldemo/demo/JdbcDemo.java | package com.sq.demo.mysqldemo.demo;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
/**
 * Minimal JDBC demo: inserts one row inside a manually managed transaction,
 * committing on success and rolling back on failure.
 */
public class JdbcDemo {

    public static void main(String[] args) throws Exception {
        String url = "jdbc:mysql://localhost:3306/test";
        String user = "root";
        String password = "<PASSWORD>";

        // try-with-resources guarantees the connection and the statement are
        // closed in all cases; the original hand-written finally block could
        // leak the connection if st.close() itself threw.
        try (Connection connection = DriverManager.getConnection(url, user, password)) {
            // Manage the transaction by hand instead of auto-committing each statement.
            connection.setAutoCommit(false);
            try (PreparedStatement st = connection.prepareStatement(
                    "insert into user(name) values ('test007')")) {
                int count = st.executeUpdate();
                System.out.println("count = " + count);
                connection.commit(); // persist the insert
            } catch (Exception e) {
                e.printStackTrace();
                connection.rollback(); // undo the partial work
            }
        }
    }
}
|
Arronzheng/roncoo-education | roncoo-education-course/roncoo-education-course-service/src/main/java/com/roncoo/education/course/service/common/resq/ActivityCourseViewRESQ.java | <gh_stars>1-10
package com.roncoo.education.course.service.common.resq;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import lombok.experimental.Accessors;
import java.io.Serializable;
import java.math.BigDecimal;
/**
 * Response DTO for an activity-zone / course association
 * (ActivityCourseViewRESQ object).
 *
 * @author husend
 * @since 2020-04-10
 */
// NOTE(review): the @ApiModel value below says "ActivityCourseViewREQ" while
// this class is the RESQ (response) object — looks like a copy/paste slip in
// the Swagger label; confirm before changing the published API docs.
@Data
@Accessors(chain = true)
@ApiModel(value = "ActivityCourseViewREQ分页请求对象", description = "活动专区课程关联表")
public class ActivityCourseViewRESQ implements Serializable {

    private static final long serialVersionUID = 1L;

    // Status flag: 1 = active, 0 = disabled.
    @ApiModelProperty(value = "状态(1:正常;0:禁用)")
    private Integer statusId;

    // Display order.
    @ApiModelProperty(value = "排序")
    private Integer sort;

    // Activity-zone id (serialized as a string to avoid JS precision loss).
    @ApiModelProperty(value = "专区编号")
    @JsonSerialize(using = ToStringSerializer.class)
    private Long activityId;

    // Placement: 0 = desktop, 1 = WeChat.
    @ApiModelProperty(value = "位置(0电脑端,1微信端)")
    private Integer activityLocation;

    // Course id (serialized as a string to avoid JS precision loss).
    @ApiModelProperty(value = "课程ID")
    @JsonSerialize(using = ToStringSerializer.class)
    private Long courseId;

    // Activity kind: 1 = group buy, 2 = bargain, 3 = flash sale.
    @ApiModelProperty(value = "活动类别(1:拼团,2:砍价,3:秒杀)")
    private Integer activityCategory;

    // Price (group buy / flash sale).
    @ApiModelProperty(value = "价格(拼团、秒杀)")
    private BigDecimal coursePrice;

    // Required number of participants for a group buy.
    @ApiModelProperty(value = "拼团人数")
    private Integer groupNum;

    // Remaining stock (bargain / flash sale).
    @ApiModelProperty(value = "库存(砍价、秒杀)")
    private Integer courseStock;

    // Number of "cuts" in a bargain activity.
    @ApiModelProperty(value = "砍价刀数")
    private Integer knifeNum;

    // Time limit in whole hours.
    @ApiModelProperty(value = "限时时间(整数小时)")
    private Integer limitTime;
}
|
hoojaoh/pick | safe/edit_test.go | <reponame>hoojaoh/pick
package safe
import (
"testing"
)
// TestEdit verifies that editing an existing account's credentials updates
// the stored username.
func TestEdit(t *testing.T) {
	safe, err := createTestSafe(t, true)
	if err != nil {
		// Fatal, not Error: continuing with a nil safe would panic on the
		// safe.Edit call below instead of failing cleanly.
		t.Fatal(err)
	}

	account, err := safe.Edit("foo", "Bubbles", "kitt3ns")
	if err != nil {
		// Fatal for the same reason: account is unusable if Edit failed.
		t.Fatal(err)
	}
	if account.Username != "Bubbles" {
		t.Errorf("Expected username Bubbles, got %s", account.Username)
	}
}
|
cotobadesign/cotoba-agent-oss | dialogue-engine/test/programytest/utils/geo/test_geonames.py | <gh_stars>100-1000
"""
Copyright (c) 2020 COTOBA DESIGN, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import unittest
import os
from programy.utils.geo.geonames import GeoNamesApi
from programy.utils.license.keys import LicenseKeys
from programytest.client import TestClient
class GeoNamesTests(unittest.TestCase):
    """Tests for GeoNamesApi construction and postcode lookups.

    Network access is avoided by pointing the lookup at a canned JSON
    response file (geonames_latlong.json) next to this test module.
    """

    def test_geonames_no_license_keys(self):
        # Constructing the API with an empty key store must fail.
        license_keys = LicenseKeys()
        with self.assertRaises(Exception):
            GeoNamesApi(license_keys)

    def test_geonames_no_account_name(self):
        # Country alone is not enough; the account name is also required.
        license_keys = LicenseKeys()
        license_keys.add_key('GEO_NAMES_COUNTRY', "DummyValue")
        with self.assertRaises(Exception):
            GeoNamesApi(license_keys)

    def test_geonames_no_country(self):
        # Account name alone is not enough; the country is also required.
        license_keys = LicenseKeys()
        license_keys.add_key('GEO_NAMES_ACCOUNTNAME', "DummyValue")
        with self.assertRaises(Exception):
            GeoNamesApi(license_keys)

    def test_geonames(self):
        # NOTE(review): unlike the failure tests above, GeoNamesApi() is built
        # here with no argument — presumably it falls back to the client's
        # globally registered key store; confirm against the constructor.
        client = TestClient()
        client.add_license_keys_store()
        geonames = GeoNamesApi()
        self.assertIsNotNone(geonames)
        # Redirect the lookup to a canned response file so no HTTP call is made.
        GeoNamesApi.get_latlong_for_postcode_response_file = os.path.dirname(__file__) + os.sep + "geonames_latlong.json"
        latlng = geonames.get_latlong_for_postcode('KY39UR')
        self.assertIsNotNone(latlng)
        self.assertEqual(latlng.latitude, 56.07206267570594)
        self.assertEqual(latlng.longitude, -3.175233048730664)
|
huifer/action-flow | action-flow-storage/action-flow-mysql-storage/src/main/java/com/github/brick/action/flow/storage/mysql/util/MybatisUtil.java | <gh_stars>0
/*
* Copyright [2022] [brick-team]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.brick.action.flow.storage.mysql.util;
import org.apache.ibatis.builder.xml.XMLMapperBuilder;
import org.apache.ibatis.mapping.Environment;
import org.apache.ibatis.session.Configuration;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;
import org.apache.ibatis.transaction.TransactionFactory;
import org.apache.ibatis.transaction.jdbc.JdbcTransactionFactory;

import javax.sql.DataSource;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
/**
* @author <NAME>
*/
/**
 * Small MyBatis bootstrap/helper: builds a {@link SqlSessionFactory} from the
 * given connection settings plus annotated mapper classes and the XML mapper
 * files found under {@code mapper/} on the classpath, and runs units of work
 * with a per-thread session and automatic commit/rollback.
 */
public class MybatisUtil {

    // One session per thread; inheritable so child threads reuse the parent's session.
    static ThreadLocal<SqlSession> sqlSessionThreadLocal = new InheritableThreadLocal<>();

    // Last constructed instance, exposed via gen(). NOTE(review): effectively a
    // mutable singleton set from the constructor; confirm only one instance is built.
    private static MybatisUtil mybatisUtil;

    private final String user;
    private final String password;
    private final String url;
    private final String dbDriver;
    private SqlSessionFactory sqlSessionFactory;

    /** Classpath directory that holds the XML mapper files. */
    public static final String MAPPER_PATH = "mapper";

    /**
     * Builds the session factory immediately and registers this instance as
     * the one returned by {@link #gen()}.
     *
     * @param clazz annotated mapper interfaces to register
     */
    public MybatisUtil(String user, String password, String url, String dbDriver,
            Class<?>... clazz) {
        this.user = user;
        this.password = password;
        this.url = url;
        this.dbDriver = dbDriver;
        initSqlSessionFactory(clazz);
        mybatisUtil = this;
    }

    /** @return the most recently constructed instance. */
    public static MybatisUtil gen() {
        return mybatisUtil;
    }

    /** @return the current thread's session, or null if none is open. */
    public static SqlSession getThreadLocalSqlSession() {
        return sqlSessionThreadLocal.get();
    }

    /**
     * Builds the SqlSessionFactory from the JDBC environment, the annotated
     * mappers, and every XML mapper file under {@link #MAPPER_PATH}.
     */
    private void initSqlSessionFactory(Class<?>... clazz) {
        TransactionFactory transactionFactory = new JdbcTransactionFactory();
        Environment environment = new Environment("development", transactionFactory,
                dataSource());
        Configuration configuration = new Configuration(environment);
        for (Class<?> aClass : clazz) {
            configuration.addMapper(aClass);
        }
        SqlSessionFactory factory = new SqlSessionFactoryBuilder().build(configuration);

        ClassLoader cl = Thread.currentThread().getContextClassLoader();
        URL resource = cl.getResource(MAPPER_PATH);
        if (resource == null) {
            // Fail loudly instead of the NPE the previous code produced.
            throw new IllegalStateException(
                    "mapper directory '" + MAPPER_PATH + "' not found on classpath");
        }
        File mapperDir = new File(resource.getFile());
        File[] files = mapperDir.listFiles();
        if (files != null) { // listFiles() returns null for non-directories
            for (File mapperFile : files) {
                // try-with-resources: the previous code never closed the stream.
                try (FileInputStream in = new FileInputStream(mapperFile)) {
                    XMLMapperBuilder xmlMapperBuilder = new XMLMapperBuilder(in,
                            configuration, mapperFile.getAbsolutePath(),
                            configuration.getSqlFragments());
                    xmlMapperBuilder.parse();
                }
                catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }
        }
        this.sqlSessionFactory = factory;
    }

    /** Pooled MyBatis data source built from the constructor settings. */
    private DataSource dataSource() {
        return new org.apache.ibatis.datasource.pooled.PooledDataSource(dbDriver, url, user, password);
    }

    /** Detaches the session from the current thread and closes it. */
    private void close(SqlSession session) {
        sqlSessionThreadLocal.remove();
        session.close();
    }

    /** Returns the current thread's session, opening one if necessary. */
    private SqlSession open() {
        SqlSession existing = sqlSessionThreadLocal.get();
        if (existing == null) {
            SqlSession sqlSession = this.sqlSessionFactory.openSession();
            sqlSessionThreadLocal.set(sqlSession);
            return sqlSession;
        }
        else {
            return existing;
        }
    }

    /**
     * Runs a unit of work inside a session: commits on success, rolls back
     * and rethrows on failure, and always closes the session afterwards.
     */
    public void work(ExecuteMapper executeMapper) throws Exception {
        SqlSession open = open();
        try {
            executeMapper.work(open);
            open.commit();
        } catch (Exception e) {
            open.rollback();
            throw e;
        } finally {
            close(open);
        }
    }
}
|
nerkarso/demo | pages/_app.js | <reponame>nerkarso/demo
import { AuthProvider } from '@/contexts/AuthContext';
import '@/styles/index.css';
import { pageview } from '@/utils/gtag';
import { ThemeProvider } from 'next-themes';
import Head from 'next/head';
import { useRouter } from 'next/router';
import { useEffect } from 'react';
import { Slide, ToastContainer } from 'react-toastify';
import 'react-toastify/dist/ReactToastify.css';
import { SWRConfig } from 'swr';
// Next.js custom App: wires up theming, auth context, SWR defaults, shared
// <head> metadata, Google Analytics pageview tracking, and toast notifications
// around every page component.
export default function MyApp({ Component, pageProps }) {
  const router = useRouter();

  // Report a GA pageview on every client-side route change; unsubscribe on
  // unmount so listeners don't accumulate across re-renders.
  useEffect(() => {
    const handleRouteChange = (url) => pageview(url);
    router.events.on('routeChangeComplete', handleRouteChange);
    return () => router.events.off('routeChangeComplete', handleRouteChange);
  }, [router.events]);

  // Pages may set a static `title` property; it is prepended to the site title.
  return (
    <ThemeProvider attribute="class">
      <AuthProvider>
        <SWRConfig
          value={{
            revalidateOnFocus: false,
            revalidateOnReconnect: false,
          }}>
          <Head>
            <title>
              {Component.title && `${Component.title} - `}
              {process.env.NEXT_PUBLIC_SITE_TITLE}
            </title>
            <meta name="title" content={process.env.NEXT_PUBLIC_SITE_TITLE} />
            <meta name="description" content={process.env.NEXT_PUBLIC_SITE_DESCRIPTION} />
            <meta property="og:type" content="website" />
            <meta property="og:url" content={process.env.NEXT_PUBLIC_SITE_URL} />
            <meta property="og:title" content={process.env.NEXT_PUBLIC_SITE_TITLE} />
            <meta property="og:description" content={process.env.NEXT_PUBLIC_SITE_DESCRIPTION} />
            <meta property="og:image" content={`${process.env.NEXT_PUBLIC_SITE_URL}/og-image.png`} />
            <meta property="twitter:card" content="summary_large_image" />
            <meta property="twitter:url" content={process.env.NEXT_PUBLIC_SITE_URL} />
            <meta property="twitter:title" content={process.env.NEXT_PUBLIC_SITE_TITLE} />
            <meta property="twitter:description" content={process.env.NEXT_PUBLIC_SITE_DESCRIPTION} />
            <meta property="twitter:image" content={`${process.env.NEXT_PUBLIC_SITE_URL}/og-image.png`} />
            <meta name="theme-color" content="#4f46e5" />
            <link rel="manifest" href="/manifest.json" />
            <link rel="icon" href="/icon-192.png" />
          </Head>
          <Component {...pageProps} />
          <ToastContainer
            position="bottom-center"
            transition={Slide}
            draggable={false}
            autoClose={3000}
            closeButton={false}
            closeOnClick
          />
        </SWRConfig>
      </AuthProvider>
    </ThemeProvider>
  );
}
|
jimbethancourt/topic-viewer | TopicViewer/src/br/ufmg/aserg/topicviewer/control/semantic/SemanticTopicsCalculator.java | package br.ufmg.aserg.topicviewer.control.semantic;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import cern.jet.math.tdouble.DoubleFunctions;
import cern.jet.math.tdouble.DoublePlusMultFirst;
import org.splabs.vocabulary.filter.IdentifierFilter;
import ptstemmer.exceptions.PTStemmerException;
import br.ufmg.aserg.topicviewer.util.DoubleMatrix2D;
import br.ufmg.aserg.topicviewer.util.Properties;
import cern.colt.function.tdouble.DoubleDoubleFunction;
import cern.colt.matrix.tdouble.DoubleMatrix1D;
import cern.colt.matrix.tdouble.impl.DenseDoubleMatrix1D;
import cern.colt.matrix.tdouble.impl.DenseDoubleMatrix2D;
import cern.colt.matrix.tdouble.impl.SparseDoubleMatrix1D;
import cern.colt.matrix.tdouble.algo.DenseDoubleAlgebra;
public class SemanticTopicsCalculator {
private static final int NUM_SELECTED_TERMS = 10;
public static String[][] generateSemanticTopicsFromVocabulary(int[][] clusters, DoubleMatrix2D lsiTermDocMatrix, DoubleMatrix2D lsiTransform, String[] termIds, boolean mostRelevant) throws IOException {
DenseDoubleAlgebra matrixAlgebra = DenseDoubleAlgebra.ZERO;
final int numTerms = termIds.length;
final int numClusters = clusters.length;
cern.colt.matrix.tdouble.DoubleMatrix2D lsiTransformCopy = getLsiTransformCopy(lsiTransform);
DoubleMatrix2D clusterSimilarity = new DoubleMatrix2D(numTerms, numClusters);
for (int i = 0; i < numTerms; i++) {
DoubleMatrix1D termQuery = new SparseDoubleMatrix1D(numTerms); termQuery.set(i, 1D);
termQuery = matrixAlgebra.mult(lsiTransformCopy, termQuery);
for (int j = 0; j < numClusters; j++) {
double clusterSize = clusters[j].length;
double similarity = 0D;
double avgSimilarity = 0D;
for (int documentId : clusters[j]) {
similarity = calculateSimilarity(termQuery, lsiTermDocMatrix.viewColumn(documentId));
if (Double.isNaN(similarity)) clusterSize--;
avgSimilarity += (Double.isNaN(similarity) ? 0D : similarity);
}
clusterSimilarity.set(i, j, clusterSize == 0D ? 0D : avgSimilarity / clusterSize);
}
}
if (mostRelevant)
clusterSimilarity = getRelevanceMatrix(numTerms, numClusters, clusterSimilarity);
return getBestTerms(clusterSimilarity, termIds);
}
public static String[][] generateSemanticTopicsFromClasses(int[][] clusters, String[] termIds, String[] documentIds) throws IOException, PTStemmerException {
IdentifierFilter filter = new IdentifierFilter(Properties.getProperties());
final int numTerms = termIds.length;
final int numClusters = clusters.length;
DoubleMatrix2D similarityMatrix = new DoubleMatrix2D(numTerms, numClusters);
for (int i = 0; i < clusters.length; i++)
for (int j = 0; j < clusters[i].length; j++) {
String documentName = documentIds[clusters[i][j]];
documentName = documentName.substring(documentName.lastIndexOf('.')+1);
for (String term : filter.filterIdentifiers(new String[] {documentName})) {
int index = indexOf(term, termIds);
if (index != -1) similarityMatrix.set(index, i, similarityMatrix.get(index, i) + 1);
}
}
return getBestTerms(similarityMatrix, termIds);
}
private static int indexOf(String term, String[] termIds) {
for (int i = 0; i < termIds.length; i++)
if (termIds[i].equals(term)) return i;
return -1;
}
private static DoubleMatrix2D getRelevanceMatrix(int numTerms, int numClusters, DoubleMatrix2D similarityMatrix) throws IOException {
final DoubleDoubleFunction sumFunction = DoubleFunctions.plusMultFirst(1);
final DoubleDoubleFunction relevanceFunction = DoublePlusMultFirst.minusDiv(numClusters-1);
DoubleMatrix2D clusterRelevance = new DoubleMatrix2D(numTerms, numClusters);
for (int i = 0; i < numClusters; i++) {
DoubleMatrix1D termRelevance = similarityMatrix.viewColumn(i);
DoubleMatrix1D termInterSimilarity = new DenseDoubleMatrix1D(numTerms);
for (int j = 0; j < numClusters; j++)
if (i != j) termInterSimilarity.assign(similarityMatrix.viewColumn(j), sumFunction);
termRelevance.assign(termInterSimilarity, relevanceFunction);
for (int j = 0; j < numTerms; j++)
clusterRelevance.set(j, i, termRelevance.get(j));
}
return clusterRelevance;
}
private static String[][] getBestTerms(DoubleMatrix2D similarityMatrix, String[] termIds) {
String[][] topics = new String[similarityMatrix.columns()][0];
for (int i = 0; i < similarityMatrix.columns(); i++) {
int[] topicIds = getMostRelevantTerms(similarityMatrix.viewColumn(i));
String[] topic = new String[NUM_SELECTED_TERMS];
for (int j = 0; j < NUM_SELECTED_TERMS; j++)
topic[j] = termIds[topicIds[j]];
topics[i] = topic;
}
return topics;
}
public static String[][] generateSemanticTopics(int[][] clusters, DoubleMatrix2D lsiTermDocMatrix, DoubleMatrix2D lsiTransform, String[] termIds) throws IOException {
DenseDoubleAlgebra matrixAlgebra = DenseDoubleAlgebra.ZERO;
final int numTerms = termIds.length;
// final int numDocuments = lsiTermDocMatrix.columns();
final int numClusters = clusters.length;
cern.colt.matrix.tdouble.DoubleMatrix2D lsiTransformCopy = getLsiTransformCopy(lsiTransform);
DoubleMatrix2D clusterSimilarity = new DoubleMatrix2D(numTerms, numClusters);
for (int i = 0; i < numTerms; i++) {
DoubleMatrix1D termQuery = new SparseDoubleMatrix1D(numTerms); termQuery.set(i, 1D);
termQuery = matrixAlgebra.mult(lsiTransformCopy, termQuery);
for (int j = 0; j < numClusters; j++) {
double clusterSize = clusters[j].length;
double similarity = 0D;
double avgSimilarity = 0D;
for (int documentId : clusters[j]) {
similarity = calculateSimilarity(termQuery, lsiTermDocMatrix.viewColumn(documentId));
if (Double.isNaN(similarity)) clusterSize--;
avgSimilarity += (Double.isNaN(similarity) ? 0D : similarity);
}
clusterSimilarity.set(i, j, clusterSize == 0D ? 0D : avgSimilarity / clusterSize);
}
}
// calculating similarity between terms and documents
// DoubleMatrix2D documentSimilarity = new DoubleMatrix2D(numTerms, numDocuments);
// for (int i = 0; i < numTerms; i++) {
// DoubleMatrix1D termQuery = new SparseDoubleMatrix1D(numTerms); termQuery.set(i, 1D);
//
// for (int j = 0; j < numDocuments; j++) {
// Double similarity = calculateSimilarity(matrixAlgebra.mult(getLsiTransformCopy(lsiTransform), termQuery), lsiTermDocMatrix.viewColumn(j));
// documentSimilarity.set(i, j, similarity);
//
// }
// }
//
// // calculating similarity between terms and clusters
// final DoubleDoubleFunction sumFunction = PlusMult.plusMult(1);
//
// DoubleMatrix2D clusterSimilarity = new DoubleMatrix2D(numTerms, numClusters);
// for (int i = 0; i < numClusters; i++) {
// DoubleMatrix1D similarity = new DenseDoubleMatrix1D(numTerms);
//
// for (int documentId : clusters[i])
// similarity.assign(documentSimilarity.viewColumn(documentId), sumFunction);
// similarity.assign(Mult.div(clusters[i].length));
//
// for (int j = 0; j < numTerms; j++)
// clusterSimilarity.set(j, i, similarity.get(j));
// }
// calculating relevance between terms and clusters
// final DoubleDoubleFunction relevanceFunction = PlusMult.minusDiv(numClusters-1);
//
// documentSimilarity = null;
// DoubleMatrix2D clusterRelevance = new DoubleMatrix2D(numTerms, numClusters);
// for (int i = 0; i < numClusters; i++) {
// DoubleMatrix1D termRelevance = clusterSimilarity.viewColumn(i);
// DoubleMatrix1D termInterSimilarity = new DenseDoubleMatrix1D(numTerms);
//
// for (int j = 0; j < numClusters; j++)
// if (i != j) termInterSimilarity.assign(clusterSimilarity.viewColumn(j), sumFunction);
//
// termRelevance.assign(termInterSimilarity, relevanceFunction);
//
// for (int j = 0; j < numTerms; j++)
// clusterRelevance.set(j, i, termRelevance.get(j));
// }
// calculating most relevant terms
// clusterSimilarity = null;
String[][] topics = new String[numClusters][0];
for (int i = 0; i < numClusters; i++) {
// int[] topicIds = getMostRelevantTerms(clusterRelevance.viewColumn(i));
int[] topicIds = getMostRelevantTerms(clusterSimilarity.viewColumn(i));
String[] topic = new String[NUM_SELECTED_TERMS];
for (int j = 0; j < NUM_SELECTED_TERMS; j++)
topic[j] = termIds[topicIds[j]];
topics[i] = topic;
}
return topics;
}
private static double calculateSimilarity(DoubleMatrix1D vector1, DoubleMatrix1D vector2) {
double cosineSimilarity = vector1.zDotProduct(vector2);
cosineSimilarity /= Math.sqrt(vector1.zDotProduct(vector1) * vector2.zDotProduct(vector2));
return cosineSimilarity;
}
private static int[] getMostRelevantTerms(final DoubleMatrix1D termRelevance) {
int[] relevantTerms = new int[NUM_SELECTED_TERMS];
int newTermIndex = 0;
long numTerms = termRelevance.size();
Set<Integer> visitedTerms = new HashSet<Integer>();
while (newTermIndex < NUM_SELECTED_TERMS && visitedTerms.size() < numTerms) {
int maxFrequencyIndex = -1;
double maxFrequency = Double.NEGATIVE_INFINITY;
for (int i = 0; i < termRelevance.size(); i++)
if (!visitedTerms.contains(i) && termRelevance.get(i) > maxFrequency) {
maxFrequencyIndex = i;
maxFrequency = termRelevance.get(i);
}
if (maxFrequencyIndex != -1) {
relevantTerms[newTermIndex] = maxFrequencyIndex;
newTermIndex++;
}
visitedTerms.add(maxFrequencyIndex);
}
return relevantTerms;
}
public static cern.colt.matrix.tdouble.DoubleMatrix2D getLsiTransformCopy(DoubleMatrix2D lsiTransform) {
cern.colt.matrix.tdouble.DoubleMatrix2D matrix = new DenseDoubleMatrix2D(lsiTransform.rows(), lsiTransform.columns());
for (int i = 0; i < lsiTransform.rows(); i++)
for (int j = 0; j < lsiTransform.columns(); j++)
matrix.set(i, j, lsiTransform.get(i, j));
return matrix;
}
} |
agpoulsen/aem-orchestrator | src/test/java/com/shinesolutions/aemorchestrator/config/AwsConfigTest.java | <filename>src/test/java/com/shinesolutions/aemorchestrator/config/AwsConfigTest.java
package com.shinesolutions.aemorchestrator.config;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.shinesolutions.aemorchestrator.model.ProxyDetails;
import org.junit.Before;
import org.junit.Test;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.core.IsNull.notNullValue;
import static org.hamcrest.core.IsNull.nullValue;
import static org.springframework.test.util.ReflectionTestUtils.setField;
/**
 * Unit tests for {@code AwsConfig#awsClientConfig}: proxy selection
 * (client-configured proxy vs. discovered HTTP proxy vs. none) and pass-through
 * of protocol/timeout/retry settings. Private fields are injected with
 * Spring's ReflectionTestUtils since AwsConfig normally gets them from
 * {@code @Value} properties.
 */
public class AwsConfigTest {

    private AwsConfig awsConfig;

    @Before
    public void setup() {
        awsConfig = new AwsConfig();
    }

    @Test
    public void testAwsClientConfig_EmptyProxy() {
        // Setup HTTP proxy with an empty host: should behave as "no proxy".
        String httpProxyHost = "";
        ProxyDetails proxyDetails = new ProxyDetails();
        proxyDetails.setHost(httpProxyHost);

        String clientProtocol = "http";
        int clientConnectionTimeout = 10;
        int clientMaxErrorRetry = 20;

        setField(awsConfig, "clientProtocol", clientProtocol);
        setField(awsConfig, "clientConnectionTimeout", clientConnectionTimeout);
        setField(awsConfig, "clientMaxErrorRetry", clientMaxErrorRetry);
        setField(awsConfig, "useProxy", false);

        ClientConfiguration clientConfiguration = awsConfig.awsClientConfig(proxyDetails);

        // No proxy host should be set; other settings are passed through.
        assertThat(clientConfiguration.getProxyHost(), nullValue());
        assertThat(clientConfiguration.getProtocol().toString(), equalTo(clientProtocol));
        assertThat(clientConfiguration.getConnectionTimeout(), equalTo(clientConnectionTimeout));
        assertThat(clientConfiguration.getMaxErrorRetry(), equalTo(clientMaxErrorRetry));
    }

    @Test
    public void testAwsClientConfig_NoProxy() {
        String clientProtocol = "http";
        int clientConnectionTimeout = 10;
        int clientMaxErrorRetry = 20;

        setField(awsConfig, "clientProtocol", clientProtocol);
        setField(awsConfig, "clientConnectionTimeout", clientConnectionTimeout);
        setField(awsConfig, "clientMaxErrorRetry", clientMaxErrorRetry);
        setField(awsConfig, "useProxy", false);

        // Passing null proxy details must also yield a proxy-less configuration.
        ClientConfiguration clientConfiguration = awsConfig.awsClientConfig(null);

        // -1 is the AWS SDK's "unset" proxy port.
        assertThat(clientConfiguration.getProxyHost(), nullValue());
        assertThat(clientConfiguration.getProxyPort(), equalTo(-1));
        assertThat(clientConfiguration.getProtocol().toString(), equalTo(clientProtocol));
        assertThat(clientConfiguration.getConnectionTimeout(), equalTo(clientConnectionTimeout));
        assertThat(clientConfiguration.getMaxErrorRetry(), equalTo(clientMaxErrorRetry));
    }

    @Test
    public void testAwsClientConfig_UseProxy() {
        String clientProtocol = "http";
        int clientConnectionTimeout = 10;
        int clientMaxErrorRetry = 20;

        setField(awsConfig, "clientProtocol", clientProtocol);
        setField(awsConfig, "clientConnectionTimeout", clientConnectionTimeout);
        setField(awsConfig, "clientMaxErrorRetry", clientMaxErrorRetry);

        // Setup client proxy (configured explicitly on AwsConfig).
        String clientProxyHost = "clientProxyHost";
        Integer clientProxyPort = 1;
        setField(awsConfig, "clientProxyHost", clientProxyHost);
        setField(awsConfig, "clientProxyPort", clientProxyPort);

        // Setup HTTP proxy (discovered ProxyDetails).
        String httpProxyHost = "httpProxyHost";
        Integer httpProxyPort = 2;
        ProxyDetails proxyDetails = new ProxyDetails();
        proxyDetails.setHost(httpProxyHost);
        proxyDetails.setPort(httpProxyPort);

        // useProxy=true: the explicitly configured client proxy wins.
        setField(awsConfig, "useProxy", true);
        ClientConfiguration clientConfiguration = awsConfig.awsClientConfig(proxyDetails);

        assertThat(clientConfiguration.getProxyHost(), equalTo(clientProxyHost));
        assertThat(clientConfiguration.getProxyPort(), equalTo(clientProxyPort));
        assertThat(clientConfiguration.getProtocol().toString(), equalTo(clientProtocol));
        assertThat(clientConfiguration.getConnectionTimeout(), equalTo(clientConnectionTimeout));
        assertThat(clientConfiguration.getMaxErrorRetry(), equalTo(clientMaxErrorRetry));

        // useProxy=false with non-empty ProxyDetails: the HTTP proxy is used.
        setField(awsConfig, "useProxy", false);
        clientConfiguration = awsConfig.awsClientConfig(proxyDetails);

        assertThat(clientConfiguration.getProxyHost(), equalTo(httpProxyHost));
        assertThat(clientConfiguration.getProxyPort(), equalTo(httpProxyPort));
        assertThat(clientConfiguration.getProtocol().toString(), equalTo(clientProtocol));
        assertThat(clientConfiguration.getConnectionTimeout(), equalTo(clientConnectionTimeout));
        assertThat(clientConfiguration.getMaxErrorRetry(), equalTo(clientMaxErrorRetry));
    }

    @Test
    public void testAwsCredentialsProvider() {
        // Smoke test: the default credentials provider chain must be created.
        AWSCredentialsProvider awsCredentialsProvider = awsConfig.awsCredentialsProvider();
        assertThat(awsCredentialsProvider, notNullValue());
    }
}
|
carrenolg/golang | courses/1.Go(golang)/section_06/004-bit-shifting/main.go | package main
import "fmt"
// using iota with bit shift
func main() {
// base 10
kb := 1024
mb := kb * 1024
gb := mb * 1024
fmt.Printf("decimal:%d,\t\tbinary:%b\n", kb, kb)
fmt.Printf("decimal:%d,\tbinary:%b\n", mb, mb)
fmt.Printf("decimal:%d,\tbinary:%b\n", gb, gb)
// using bit shift with iota
const (
_ = iota
kbit = 1 << (iota * 10)
mbit = 1 << (iota * 10)
gbit = 1 << (iota * 10)
)
fmt.Printf("decimal:%d,\t\tbinary:%b\n", kbit, kbit)
fmt.Printf("decimal:%d,\tbinary:%b\n", mbit, mbit)
fmt.Printf("decimal:%d,\tbinary:%b\n", gbit, gbit)
// types
fmt.Printf("%T, %v\n", kbit, kbit)
fmt.Printf("%T, %v\n", mbit, mbit)
fmt.Printf("%T, %v\n", gbit, gbit)
}
|
wangshankun/Tengine_Atlas | executor/operator/ref/kernel/argmax/ref_argmax_int8.c | <reponame>wangshankun/Tengine_Atlas<gh_stars>10-100
/*
 * Reference argmax kernel for int8 tensors.
 *
 * NOTE(review): this is an unimplemented stub — it ignores all of its
 * arguments and always reports success (0). TODO: confirm whether the int8
 * path is handled elsewhere (e.g. via dequantization to fp32) or whether an
 * implementation is still missing.
 */
static int ref_argmax_int8(int8_t* input, int8_t* output, ref_argmax_param* param)
{
    return 0;
}
|
stoman/CompetitiveProgramming | problems/primality/submissions/accepted/Stefan.java | <reponame>stoman/CompetitiveProgramming
//Author: <NAME>
import java.io.*;
import java.util.*;
import java.text.*;
import java.math.*;
import java.util.regex.*;
public class Stefan {

    /**
     * Trial-division primality test.
     *
     * @param n the number to test
     * @return true iff n is prime (n <= 1 is never prime)
     */
    public static boolean isPrime(int n) {
        if (n <= 1) {
            return false;
        }
        // Use i <= n / i rather than i * i <= n: the multiplication overflows
        // int once i reaches 46341, which makes the original loop misbehave
        // (and run for ~2^31 iterations) for n near Integer.MAX_VALUE.
        for (int i = 2; i <= n / i; i++) {
            if (n % i == 0) {
                return false;
            }
        }
        return true;
    }

    /**
     * Reads a count p followed by p integers and prints "Prime" or
     * "Not prime" for each.
     */
    public static void main(String[] args) {
        Scanner in = new Scanner(System.in);
        int p = in.nextInt();
        for (int a0 = 0; a0 < p; a0++) {
            int n = in.nextInt();
            System.out.println(isPrime(n) ? "Prime" : "Not prime");
        }
    }
}
|
R-YaTian/LC.VC.SA.Plugin | wm_sachs/dllmain.cpp | #include <windows.h>
#include "CPlugin.h"
// DLL entry point: on process attach, only allow the plugin to load when its
// resource file is present and the host game version matches; returning FALSE
// makes Windows abort the DLL load. All other notifications are accepted.
BOOL WINAPI DllMain(HMODULE hDllHandle, DWORD dwReason, LPVOID lpreserved)
{
    if (dwReason == DLL_PROCESS_ATTACH)
    {
        return (CPlugin::CheckResourceFile(hDllHandle) && CPlugin::CheckGameVersion());
    }
    return TRUE;
}
|
braswelljr/alGOrithm | utils/getArray.go | package utils
import "fmt"
// GetArray interactively reads an integer array from standard input: first
// the length, then each element. It returns the slice, its length, and the
// first input error encountered (nil on success).
func GetArray() ([]int, int, error) {
	var arrayLen int
	fmt.Printf("Please enter the length of the Array : ")
	// Return immediately on a failed read: the original code kept going with
	// an uninitialized/garbage length, which could even panic in make().
	if _, err := fmt.Scan(&arrayLen); err != nil {
		fmt.Printf("\nError : %v", err)
		return nil, 0, err
	}
	if arrayLen < 0 {
		// A negative length would make make() panic.
		return nil, 0, fmt.Errorf("array length must be non-negative, got %d", arrayLen)
	}

	array := make([]int, arrayLen)
	for i := 0; i < arrayLen; i++ {
		fmt.Printf("Please enter the element %d : \n", i+1)
		if _, err := fmt.Scan(&array[i]); err != nil {
			fmt.Printf("\nError : %v", err)
			return array, arrayLen, err
		}
	}
	return array, arrayLen, nil
}
|
silenc3502/PersonalProject-1 | junhurkahn/BackEnd/demo/src/main/java/com/example/demo/repository/jpa/order58/VueJpaMemberWithAuthRepository.java | <gh_stars>10-100
package com.example.demo.repository.jpa.order58;
import com.example.demo.entity.jpa.VueJpaMemberWithAuth;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import java.util.Optional;
/**
 * Spring Data JPA repository for {@link VueJpaMemberWithAuth}.
 */
public interface VueJpaMemberWithAuthRepository extends JpaRepository<VueJpaMemberWithAuth, Long> {

    /**
     * Finds a member by their login id.
     *
     * @param userId the login id to search for
     * @return the matching member, or empty if none exists
     */
    @Query("select m from VueJpaMemberWithAuth m where m.userId = :userId")
    Optional<VueJpaMemberWithAuth> findByUserId(String userId);
}
Anioko/TestApp | app/blueprints/marketplace/apis.py | from flask import session, render_template
from flask_login import current_user
from flask_restful import Resource, reqparse
from app.models import User, MCart, MProduct, MShippingMethod, MCartItem, MSellerCart
from app.utils import jsonify_object, db
def get_current_cart():
    """Return the single active cart for the current visitor.

    Logged-in users are keyed by user id, anonymous visitors by the session
    cart id. Any duplicate carts for the same key are deleted so exactly one
    cart survives; if no cart exists yet, a fresh one is created and
    committed.

    NOTE(review): assumes ``session['cart_id']`` has already been populated
    by earlier middleware/view code — a missing key would raise KeyError;
    confirm against the blueprint's request setup.
    """
    session_id = session['cart_id']
    if current_user.is_authenticated:
        cart = MCart.query.filter_by(user_id=current_user.id).first()
        if cart:
            # Keep the first cart found, drop any duplicates for this user.
            MCart.query.filter_by(user_id=current_user.id).filter(MCart.id != cart.id).delete()
        else:
            cart = MCart(user_id=current_user.id)
            db.session.add(cart)
            db.session.commit()
            db.session.refresh(cart)
    else:
        cart = MCart.query.filter_by(session_id=session_id).first()
        if cart:
            # Same de-duplication, keyed by the anonymous session id.
            MCart.query.filter_by(session_id=session_id).filter(MCart.id != cart.id).delete()
        else:
            cart = MCart(session_id=session_id)
            db.session.add(cart)
            db.session.commit()
            db.session.refresh(cart)
    return cart
class CartCount(Resource):
    """REST endpoint reporting how many items are in the visitor's cart."""

    def get(self):
        """Return a success flag and the number of items in the current cart."""
        current = get_current_cart()
        item_total = len(current.cart_items)
        return {'status': 1, 'count': item_total}
class OrderSummary(Resource):
    """REST endpoint rendering the order-summary HTML fragment for checkout."""

    def get(self, step, delivery):
        """Render the summary for the given checkout step and shipping method id."""
        active_cart = get_current_cart()
        shipping = MShippingMethod.query.filter_by(id=delivery).first()
        return render_template(
            'marketplace/cart/order_summary.html',
            step=step,
            cart=active_cart,
            delivery=shipping,
        )
class AddToCart(Resource):
    """REST endpoint that adds one unit of a product to the visitor's cart.

    Enforces a single currency per cart, maintains the per-seller sub-cart,
    and increments the count when the product is already present.
    """

    def __init__(self):
        self.parser = reqparse.RequestParser()
        self.parser.add_argument('product_id', help='This field cannot be blank', required=True)

    def post(self):
        data = self.parser.parse_args()
        product = MProduct.query.get(data['product_id'])
        if not product:
            return {
                'status': 0,
                'title': "Error",
                'message': "Couldn't find product to add"
            }
        user_id = None
        if current_user.is_authenticated:
            user_id = current_user.id
        cart = get_current_cart()
        cart_currency = cart.currency
        # Reject a product whose currency differs from the cart's currency.
        if cart_currency:
            if cart_currency != product.price_currency:
                return {
                    'status': 0,
                    'title': "Error",
                    'message': "Cannot add product of currency {} because cart currency is {}".format(product.price_currency.name, cart_currency.name)
                }
        # Re-bind the cart to the current user (None for anonymous visitors).
        cart.user_id = user_id
        # Each seller gets their own sub-cart inside the visitor's cart.
        seller_cart = MSellerCart.query.filter_by(cart=cart).filter_by(seller=product.seller).first()
        if not seller_cart:
            # NOTE(review): when the cart has no currency yet, the new seller
            # cart is created with currency=None — presumably set later when
            # the first item fixes the currency; confirm against MSellerCart.
            seller_cart = MSellerCart(
                cart=cart,
                seller=product.seller,
                currency=cart_currency,
                buyer=current_user if current_user.is_authenticated else None,
            )
            db.session.add(seller_cart)
            db.session.commit()
            db.session.refresh(seller_cart)
        # Increment the count if the product is already in the cart,
        # otherwise create a fresh line item with count 1.
        cart_item = MCartItem.query.filter_by(product=product).filter_by(cart=cart).first()
        if cart_item:
            cart_item.count += 1
        else:
            cart_item = MCartItem(
                cart=cart,
                seller_cart=seller_cart,
                product=product,
                seller=product.seller,
                buyer=current_user if current_user.is_authenticated else None,
                count=1
            )
        db.session.add(cart)
        db.session.add(cart_item)
        db.session.commit()
        count = cart.product_count(product.id)
        return {
            'status': 1,
            'title': "Cart Change",
            'message': "{} pieces of {} are in the cart now".format(product.name, count),
            'count': count
        }
class SubFromCart(Resource):
    """Remove one unit of a product from the current cart.

    Decrements the item count, deletes the cart item when it reaches
    zero, and drops the seller's sub-cart once it holds no more items.
    """

    def __init__(self):
        self.parser = reqparse.RequestParser()
        self.parser.add_argument('product_id', help='This field cannot be blank', required=True)

    def post(self):
        data = self.parser.parse_args()
        product = MProduct.query.get(data['product_id'])
        if not product:
            return {
                'status': 0,
                'title': "Error",
                # NOTE(review): message says "add" but this endpoint removes
                # items — looks copy-pasted from AddToCart; confirm intent
                # before changing the user-visible text.
                'message': "Couldn't find product to add"
            }
        user_id = None
        if current_user.is_authenticated:
            user_id = current_user.id
        cart = get_current_cart()
        cart.user_id = user_id
        cart_item = MCartItem.query.filter_by(product=product).filter_by(cart=cart).first()
        if cart_item:
            cart_item_seller = cart_item.seller
            if cart_item.count > 1:
                cart_item.count -= 1
                db.session.add(cart_item)
            else:
                db.session.delete(cart_item)
            # NOTE(review): the deleted item may still appear in
            # seller_cart.cart_items until the session is flushed, so the
            # sub-cart may survive one extra request — verify against the
            # SQLAlchemy session configuration.
            seller_cart = MSellerCart.query.filter_by(cart=cart, seller=cart_item_seller).first()
            if seller_cart:
                if len(seller_cart.cart_items) < 1:
                    db.session.delete(seller_cart)
        db.session.commit()
        count = cart.product_count(product.id)
        return {
            'status': 1,
            'title': "Cart Change",
            'message': "Item Removed From Cart Successfully : {}".format(count),
            'count': count
        }
|
11676670/webmanager | src/main/java/com/jspxcms/common/file/LocalFileHandler.java | package com.jspxcms.common.file;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import javax.imageio.ImageIO;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.multipart.MultipartFile;
import com.jspxcms.common.file.CommonFile.FileType;
import com.jspxcms.common.image.Images;
import com.jspxcms.common.web.PathResolver;
import freemarker.template.Template;
/**
* 本地文件处理
*
* @author liufang
*
*/
public class LocalFileHandler extends FileHandler {

    private static final Logger logger = LoggerFactory.getLogger(LocalFileHandler.class);

    /** Resolves logical ids/paths into physical file-system paths. */
    private PathResolver pathResolver;

    public LocalFileHandler(PathResolver pathResolver, String prefix) {
        this.pathResolver = pathResolver;
        this.prefix = prefix;
    }

    /** Creates directory {@code name} (and any missing parents) under {@code id}. */
    @Override
    public boolean mkdir(String name, String id) {
        File parent = new File(pathResolver.getPath(id, prefix));
        File dir = new File(parent, name);
        return dir.mkdirs();
    }

    /** Renames entry {@code id} to {@code dest} within the same parent directory. */
    @Override
    public boolean rename(String dest, String id) {
        File file = new File(pathResolver.getPath(id, prefix));
        return file.renameTo(new File(file.getParentFile(), dest));
    }

    /** Moves several entries into directory {@code dest}. */
    @Override
    public void move(String dest, String[] ids) {
        // Delegate to the single-id overload; the previous version
        // duplicated its body verbatim inside this loop.
        for (String id : ids) {
            move(dest, id);
        }
    }

    /** Moves a single entry into directory {@code dest}, creating it if needed. */
    @Override
    public void move(String dest, String id) {
        File file = new File(pathResolver.getPath(id, prefix));
        File destDir = new File(pathResolver.getPath(dest, prefix));
        try {
            FileUtils.moveToDirectory(file, destDir, true);
        } catch (IOException e) {
            logger.error("move directory error.", e);
        }
    }

    /** Writes {@code text} as UTF-8 into file {@code name} under {@code path}. */
    @Override
    public void store(String text, String name, String path) throws IOException {
        File parent = new File(pathResolver.getPath(path, prefix));
        if (!parent.exists()) {
            parent.mkdirs();
        }
        File file = new File(parent, name);
        FileUtils.write(file, text, "UTF-8");
    }

    /** Stores an uploaded file under {@code path}, keeping its original name. */
    @Override
    public void store(MultipartFile file, String path) throws IllegalStateException, IOException {
        File parent = new File(pathResolver.getPath(path, prefix));
        if (!parent.exists()) {
            parent.mkdirs();
        }
        File dest = new File(parent, file.getOriginalFilename());
        file.transferTo(dest);
    }

    /** Copies an InputStream to the resolved {@code filename}. */
    @Override
    public void storeFile(InputStream source, String filename) throws IllegalStateException, IOException {
        File dest = new File(pathResolver.getPath(filename, prefix));
        FileUtils.copyInputStreamToFile(source, dest);
    }

    /** Moves an existing file to the resolved {@code filename}. */
    @Override
    public void storeFile(File file, String filename) throws IllegalStateException, IOException {
        File dest = new File(pathResolver.getPath(filename, prefix));
        FileUtils.moveFile(file, dest);
    }

    /** Moves each file in {@code files} to the matching entry of {@code filenames}. */
    @Override
    public void storeFile(List<File> files, List<String> filenames) throws IllegalStateException, IOException {
        for (int i = 0, len = files.size(); i < len; i++) {
            storeFile(files.get(i), filenames.get(i));
        }
    }

    /** Stores an uploaded file at the resolved {@code filename}. */
    @Override
    public void storeFile(MultipartFile file, String filename) throws IllegalStateException, IOException {
        File dest = new File(pathResolver.getPath(filename, prefix));
        File parent = dest.getParentFile();
        if (!parent.exists()) {
            parent.mkdirs();
        }
        file.transferTo(dest);
    }

    /** Renders a FreeMarker template to {@code filename} as UTF-8; errors are logged. */
    @Override
    public void storeFile(Template template, Object rootMap, String filename) {
        File dest = new File(pathResolver.getPath(filename, prefix));
        File parent = dest.getParentFile();
        if (!parent.exists()) {
            parent.mkdirs();
        }
        try {
            OutputStream os = null;
            Writer writer = null;
            try {
                os = new FileOutputStream(dest);
                writer = new OutputStreamWriter(os, "UTF-8");
                template.process(rootMap, writer);
            } finally {
                IOUtils.closeQuietly(writer);
                IOUtils.closeQuietly(os);
            }
        } catch (Exception e) {
            logger.error(null, e);
        }
    }

    /** Writes {@code image} in the given format to {@code filename}. */
    @Override
    public void storeImage(BufferedImage image, String formatName, String filename) throws IOException {
        File dest = new File(pathResolver.getPath(filename, prefix));
        FilesEx.makeParentDir(dest);
        ImageIO.write(image, formatName, dest);
    }

    /** Writes each image to the matching entry of {@code filenames}. */
    @Override
    public void storeImages(List<BufferedImage> images, String formatName, List<String> filenames) throws IOException {
        for (int i = 0, len = images.size(); i < len; i++) {
            storeImage(images.get(i), formatName, filenames.get(i));
        }
    }

    /**
     * Deletes all given entries.
     *
     * @return true only if every delete succeeded; the previous version
     *         reported just the outcome of the last delete.
     */
    @Override
    public boolean delete(String[] ids) {
        boolean result = ids.length > 0;
        for (String id : ids) {
            result = delete(id) && result;
        }
        return result;
    }

    /** Deletes a single entry quietly (no exception on failure). */
    @Override
    public boolean delete(String id) {
        File file = new File(pathResolver.getPath(id, prefix));
        return FileUtils.deleteQuietly(file);
    }

    /** Lists the child names of directory {@code path}; empty list if none. */
    @Override
    public List<String> list(String path) {
        ArrayList<String> list = new ArrayList<String>();
        File parent = new File(pathResolver.getPath(path, prefix));
        String[] names = parent.list();
        if (names != null) {
            list.addAll(Arrays.asList(names));
        }
        return list;
    }

    @Override
    public File getFile(String id) {
        File file = new File(pathResolver.getPath(id, prefix));
        return file;
    }

    /** Loads a CommonFile; text files additionally get their UTF-8 content. */
    @Override
    public CommonFile get(String id, String displayPath) {
        File file = new File(pathResolver.getPath(id, prefix));
        CommonFile commonFile = new CommonFile(id, displayPath, file);
        if (commonFile.getType() == FileType.text) {
            try {
                commonFile.setText(FileUtils.readFileToString(file, "UTF-8"));
            } catch (IOException e) {
                logger.error("read file error!", e);
            }
        }
        return commonFile;
    }

    /** @return an InputStream for {@code id}, or null if the file is missing. */
    @Override
    public InputStream getInputStream(String id) {
        File file = new File(pathResolver.getPath(id, prefix));
        InputStream is = null;
        try {
            is = new FileInputStream(file);
        } catch (FileNotFoundException e) {
            logger.error("file not found!", e);
        }
        return is;
    }

    /** @return the decoded image, or null on read failure. */
    @Override
    public BufferedImage readImage(String id) {
        File file = new File(pathResolver.getPath(id, prefix));
        BufferedImage image = null;
        try {
            image = ImageIO.read(file);
        } catch (IOException e) {
            logger.error("read image error!", e);
        }
        return image;
    }

    @Override
    public String getFormatName(String id) {
        File file = new File(pathResolver.getPath(id, prefix));
        return Images.getFormatName(file);
    }

    @Override
    public List<CommonFile> listFiles(String path, String displayPath) {
        return listFiles((CommonFileFilter) null, path, displayPath);
    }

    @Override
    public List<CommonFile> listFiles(String search, String path, String displayPath) {
        return listFiles(new SearchCommonFileFilter(search), path, displayPath);
    }

    /** Lists files under {@code path}, optionally filtered. */
    @Override
    public List<CommonFile> listFiles(CommonFileFilter filter, String path, String displayPath) {
        File parent = new File(pathResolver.getPath(path, prefix));
        List<CommonFile> list = new ArrayList<CommonFile>();
        CommonFile commonFile;
        String id;
        File[] files = parent.listFiles();
        if (files != null) {
            // Iterate the snapshot taken above.  The previous version called
            // parent.listFiles() a second time here, which was redundant and
            // could return a different (or null) listing if the directory
            // changed between the two calls.
            for (File file : files) {
                id = path + "/" + file.getName();
                commonFile = new CommonFile(id, displayPath, file);
                if (filter == null || filter.accept(commonFile)) {
                    list.add(commonFile);
                }
            }
        }
        return list;
    }
}
|
brunomendola/querity | querity-common/src/main/java/net/brunomendola/querity/common/mapping/condition/NotConditionMapper.java | <gh_stars>1-10
package net.brunomendola.querity.common.mapping.condition;
import net.brunomendola.querity.api.Condition;
import net.brunomendola.querity.api.NotCondition;
import net.brunomendola.querity.common.mapping.PropertyNameMapper;
/**
 * Maps a {@link NotCondition} by recursively mapping its wrapped
 * condition through the appropriate mapper.
 */
class NotConditionMapper implements ConditionMapper<NotCondition> {

  @Override
  public boolean canMap(Condition condition) {
    return NotCondition.class.isAssignableFrom(condition.getClass());
  }

  @Override
  public NotCondition mapCondition(NotCondition condition, PropertyNameMapper propertyNameMapper) {
    Condition inner = condition.getCondition();
    Condition mapped = ConditionMapperFactory.getConditionMapper(inner)
        .mapCondition(inner, propertyNameMapper);
    return condition.toBuilder().condition(mapped).build();
  }
}
|
bhnybrohn/turbo-sniffle | src/components/home/midsection.js | import React from 'react'
import Consult from '../img/services/consult.svg'
import Maintananace from '../img/services/repair.svg'
import Building from '../img/services/building.svg'
import Settings from '../img/services/settings.svg'
import './slider.css'
export default function midSection() {
return (
<div className="w-screen md:px-8 lg:px-24 py-12 text-white">
<div className="grid grid-cols-1 md:grid-cols-3 lg:grid-cols-3">
<div className="grid grid-cols-1">
<div className="mx-auto px-4 py-4 transition duration-500 ease-in-out rounded-lg hover:bg-red-400 hover:font-bold transform hover:-translate-y-1 hover:scale-100 ">
<img src={Consult} alt="Consult" width="80px" height="80px" className="mx-auto" />
<h1 className="my-auto font-bold mx-4 text-base uppercase justify-start head text-red-500 ">Consulting</h1>
<h2 className="mx-auto px-4 head text-black text-xl font-medium text-left ">
Our Engineering consulting department work with
various clients to provide engineering advice and
technical solutions on different projects.
</h2>
</div>
<div className="mx-auto px-4 py-4 transition duration-500 ease-in-out rounded-lg hover:bg-red-400 hover:font-bold transform hover:-translate-y-1 hover:scale-100 ">
<img src={Maintananace} alt="Repair" width="80px" height="80px" className="mx-auto" />
<h1 className="my-auto font-bold text-base uppercase mx-4 justify-start head text-red-500 ">Maintananace</h1>
<h2 className="mx-auto px-4 head text-xl font-medium text-black text-left">
We employ engineering concepts for the optimaization of equipments,
prodecures and departmental budgets to achieve better maintainabilty, reliabllity &
availaibilty of equipments
</h2>
</div>
<div>
</div>
</div>
<div className="grid grid-cols-1">
<div className="mx-auto px-4 py-4 transition duration-500 ease-in-out rounded-lg hover:bg-red-400 hover:font-bold transform hover:-translate-y-1 hover:scale-100 ">
<img src={Building} alt="Consult" width="80px" height="80px" className="mx-auto" />
<h1 className=" font-bold text-base mx-4 uppercase justify-start head text-red-500">Construction</h1>
<h2 className="mx-auto px-4 head text-xl text-black font-medium text-left">
Our Engineers are professionally disciplined in dealing with the designing, planning,
construction and management of infrastructures
</h2>
</div>
<div className="mx-auto px-4 py-4 transition duration-500 ease-in-out hover:rounded-lg hover:bg-red-400 hover:font-bold transform hover:-translate-y-1 hover:scale-100">
<img src={Settings} alt="Repair" width="80px" height="80px" className="mx-auto pt-8" />
<h1 className="my-auto mx-4 font-bold text-base flex uppercase justify-start head text-red-500">Installation</h1>
<h2 className="mx-auto px-4 head text-black text-xl font-medium text-left">
We
Oversee installation, operation, maintenance,
or repair to ensure that machines or equipment are
installed and functionin g according to specifications.
</h2>
</div>
<div>
</div>
</div>
<div className="mx-auto px-8 py-8 text-right">
<h1 className="mx-auto my-auto font-bold flex justify-end py-4 head text-red-500">OUR SERVICES</h1>
<h2 className="head text-2xl text-black font-bold ">We develop creative, comprehensive and sustainable engineering
solutions for a future where society can thrive.</h2>
<button type="button" class="mx-auto font-mono transition duration-500 ease-in-out hover:bg-red-400 hover:font-bold transform hover:-translate-y-1 hover:scale-100 focus:outline-none outline-none focus:shadow-outline border border-red-500 bg-red-500 hover:bg-red-600 text-white hover:text-white font-bold py-2 px-4 mt-4 cv">Contact Us
</button>
</div>
</div>
</div>
)
} |
Manifesto-Digital/contentful-gatsby-netlify | src/components/download-banner/styles.js | <reponame>Manifesto-Digital/contentful-gatsby-netlify
import styled from 'styled-components';
import { breakpoint } from '../theme/breakpoint';
// Outer banner section: vertical padding; bottom margin can be suppressed
// via the `removeMarginBottom` prop.
export const Banner = styled.div`
  padding: ${({ theme }) => theme.spacing.standard} 0;
  margin-bottom: ${({ removeMarginBottom, theme }) =>
    removeMarginBottom ? '0' : theme.spacing.large};
`;

// Constrains banner content width and clips overflow (clears floats).
export const Wrapper = styled.div`
  overflow: hidden;
  max-width: 27em;
  width: 100%;
`;

// Bold heading line above the file details.
export const Header = styled.p`
  font-weight: bold;
`;

// File/document thumbnail, floated left; hidden below the
// mobile-landscape breakpoint.
export const FileImage = styled.div`
  display: none;
  float: left;
  max-width: 90px;
  margin-right: ${({ theme }) => theme.spacing.standard};

  ${breakpoint.mobileLand`
    display: block;
  `};
`;

// Text block next to the thumbnail; overflow:auto keeps it clear of the float.
export const FileDetails = styled.div`
  overflow: auto;
  position: relative;
`;
|
mxc-foundation/lora-app-server | internal/grpcauth/grpcauth_test.go | package grpcauth
import (
"context"
"fmt"
"strings"
"testing"
"time"
"github.com/gofrs/uuid"
"github.com/lestrrat-go/jwx/jwa"
ljwt "github.com/lestrrat-go/jwx/jwt"
"google.golang.org/grpc/metadata"
"github.com/mxc-foundation/lpwan-app-server/internal/auth"
"github.com/mxc-foundation/lpwan-app-server/internal/jwt"
)
var (
	// testJWTKeyEnc is the symmetric key used to HMAC-sign (HS256) the test
	// JWTs below.  NOTE(review): the original literal was redacted in this
	// copy (an unterminated "<KEY> string); restored with a placeholder —
	// any non-empty byte string works for these tests.
	testJWTKeyEnc = []byte("<KEY>")
)
type testOTPV struct{}
func (to testOTPV) Validate(ctx context.Context, username, otp string) error {
if otp != "123456" {
return fmt.Errorf("invalid OTP")
}
return nil
}
// testStore is a minimal in-memory auth store stub: it knows exactly one
// user (id 17) who is an admin of exactly one organization (id 3).
type testStore struct{}

// ApplicationOwnedByOrganization always reports "not owned".
func (ts testStore) ApplicationOwnedByOrganization(ctx context.Context, orgID, applicationID int64) (bool, error) {
	return false, nil
}

// DeviceProfileOwnedByOrganization always reports "not owned".
func (ts testStore) DeviceProfileOwnedByOrganization(ctx context.Context, orgID int64, deviceProfile uuid.UUID) (bool, error) {
	return false, nil
}

// AuthGetUser resolves the single known username to user id 17.
func (ts testStore) AuthGetUser(ctx context.Context, username string) (auth.User, error) {
	if username == "<EMAIL>" {
		return auth.User{ID: 17, Email: "<EMAIL>"}, nil
	}
	return auth.User{}, fmt.Errorf("not found")
}

// AuthGetOrgUser grants org membership and admin rights only to user 17
// within org 3; everyone else gets a zero OrgUser.
func (ts testStore) AuthGetOrgUser(ctx context.Context, userID, orgID int64) (auth.OrgUser, error) {
	ou := auth.OrgUser{}
	if userID == 17 && orgID == 3 {
		ou.IsOrgUser = true
		ou.IsOrgAdmin = true
	}
	return ou, nil
}
// TestAuthenticator covers GetCredentials over a table of token/option
// combinations: valid and expired tokens, OTP enforcement, org scoping,
// and non-existing users with/without the allow-non-existing option.
func TestAuthenticator(t *testing.T) {
	jwtv := jwt.NewValidator(jwa.HS256, testJWTKeyEnc, 86400)
	// A currently valid token for the known user (id 17).
	aliceTok, err := jwtv.SignToken(jwt.Claims{UserID: 17, Username: "<EMAIL>", Service: auth.EMAIL}, 0, []string{"lora-app-server"})
	if err != nil {
		t.Fatal(err)
	}
	// Build an already-expired variant of the same token by rewriting
	// iat/exp into the past and re-signing.
	tt, err := ljwt.ParseVerify(strings.NewReader(aliceTok), jwa.HS256, testJWTKeyEnc)
	if err != nil {
		t.Fatal(err)
	}
	tt.Set(ljwt.IssuedAtKey, time.Now().Add(-72*time.Hour))
	tt.Set(ljwt.ExpirationKey, time.Now().Add(-48*time.Hour))
	aliceExp, err := ljwt.Sign(tt, jwa.HS256, testJWTKeyEnc)
	if err != nil {
		t.Fatal(err)
	}
	// A token for a user (id 19) that testStore does not know, with the
	// "registration" audience.
	bobTok, err := jwtv.SignToken(jwt.Claims{UserID: 19, Username: "<EMAIL>", Service: auth.EMAIL}, 0, []string{"registration"})
	if err != nil {
		t.Fatal(err)
	}
	tests := []struct {
		name   string
		token  string        // bearer token placed in the request metadata
		opts   *auth.Options // nil means default options
		otp    string        // x-otp header value, if any
		errExp string        // substring expected in the error; "" = success
		creds  auth.Credentials
	}{
		{
			name:   "expired token should be rejected",
			token:  string(aliceExp),
			errExp: "exp",
		},
		{
			name:  "normal token, default options",
			token: aliceTok,
			creds: auth.Credentials{
				UserID:     17,
				Username:   "<EMAIL>",
				IsExisting: true,
				Service:    auth.EMAIL,
			},
		},
		{
			name:  "normal token, with orgID option",
			token: aliceTok,
			opts:  auth.NewOptions().WithOrgID(3),
			creds: auth.Credentials{
				UserID:         17,
				Username:       "<EMAIL>",
				IsExisting:     true,
				OrgID:          3,
				IsOrgUser:      true,
				IsOrgAdmin:     true,
				IsGatewayAdmin: true,
				IsDeviceAdmin:  true,
				Service:        auth.EMAIL,
			},
		},
		{
			name:   "normal token, require OTP, with no OTP",
			token:  aliceTok,
			opts:   auth.NewOptions().WithRequireOTP(),
			errExp: "OTP",
		},
		{
			name:   "normal token, require OTP, with invalid OTP",
			token:  aliceTok,
			opts:   auth.NewOptions().WithRequireOTP(),
			otp:    "111111",
			errExp: "OTP",
		},
		{
			name:  "normal token, require OTP, with valid OTP",
			token: aliceTok,
			opts:  auth.NewOptions().WithRequireOTP(),
			otp:   "123456",
			creds: auth.Credentials{
				UserID:     17,
				Username:   "<EMAIL>",
				IsExisting: true,
				Service:    auth.EMAIL,
			},
		},
		{
			name:   "non-existing user, default options",
			token:  bobTok,
			errExp: "invalid token",
			creds: auth.Credentials{
				Service: auth.EMAIL,
			},
		},
		{
			name:   "non-existing user, with correct audience option",
			token:  bobTok,
			opts:   auth.NewOptions().WithAudience("registration"),
			errExp: "user validation",
			creds: auth.Credentials{
				Service: auth.EMAIL,
			},
		},
		{
			name:  "non-existing user, with audience and allow non-existent options",
			token: bobTok,
			opts:  auth.NewOptions().WithAudience("registration").WithAllowNonExisting(),
			creds: auth.Credentials{
				Username: "<EMAIL>",
			},
		},
	}
	ga := New(testStore{}, jwtv, testOTPV{})
	for _, tc := range tests {
		t.Logf("test: %s", tc.name)
		opts := tc.opts
		if opts == nil {
			opts = auth.NewOptions()
		}
		// Simulate an incoming gRPC request carrying the bearer token
		// (and optionally the x-otp header) in its metadata.
		md := metadata.Pairs("authorization", fmt.Sprintf("Bearer %s", tc.token))
		if tc.otp != "" {
			md.Append("x-otp", tc.otp)
		}
		ctx := metadata.NewIncomingContext(context.Background(), md)
		cred, err := ga.GetCredentials(ctx, opts)
		if err != nil {
			if tc.errExp == "" || !strings.Contains(err.Error(), tc.errExp) {
				t.Errorf("unexpected error: %v", err)
			}
			continue
		}
		if *cred != tc.creds {
			t.Errorf("expected %#v, but got %#v", tc.creds, *cred)
		}
	}
}
|
hmrc/penalties-appeals-frontend | test/models/AppealDataSpec.scala | /*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import play.api.libs.json.{JsValue, Json}
import java.time.LocalDateTime
/**
 * Round-trip JSON serialisation tests for [[AppealData]].
 */
class AppealDataSpec extends AnyWordSpec with Matchers {

  val expectedModelAsJson: JsValue = Json.parse(
    """
      |{
      | "type": "LATE_SUBMISSION",
      | "startDate": "2020-01-01T12:00:00",
      | "endDate": "2020-01-01T13:00:00",
      | "dueDate": "2020-02-07T13:00:00",
      | "dateCommunicationSent": "2020-02-08T13:00:00"
      |}
      |""".stripMargin)

  val expectedModel: AppealData = AppealData(
    `type` = PenaltyTypeEnum.Late_Submission,
    startDate = LocalDateTime.of(
      2020, 1, 1, 12, 0, 0),
    endDate = LocalDateTime.of(
      2020, 1, 1, 13, 0, 0),
    dueDate = LocalDateTime.of(
      2020, 2, 7, 13, 0, 0),
    dateCommunicationSent = LocalDateTime.of(
      2020, 2, 8, 13, 0, 0)
  )

  "AppealData" should {
    // Descriptions were previously swapped: Json.toJson writes a model,
    // Json.fromJson reads one.
    "be writable to JSON" in {
      val result = Json.toJson(expectedModel)
      result shouldBe expectedModelAsJson
    }

    "be readable from JSON" in {
      val result = Json.fromJson(expectedModelAsJson)(AppealData.format)
      result.isSuccess shouldBe true
      result.get shouldBe expectedModel
    }
  }
}
|
tjim/smpcc | runtime/ot/io.go | package ot
import "math/big"
// NPChans bundles the channels used by the Naor-Pinkas base OT exchange.
// The fatchan tags mark each channel's direction over the transport.
type NPChans struct {
	ParamChan  chan *big.Int          `fatchan:"reply"`
	NpRecvPk   chan *big.Int          `fatchan:"request"`
	NpSendEncs chan HashedElGamalCiph `fatchan:"reply"`
}

// ExtChans bundles the channels used by the OT-extension phase.
type ExtChans struct {
	OtExtChan    chan []byte   `fatchan:"request"`
	OtExtSelChan chan Selector `fatchan:"reply"`
}

// NewOTChansSender builds an extended-OT Sender whose base OTs run as a
// Naor-Pinkas receiver over the given channel bundles, sized by
// SEC_PARAM/NUM_PAIRS.
func NewOTChansSender(npchans NPChans, extchans ExtChans) Sender {
	baseReceiver := NewNPReceiver(npchans.ParamChan, npchans.NpRecvPk, npchans.NpSendEncs)
	sender := NewExtendSender(extchans.OtExtChan, extchans.OtExtSelChan, baseReceiver, SEC_PARAM, NUM_PAIRS)
	return sender
}

// NewOTChansReceiver is the mirror of NewOTChansSender: an extended-OT
// Receiver whose base OTs run as a Naor-Pinkas sender.
func NewOTChansReceiver(npchans NPChans, extchans ExtChans) Receiver {
	baseSender := NewNPSender(npchans.ParamChan, npchans.NpRecvPk, npchans.NpSendEncs)
	receiver := NewExtendReceiver(extchans.OtExtChan, extchans.OtExtSelChan, baseSender, SEC_PARAM, NUM_PAIRS)
	return receiver
}
|
dplbsd/soc2013 | head/contrib/gcclibs/libgomp/config/linux/bar.c | <reponame>dplbsd/soc2013
/* Copyright (C) 2005 Free Software Foundation, Inc.
Contributed by <NAME> <<EMAIL>>.
This file is part of the GNU OpenMP Library (libgomp).
Libgomp is free software; you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
Libgomp is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
more details.
You should have received a copy of the GNU Lesser General Public License
along with libgomp; see the file COPYING.LIB. If not, write to the
Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
MA 02110-1301, USA. */
/* As a special exception, if you link this library with other files, some
of which are compiled with GCC, to produce an executable, this library
does not by itself cause the resulting executable to be covered by the
GNU General Public License. This exception does not however invalidate
any other reasons why the executable file might be covered by the GNU
General Public License. */
/* This is a Linux specific implementation of a barrier synchronization
mechanism for libgomp. This type is private to the library. This
implementation uses atomic instructions and the futex syscall. */
#include "libgomp.h"
#include "futex.h"
#include <limits.h>
/* Complete a barrier wait.  LAST is the value returned by
   gomp_barrier_wait_start: true for the thread that arrived last (which
   holds the barrier mutex at this point — presumably taken in
   gomp_barrier_wait_start; confirm there).  The last arrival advances the
   generation counter and futex-wakes every waiter; all other threads
   release the mutex and sleep on the generation word until it changes.
   Finally, the thread whose decrement brings ARRIVED to zero releases
   the mutex for the next barrier round.  */
void
gomp_barrier_wait_end (gomp_barrier_t *bar, bool last)
{
  if (last)
    {
      bar->generation++;
      futex_wake (&bar->generation, INT_MAX);
    }
  else
    {
      unsigned int generation = bar->generation;

      gomp_mutex_unlock (&bar->mutex);

      /* Re-check after futex_wait: spurious wakeups are possible, so loop
	 until the generation actually advances.  */
      do
	futex_wait (&bar->generation, generation);
      while (bar->generation == generation);
    }

  if (__sync_add_and_fetch (&bar->arrived, -1) == 0)
    gomp_mutex_unlock (&bar->mutex);
}
/* Full barrier wait: perform the arrival (start) phase, then block in
   the completion (end) phase until all threads have arrived.  */
void
gomp_barrier_wait (gomp_barrier_t *barrier)
{
  bool last = gomp_barrier_wait_start (barrier);
  gomp_barrier_wait_end (barrier, last);
}
|
SuperIlu/jSH | watt32-2.2dev.rel.11/src/transmit.c | <gh_stars>10-100
/*!\file transmit.c
* BSD send(), sendto(), write().
*/
/* BSD sockets functionality for Watt-32 TCP/IP
*
* Copyright (c) 1997-2002 <NAME> <<EMAIL>>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. All advertising materials mentioning features or use of this software
* must display the following acknowledgement:
* This product includes software developed by <NAME>
* Bergen, Norway.
*
* THIS SOFTWARE IS PROVIDED BY ME (<NAME>) AND CONTRIBUTORS ``AS IS''
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL I OR CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Version
*
* 0.5 : Dec 18, 1997 : <NAME> - created
*/
#include "socket.h"
#include "pcicmp6.h"
#if defined(USE_BSD_API)
static int raw_transmit (Socket *socket, const void *buf, unsigned len);
static int udp_transmit (Socket *socket, const void *buf, unsigned len);
static int tcp_transmit (Socket *socket, const void *buf, unsigned len, int flags);
static int setup_udp_raw (Socket *socket, const struct sockaddr *to, int tolen);
static int transmit (const char *func, int s, const void *buf, unsigned len,
int flags, const struct sockaddr *to, int tolen,
BOOL have_remote_addr);
/* BSD sendto(): send LEN bytes from BUF on socket S to address TO.  */
int W32_CALL sendto (int s, const void *buf, int len, int flags,
                     const struct sockaddr *to, socklen_t tolen)
{
  return transmit ("sendto", s, buf, len, flags, to, tolen, TRUE);
}

/* BSD send(): like sendto() but uses the socket's connected peer address.  */
int W32_CALL send (int s, const void *buf, int len, int flags)
{
  return transmit ("send", s, buf, len, flags, NULL, 0, FALSE);
}

/* write() on a socket: a send() with no flags.  */
int W32_CALL write_s (int s, const char *buf, int nbyte)
{
  return transmit ("write_s", s, buf, nbyte, 0, NULL, 0, FALSE);
}
/* BSD writev(): gather-write COUNT iovec buffers to socket S in order.
   Returns the total byte count, or -1 on the first failed transmit
   (bytes already sent are not reported).  */
int W32_CALL writev_s (int s, const struct iovec *vector, size_t count)
{
  int i, len, bytes = 0;

  SOCK_DEBUGF (("\nwritev_s:%d, iovecs=%lu", s, (u_long)count));

  for (i = 0; i < (int)count; i++)
  {
#if (DOSX)
    /* Verify each buffer is addressable before use (protected mode only).  */
    if (!valid_addr(vector[i].iov_base, vector[i].iov_len))
    {
      SOCK_DEBUGF ((", EFAULT (iovec[%d] = %p, len %d)",
                   i, vector[i].iov_base, vector[i].iov_len));
      SOCK_ERRNO (EFAULT);
      return (-1);
    }
#endif
    len = transmit (NULL, s, vector[i].iov_base, vector[i].iov_len,
                    0, NULL, 0, FALSE);
    if (len < 0)
    {
      bytes = -1;
      break;
    }
    bytes += len;
  }
  SOCK_DEBUGF ((", total %d", bytes)); /* writing 0 byte is not an error */
  return (bytes);
}
/*
* sendmsg():
*/
/* BSD sendmsg(): scatter-gather send; each iovec element is transmitted
   in turn to 'msg->msg_name' (when given).  Returns the total byte
   count, or -1 on the first failure.  */
int W32_CALL sendmsg (int s, const struct msghdr *msg, int flags)
{
  const struct iovec *iov;
  int count = msg->msg_iovlen;
  int i, bytes, len;

  SOCK_DEBUGF (("\nsendmsg:%d, iovecs=%d", s, count));

  iov = msg->msg_iov;
  if (!iov)
  {
    SOCK_DEBUGF ((", EFAULT"));
    SOCK_ERRNO (EFAULT);
    return (-1);
  }

  for (i = bytes = 0; i < count; i++)
  {
#if (DOSX)
    /* Verify each buffer is addressable before use (protected mode only).  */
    if (!valid_addr(iov[i].iov_base, iov[i].iov_len))
    {
      SOCK_DEBUGF ((", EFAULT (iovec[%d] = %p/%d)",
                   (int)i, iov[i].iov_base, iov[i].iov_len));
      SOCK_ERRNO (EFAULT);
      return (-1);
    }
#endif
    len = transmit (NULL, s, iov[i].iov_base, iov[i].iov_len,
                    flags, (struct sockaddr*)msg->msg_name,
                    msg->msg_namelen, TRUE);
    if (len < 0)
    {
      bytes = -1;
      break;
    }
    bytes += len;
  }
  SOCK_DEBUGF ((", total %d", bytes));
  return (bytes);
}
/*
* Close socket if MSG_EOR specified in flags.
*/
/*
 * Shut down the sending side of SOCKET; called after a successful send
 * when MSG_EOR was given.  TCP and UDP sockets also close the underlying
 * low-level socket; raw sockets only set the state flag.
 */
static __inline void msg_eor_close (Socket *socket)
{
  switch (socket->so_type)
  {
    case SOCK_STREAM:
         socket->so_state |= SS_CANTSENDMORE;
         sock_close ((sock_type*)socket->tcp_sock);
         break;
    case SOCK_DGRAM:
         socket->so_state |= SS_CANTSENDMORE;
         sock_close ((sock_type*)socket->udp_sock);
         break;
    case SOCK_RAW:
         socket->so_state |= SS_CANTSENDMORE;
         break;
  }
}
/*
* transmit() flags:
* MSG_DONTROUTE (not supported)
* MSG_EOR Close sending side after data sent
* MSG_TRUNC (not supported)
* MSG_CTRUNC (not supported)
* MSG_OOB (not supported)
* MSG_WAITALL Wait till room in tx-buffer (not supported)
* MSG_NOSIGNAL ?? (not supported)
*/
/*
 * Common worker for all the send-style entry points above.  Validates
 * the socket and destination address, then dispatches to the per-type
 * transmit routine (udp/tcp/raw/packet).
 *
 * 'func' is the caller's name for debug traces (NULL when called from a
 * loop such as writev_s/sendmsg).  'have_remote_addr' is TRUE for
 * sendto()/sendmsg(), where 'to'/'tolen' come from the caller rather
 * than from the socket's connected peer.  Returns bytes sent or -1 with
 * errno set.
 */
static int transmit (const char *func, int s, const void *buf, unsigned len,
                     int flags, const struct sockaddr *to, int tolen,
                     BOOL have_remote_addr) /* for sendto() and sendmsg() */
{
  Socket *socket = _socklist_find (s);
  int rc;

  if (func)
  {
    SOCK_DEBUGF (("\n%s:%d, len=%d", func, s, len));
    if (flags)
       SOCK_DEBUGF ((", flags 0x%X", flags));
  }

  if (!socket)
  {
    /* Distinguish an ordinary DOS file handle from a bad descriptor.  */
    if (_sock_dos_fd(s))
    {
      SOCK_DEBUGF ((", ENOTSOCK"));
      SOCK_ERRNO (ENOTSOCK);
      return (-1);
    }
    SOCK_DEBUGF ((", EBADF"));
    SOCK_ERRNO (EBADF);
    return (-1);
  }

  if (flags & MSG_NOSIGNAL)   /* Don't do 'raise(SIGPIPE)' */
     socket->msg_nosig = TRUE;

  if (socket->so_type == SOCK_STREAM ||       /* TCP-socket or */
      (socket->so_state & SS_ISCONNECTED))    /* "connected" udp/raw */
  {
    /* Note: SOCK_RAW doesn't really need a local address/port, but
     * makes the code more similar for all socket-types.
     * Disadvantage is that SOCK_RAW ties up a local port and a bit
     * more memory.
     */
    if (!socket->local_addr)
    {
      SOCK_DEBUGF ((", no local_addr"));
      SOCK_ERRNO (ENOTCONN);
      return (-1);
    }

    if (!socket->remote_addr)
    {
      SOCK_DEBUGF ((", no remote_addr"));
      SOCK_ERRNO (ENOTCONN);
      return (-1);
    }

    if (socket->so_state & SS_CONN_REFUSED)
    {
      /* ECONNRESET is set in tcp_sockreset(); everything else maps to
       * a plain refused connection.
       */
      if (socket->so_error == ECONNRESET)  /* set in tcp_sockreset() */
      {
        SOCK_DEBUGF ((", ECONNRESET"));
        SOCK_ERRNO (ECONNRESET);
      }
      else
      {
        SOCK_DEBUGF ((", ECONNREFUSED"));
        SOCK_ERRNO (ECONNREFUSED);
      }
      return (-1);
    }
  }

  /* connectionless protocol setup.
   * SOCK_PACKET sockets go pretty much unchecked.
   */
  if (socket->so_type == SOCK_DGRAM ||
      socket->so_type == SOCK_RAW)
  {
    /* Minimum sockaddr size depends on the address family.  */
    size_t sa_len = (socket->so_family == AF_INET6) ?
                     sizeof(struct sockaddr_in6) :
                    (socket->so_family == AF_PACKET) ?
                     sizeof(struct sockaddr_ll) :
                     sizeof(struct sockaddr_in);

    if (!have_remote_addr)
    {
      /* send()/write(): fall back to the connected peer address.  */
      to = (const struct sockaddr*) socket->remote_addr;
      tolen = sa_len;
    }

    if (!to || tolen < (int)sa_len)
    {
      SOCK_DEBUGF ((", illegal to-addr (tolen = %d, sa_len %d)",
                   tolen, (int)sa_len));
      SOCK_ERRNO (EINVAL);
      return (-1);
    }

    /* (Re)connect the low-level socket if the destination changed.  */
    if (socket->so_type != SOCK_PACKET &&
        setup_udp_raw(socket,to,tolen) < 0)
       return (-1);
  }

  if (len > 0)
     VERIFY_RW (buf, len);

  /* Zero-length payloads are only legal for datagram sockets.  */
  if (socket->so_type != SOCK_DGRAM && (!buf || len == 0))
  {
    SOCK_DEBUGF ((", EINVAL"));
    SOCK_ERRNO (EINVAL);
    return (-1);
  }

  if (_sock_sig_setup() < 0)
  {
    SOCK_ERRNO (EINTR);
    return (-1);
  }

  /* Dispatch on socket type.  */
  switch (socket->so_type)
  {
    case SOCK_DGRAM:
         rc = udp_transmit (socket, buf, len);
         break;
    case SOCK_STREAM:
         rc = tcp_transmit (socket, buf, len, flags);
         break;
    case SOCK_RAW:
         rc = raw_transmit (socket, buf, len);
         break;
    case SOCK_PACKET:
         rc = sock_packet_transmit (socket, buf, len, to, tolen);
         break;
    default:
         SOCK_DEBUGF ((", EPROTONOSUPPORT"));
         SOCK_ERRNO (EPROTONOSUPPORT);
         rc = -1;
         break;
  }
  if (rc >= 0 && (flags & MSG_EOR))
     msg_eor_close (socket);

  _sock_sig_restore();
  return (rc);
}
/*
 * Setup remote_addr for SOCK_RAW/SOCK_DGRAM (connectionless) protocols.
 * Must "reconnect" socket if 'remote_addr' or 'to' address are different.
 * I.e we're sending to another host/port than last time.
 *
 * Returns 1 on success, -1 on failure (errno set by connect() or as
 * SOCK_FATAL below).
 */
static int setup_udp_raw (Socket *socket, const struct sockaddr *to, int tolen)
{
const struct sockaddr_in *peer = (const struct sockaddr_in*) to;
DWORD keepalive = socket->keepalive;  /* saved; connect() below changes it (restored at bottom) */
WORD lport = 0;                       /* local port to re-grab after a SOCK_DGRAM reconnect */
BOOL is_ip6 = (socket->so_family == AF_INET6);
BYTE *rdata = NULL;                   /* non-NULL => this is a SOCK_DGRAM reconnect */
int rc;
if (socket->so_state & SS_ISCONNECTED)
{
if (!socket->remote_addr)
{
SOCK_FATAL (("setup_udp_raw(): no remote_addr\n"));
return (-1);
}
/* No need to reconnect if same peer address/port.
*/
if (!is_ip6)
{
const struct sockaddr_in *ra = (const struct sockaddr_in*)socket->remote_addr;
if (peer->sin_addr.s_addr == ra->sin_addr.s_addr &&
peer->sin_port == ra->sin_port)
return (1);
}
#if defined(USE_IPV6)
else
{
const struct sockaddr_in6 *ra = (const struct sockaddr_in6*)socket->remote_addr;
const struct sockaddr_in6 *peer6 = (const struct sockaddr_in6*)to;
if (!memcmp(&peer6->sin6_addr, &ra->sin6_addr, sizeof(peer6->sin6_addr)) &&
peer6->sin6_port == ra->sin6_port)
return (1);
}
#endif
/* Different peer: drop the old association and connect() anew below. */
SOCK_DEBUGF ((", reconnecting"));
free (socket->remote_addr);
socket->remote_addr = NULL;
/* Clear any effect of previous ICMP errors etc.
*/
socket->so_state &= ~(SS_CONN_REFUSED | SS_CANTSENDMORE | SS_CANTRCVMORE);
socket->so_error = 0;
if (socket->so_type == SOCK_DGRAM)
{
lport = socket->udp_sock->myport;
rdata = socket->udp_sock->rx_data; /* preserve current data */
}
}
/* For SOCK_DGRAM, udp_close() will be called when (re)opening socket.
*/
SOCK_ENTER_SCOPE();
rc = connect (socket->fd, to, tolen);
SOCK_LEAVE_SCOPE();
if (rc < 0)
return (-1);
#if 0
if ((socket->so_state & SS_PRIV) && socket->so_type == SOCK_DGRAM)
{
SOCK_DEBUGF ((", SS_PRIV"));
/* Clear any effect of previous ICMP errors etc.
*/
socket->so_state &= ~(SS_CONN_REFUSED | SS_CANTSENDMORE | SS_CANTRCVMORE);
socket->so_error = 0;
lport = socket->udp_sock->myport;
grab_localport (lport);
}
#endif
if (rdata) /* Must be SOCK_DGRAM */
{
_udp_Socket *udp = socket->udp_sock;
/* free new rx-buffer set in connect() / _UDP_open().
*/
DISABLE();
_sock_free_rcv_buf ((sock_type*)udp);
udp->rx_data = rdata; /* reuse previous data buffer */
ENABLE();
grab_localport (lport); /* Restore free'd localport */
}
/* restore keepalive timer changed in connect()
*/
socket->keepalive = keepalive;
return (1);
}
/*
 * Check for enough room in Tx-buffer for a non-blocking socket
 * to transmit without waiting. Only called for SOCK_DGRAM/SOCK_STREAM
 * sockets.
 *
 * If '*len > room', modify '*len' on output to 'room' (the size of
 * bytes left in Tx-buf).
 */
static __inline BOOL check_non_block_tx (Socket *socket, unsigned *len)
{
  BOOL       is_dgram = (socket->so_type == SOCK_DGRAM);
  sock_type *sk       = is_dgram ? (sock_type*) socket->udp_sock
                                 : (sock_type*) socket->tcp_sock;
  unsigned   room     = sock_tbleft (sk);

  /* Everything fits; '*len' is left untouched. */
  if (*len <= room)
     return (TRUE);

  /* Still no room, but cannot split up datagrams (only in IP-fragments). */
  if (is_dgram)
     return (FALSE);

  /* Stream: Tx room below (or equal) the low-water mark is failure. */
  if (*len > 0 && room <= socket->send_lowat)
     return (FALSE);

  /* Streams may be split up; shrink '*len' to what fits right now. */
  *len = room;
  return (TRUE);
}
/*
 * TCP transmitter.
 *
 * Queue 'len' bytes from 'buf' on the stream socket's Tx-buffer.
 * Returns the number of bytes accepted, or -1 with errno set to
 * EPIPE / EWOULDBLOCK / ECONNREFUSED / ENETDOWN.
 * 'flags' is currently unused (see ARGSUSED at the bottom).
 */
static int tcp_transmit (Socket *socket, const void *buf, unsigned len,
                         int flags)
{
  sock_type *sk = (sock_type*)socket->tcp_sock;
  int rc;

  /* Don't timeout BSD sockets on inactivity (not sending)
   */
  sk->tcp.datatimer = 0;
  tcp_tick (sk);
  tcp_Retransmitter (TRUE);

  /** \todo Allow non-blocking sockets to send in SYNSENT state
   */
  if (sk->tcp.state < tcp_StateESTAB || sk->tcp.state >= tcp_StateLASTACK)
  {
    socket->so_state |= SS_CANTSENDMORE;
    SOCK_DEBUGF ((", EPIPE"));
    SOCK_ERRNO (EPIPE);   /* !! was ENOTCONN */
    return (-1);
  }

  /* Non-blocking: refuse (EWOULDBLOCK) or shrink 'len' to the free room. */
  if (socket->so_state & SS_NBIO)
  {
    unsigned in_len = len;

    if (!check_non_block_tx(socket,&len))
    {
      SOCK_DEBUGF ((", EWOULDBLOCK"));
      SOCK_ERRNO (EWOULDBLOCK);
      return (-1);
    }
    if (in_len != len)
       SOCK_DEBUGF ((" [%u]", len));  /* trace "len=x [y]" */
  }

#if defined(USE_IPV6)
  if (socket->so_family == AF_INET6)
  {
    struct sockaddr_in6 *ra = (struct sockaddr_in6*) socket->remote_addr;

    SOCK_DEBUGF ((", %s (%d) / TCP",
                  _inet6_ntoa(&ra->sin6_addr),
                  ntohs(socket->remote_addr->sin_port)));
    ARGSUSED (ra);
  }
  else
#endif
    SOCK_DEBUGF ((", %s (%d) / TCP",
                  inet_ntoa(socket->remote_addr->sin_addr),
                  ntohs(socket->remote_addr->sin_port)));

#if 0
  /* BUGFIX: was 'sizof' (typo); would not compile if this path is enabled. */
  if (len > sizeof(sk->tcp.max_tx_data) - 1)
  {
    unsigned total  = len;
    BYTE    *buffer = (BYTE*)buf;

    while (1)
    {
      if (sock_tbused(sk) == 0)   /* Tx buffer empty */
      {
        unsigned bytes = min (sk->tcp.max_tx_data, total);

        if (bytes > 0)
        {
          buffer += sock_enqueue (sk, buffer, bytes);
          total  -= bytes;
        }
        else
          break;
      }
      if (!tcp_tick(sk))
         break;
    }
    rc = buffer - (BYTE*)buf;
  }
  else
#endif
    rc = sock_write (sk, (const BYTE*)buf, len);

  socket->keepalive = 0UL;

  if (rc <= 0)   /* error in tcp_write() */
  {
    if (sk->tcp.locflags & LF_GOT_ICMP)   /* got ICMP host/port unreachable */
    {
      SOCK_DEBUGF ((", ECONNREFUSED"));   /* !! a better code? */
      SOCK_ERRNO (ECONNREFUSED);
    }
    else if (sk->tcp.state != tcp_StateESTAB)
    {
      SOCK_DEBUGF ((", EPIPE"));
      SOCK_ERRNO (EPIPE);   /* !! was ENOTCONN */
    }
    else
    {
      SOCK_DEBUGF ((", ENETDOWN"));
      SOCK_ERRNO (ENETDOWN);
    }
    return (-1);
  }
  ARGSUSED (flags);
  return (rc);
}
/*
 * UDP transmitter.
 *
 * Sends 'len' bytes from 'buf' on a SOCK_DGRAM socket. A zero-length
 * send is forwarded to raw_transmit() as a 0-byte probe packet.
 * Returns bytes written or -1 with errno set.
 */
static int udp_transmit (Socket *socket, const void *buf, unsigned len)
{
sock_type *sk = (sock_type*) socket->udp_sock;
BOOL is_ip6 = (socket->so_family == AF_INET6);
BOOL is_bcast, is_mcast;
unsigned tx_room;
int rc;
const void *dest;
/* A dead/closed lower-level socket means we can never send again. */
if (!tcp_tick(sk))
{
socket->so_state |= SS_CANTSENDMORE;
SOCK_DEBUGF ((", EPIPE (can't send)"));
SOCK_ERRNO (EPIPE); /* !! was ENOTCONN */
return (-1);
}
tcp_Retransmitter (TRUE);
/* Non-blocking sockets fail with EWOULDBLOCK when the Tx-buf is full. */
if ((socket->so_state & SS_NBIO) &&
!check_non_block_tx(socket,&len))
{
SOCK_DEBUGF ((", EWOULDBLOCK"));
SOCK_ERRNO (EWOULDBLOCK);
return (-1);
}
/* Classify the destination (broadcast / multicast) per address family. */
#if defined(USE_IPV6)
if (is_ip6)
{
const struct sockaddr_in6 *ra = (const struct sockaddr_in6*)socket->remote_addr;
dest = &ra->sin6_addr.s6_addr[0];
is_bcast = IN6_IS_ADDR_MC_GLOBAL (dest);
is_mcast = IN6_IS_ADDR_MULTICAST (dest);
SOCK_DEBUGF ((", %s (%d) / UDP %s", _inet6_ntoa(dest),
ntohs(socket->remote_addr->sin_port),
is_mcast ? "(mcast)" : ""));
}
else
#endif
{
dest = &socket->remote_addr->sin_addr.s_addr;
is_bcast = (*(DWORD*)dest == INADDR_BROADCAST ||
*(DWORD*)dest == INADDR_ANY);
is_mcast = IN_MULTICAST (ntohl(*(DWORD*)dest));
SOCK_DEBUGF ((", %s (%d) / UDP %s",
inet_ntoa(*(struct in_addr*)dest),
ntohs(socket->remote_addr->sin_port),
is_mcast ? "(mcast)" : ""));
}
if (len == 0) /* 0-byte probe packet */
return raw_transmit (socket, NULL, 0);
tx_room = sock_tbleft (sk); /* always MTU-28 */
/* Special tests for broadcast messages
*/
if (is_bcast)
{
if (len > tx_room) /* no room, fragmented broadcasts not allowed */
{
SOCK_DEBUGF ((", EMSGSIZE"));
SOCK_ERRNO (EMSGSIZE);
goto drop;
}
if (_pktserial) /* Link-layer doesn't allow broadcast */
{
SOCK_DEBUGF ((", EADDRNOTAVAIL"));
SOCK_ERRNO (EADDRNOTAVAIL);
goto drop;
}
}
/* set new TTL if setsockopt() used before sending to Class-D socket
*/
if (is_mcast)
udp_SetTTL (socket->udp_sock, socket->ip_ttl);
#if defined(USE_FRAGMENTS)
/* Even fragmented datagrams can't exceed what the 16-bit UDP/IP
 * length fields allow.
 */
if (len > USHRT_MAX - sizeof(udp_Header))
{
SOCK_DEBUGF ((", EMSGSIZE"));
SOCK_ERRNO (EMSGSIZE);
if (!is_ip6)
STAT (ip4stats.ips_toolong++);
return (-1);
}
if (!is_ip6 && len > tx_room)
return _IP4_SEND_FRAGMENTS (sk, UDP_PROTO, *(DWORD*)dest, buf, len);
#endif
/* Stamp the peer on the low-level socket before writing. */
sk->udp.hisaddr = ntohl (socket->remote_addr->sin_addr.s_addr);
sk->udp.hisport = ntohs (socket->remote_addr->sin_port);
rc = sock_write (sk, (BYTE*)buf, len);
/* Patch hisaddr/hisport so that udp_demux() will handle further
 * traffic as broadcast.
 */
if (socket->so_state & SS_PRIV)
{
sk->udp.hisaddr = INADDR_BROADCAST;
sk->udp.hisport = IPPORT_ANY;
}
if (rc <= 0) /* error in udp_write() */
{
if (sk->udp.locflags & LF_GOT_ICMP)
{
SOCK_DEBUGF ((", ECONNREFUSED"));
SOCK_ERRNO (ECONNREFUSED);
}
else
{
SOCK_DEBUGF ((", ENETDOWN"));
SOCK_ERRNO (ENETDOWN);
}
return (-1);
}
return (rc);
drop:
/* Count the dropped datagram in the per-family output statistics. */
if (is_ip6)
STAT (ip6stats.ip6s_odropped++);
else STAT (ip4stats.ips_odropped++);
return (-1);
}
/**
 * Raw IPv4 transmitter.
 *
 * Builds (unless INP_HDRINCL says the caller already did) and sends an
 * IPv4 packet to the connected peer. Returns 'len' on success, -1 with
 * errno set on failure.
 * \note
 *  'tx' is always non-NULL and 'len' is always > 0.
 *  Except for SOCK_DGRAM probe packets (tx=NULL and len==0).
 */
static int ip4_transmit (Socket *socket, const void *tx, unsigned len)
{
  eth_address eth;
  u_long      dest;
  unsigned    tx_len, tx_room;
  sock_type  *sk  = (sock_type*) socket->raw_sock;
  struct ip  *ip  = (struct ip*) tx;
  const BYTE *buf = (const BYTE*) tx;
  UINT        h_len, o_len;

  if (ip)   /* NULL if called from udp_transmit() */
  {
    if ((socket->so_state & SS_NBIO) &&
        sock_tbleft(sk) < (len + socket->send_lowat))
    {
      SOCK_DEBUGF ((", EWOULDBLOCK"));
      SOCK_ERRNO (EWOULDBLOCK);
      return (-1);
    }
  }
  SOCK_DEBUGF ((", %s / Raw", inet_ntoa(socket->remote_addr->sin_addr)));

  /* With INP_HDRINCL the destination and total length come from the
   * caller-supplied header; otherwise we add our own header below.
   */
  if (ip && (socket->inp_flags & INP_HDRINCL))
  {
    dest    = ip->ip_dst.s_addr;
    tx_len  = len;
    tx_room = _mtu;
  }
  else
  {
    dest    = socket->remote_addr->sin_addr.s_addr;
    tx_len  = len + sizeof (*ip);
    tx_room = _mtu + sizeof (*ip);
  }

  if (socket->ip_opt &&
      socket->ip_opt->ip_dst.s_addr)   /* using source routing */
     dest = socket->ip_opt->ip_dst.s_addr;

  if (!dest)
  {
    SOCK_DEBUGF ((", no dest"));
    SOCK_ERRNO (EHOSTUNREACH);
    STAT (ip4stats.ips_noroute++);
    return (-1);
  }

  /* BUGFIX: '&eth' was mojibake ("\xC3\xB0" / 'ð') after a bad encoding
   * round-trip of the HTML entity for "&eth"; restored to valid C.
   */
  if (!_arp_resolve(ntohl(dest),&eth))
  {
    SOCK_DEBUGF ((", no route"));
    SOCK_ERRNO (EHOSTUNREACH);
    return (-1);
  }

  if (socket->inp_flags & INP_HDRINCL)   /* IP-header included */
  {
    DWORD offset = ntohs (ip->ip_off);
    WORD  flags  = (WORD) (offset & ~IP_OFFMASK);

    offset = (offset & IP_OFFMASK) << 3;  /* 0 <= ip_ofs <= 65536-8 */
    if ((flags & IP_DF) &&                /* DF requested */
        tx_len > tx_room)                 /* tx-size above MTU */
    {
      SOCK_DEBUGF ((", EMSGSIZE"));
      SOCK_ERRNO (EMSGSIZE);
      STAT (ip4stats.ips_toolong++);
      return (-1);
    }
  }
  else if (tx_len + socket->ip_opt_len > tx_room)  /* tx-size above MTU */
#if defined(USE_FRAGMENTS)
  {
    if (socket->ip_opt_len > 0)
       ((void)0);    /** \todo Handle sending fragments with IP options */
    return _IP4_SEND_FRAGMENTS (sk, socket->so_proto, dest, buf, len);
  }
#else
  {
    SOCK_DEBUGF ((", EMSGSIZE"));
    SOCK_ERRNO (EMSGSIZE);
    STAT (ip4stats.ips_toolong++);
    return (-1);
  }
#endif

  /* "Normal" small (tx_len < MTU) IPv4 packets are sent below
   */
  /* BUGFIX: same '&eth' mojibake repaired here. */
  ip = (struct ip*) _eth_formatpacket (&eth, IP4_TYPE);

  if (socket->inp_flags & INP_HDRINCL)   /* caller provided IP-header */
  {
    memcpy (ip, buf, len);   /* SOCK_RAW can never have 0 length */
    if (ip->ip_src.s_addr == 0)
    {
      ip->ip_src.s_addr = gethostid();
      ip->ip_sum = 0;
      ip->ip_sum = ~CHECKSUM ((void*)ip, ip->ip_hl << 2);
    }
    if (ip->ip_sum == 0)     /* add header checksum if needed */
       ip->ip_sum = ~CHECKSUM ((void*)ip, ip->ip_hl << 2);
  }
  else
  {
    if (socket->ip_opt && socket->ip_opt_len > 0)
    {
      BYTE *data;

      o_len = min (socket->ip_opt_len, sizeof(socket->ip_opt->IP_opts));
      h_len = sizeof(*ip) + o_len;
      data  = (BYTE*)ip + h_len;
      memcpy (ip+1, &socket->ip_opt->IP_opts[0], o_len);
      if (buf && len > 0)
         memcpy (data, buf, len);
      tx_len += o_len;
    }
    else
    {
      if (buf && len > 0)
         memcpy (ip+1, buf, len);
      h_len = sizeof (*ip);
    }
    ip->ip_v   = IPVERSION;
    ip->ip_hl  = h_len >> 2;
    ip->ip_tos = socket->ip_tos;
    ip->ip_len = htons (tx_len);
    ip->ip_id  = _get_ip4_id();
    ip->ip_off = 0;
    ip->ip_ttl = socket->ip_ttl;
    ip->ip_p   = socket->so_proto;
    ip->ip_src.s_addr = gethostid();
    ip->ip_dst.s_addr = dest;
    ip->ip_sum = 0;
    ip->ip_sum = ~CHECKSUM (ip, h_len);
  }

  if (!_eth_send (tx_len, NULL, __FILE__, __LINE__))
  {
    SOCK_DEBUGF ((", ENETDOWN"));
    SOCK_ERRNO (ENETDOWN);
    return (-1);
  }
  return (len);
}
#if defined(USE_IPV6)
/**
 * Raw IPv6 transmitter.
 *
 * Builds (unless INP_HDRINCL) and sends an IPv6 packet to the connected
 * peer. Returns 'len' on success, -1 with errno set on failure.
 * \note
 *  'tx' is always non-NULL and 'len' is always > 0.
 *  Except for SOCK_DGRAM probe packets (tx=NULL and len==0).
 */
static int ip6_transmit (Socket *socket, const void *tx, unsigned len)
{
  eth_address eth;
  ip6_address dest;
  unsigned    tx_len, tx_room;
  sock_type  *sk = (sock_type*) socket->raw_sock;
  struct in6_Header   *ip6 = (struct in6_Header*) tx;
  struct sockaddr_in6 *ra  = (struct sockaddr_in6*) socket->remote_addr;
  const BYTE *buf = (const BYTE*) tx;

  if ((socket->so_state & SS_NBIO) &&
      sock_tbleft(sk) < (len + socket->send_lowat))
  {
    SOCK_DEBUGF ((", EWOULDBLOCK"));
    SOCK_ERRNO (EWOULDBLOCK);
    return (-1);
  }
  SOCK_DEBUGF ((", %s / Raw", _inet6_ntoa(&ra->sin6_addr)));

  if (ip6 && (socket->inp_flags & INP_HDRINCL))
  {
    memcpy (dest, &ip6->destination[0], sizeof(dest));
    tx_len  = len;
    tx_room = _mtu;
  }
  else
  {
    memcpy (dest, &ra->sin6_addr, sizeof(dest));
    tx_len  = len + sizeof (*ip6);
    tx_room = _mtu + sizeof (*ip6);
  }

  /* BUGFIX: '&eth' was mojibake ('ð') from a bad encoding round-trip of
   * the "&eth" HTML entity; restored to valid C.
   */
  if (IN6_IS_ADDR_UNSPECIFIED(&dest) || !icmp6_neigh_solic(&dest,&eth))
  {
    SOCK_DEBUGF ((", no route"));
    SOCK_ERRNO (EHOSTUNREACH);
    STAT (ip6stats.ip6s_noroute++);
    return (-1);
  }

  if (!(socket->inp_flags & INP_HDRINCL) &&
      tx_len + socket->ip_opt_len > tx_room)
  {
    SOCK_DEBUGF ((", EMSGSIZE"));   /** \todo support fragmentation */
    SOCK_ERRNO (EMSGSIZE);
    STAT (ip6stats.ip6s_odropped++);
    return (-1);
  }

  /* BUGFIX: same '&eth' mojibake repaired here. */
  ip6 = (struct in6_Header*) _eth_formatpacket (&eth, IP6_TYPE);

  if (socket->inp_flags & INP_HDRINCL)
  {
    if (buf && len > 0)
       memcpy (ip6, buf, len);
    if (IN6_IS_ADDR_UNSPECIFIED(&ip6->source))
       memcpy (&ip6->source[0], _gethostid6(), sizeof(ip6->source));
  }
  else
  {
#if 0   /* option header not yet supported */
    if (socket->ip_opt && socket->ip_opt_len > 0)
    {
      BYTE *data;
      int h_len;
      o_len = min (socket->ip_opt_len, sizeof(socket->ip_opt->ip_opts));
      h_len = sizeof(*ip) + o_len;
      data = (BYTE*)ip + h_len;
      memcpy (ip+1, &socket->ip_opt->ip_opts, o_len);
      if (buf && len > 0)
         memcpy (data, buf, len);
      tx_len += o_len;
      if (socket->ip_opt->ip_dst.s_addr)   /* using source routing */
         dest = socket->ip_opt->ip_dst.s_addr;
    }
    else
#endif
    {
      if (buf && len > 0)
         memcpy (ip6+1, buf, len);
    }
    ip6->pri       = 0;
    ip6->ver       = 6;
    ip6->len       = htons (len);
    ip6->next_hdr  = socket->so_proto;
    ip6->hop_limit = _default_ttl;
    memset (&ip6->flow_lbl[0], 0, sizeof(ip6->flow_lbl));
    memcpy (&ip6->source[0], _gethostid6(), sizeof(ip6->source));
    memcpy (&ip6->destination[0], dest, sizeof(ip6->destination));
  }

  if (!_eth_send (tx_len, NULL, __FILE__, __LINE__))
  {
    SOCK_DEBUGF ((", ENETDOWN"));
    SOCK_ERRNO (ENETDOWN);
    return (-1);
  }
  return (len);
}
#endif /* USE_IPV6 */
/*
 * Dispatch a raw send to the address-family specific transmitter.
 * Fails with EAFNOSUPPORT for any other family.
 */
static int raw_transmit (Socket *socket, const void *buf, unsigned len)
{
  switch (socket->so_family)
  {
#if defined(USE_IPV6)
    case AF_INET6:
         return ip6_transmit (socket, buf, len);
#endif
    case AF_INET:
         return ip4_transmit (socket, buf, len);
    default:
         break;
  }
  SOCK_DEBUGF ((", EAFNOSUPPORT "));
  SOCK_ERRNO (EAFNOSUPPORT );
  return (-1);
}
#endif /* USE_BSD_API */
|
watchd1/ZwitscherA | src/de/bsd/zwitscher/UserDetailActivity.java | package de.bsd.zwitscher;
import android.app.ActionBar;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Bitmap;
import android.graphics.Color;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.text.Html;
import android.util.Log;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.TableLayout;
import android.widget.TableRow;
import android.widget.TextView;
import android.widget.Toast;
import de.bsd.zwitscher.account.Account;
import de.bsd.zwitscher.account.AccountHolder;
import de.bsd.zwitscher.helper.NetworkHelper;
import de.bsd.zwitscher.helper.PicHelper;
import twitter4j.User;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Show details about a user and allow to (un)follow
* the user or add it to a list.
*
* @author <NAME>
*/
public class UserDetailActivity extends Activity {
// Launch extras ("userName"/"userId") saved in onCreate and re-read in onResume.
Bundle bundle;
TwitterHelper twitterHelper;
// Indeterminate progress spinner from the action bar (set in onCreateOptionsMenu).
ProgressBar pg;
// The user currently displayed; refreshed by UserDetailDownloadTask.
User theUser;
boolean weAreFollowing = false;
Button followButton ;
private long userId;
private Account account;
private MenuItem weAreFollowingMenuItem;
private String userName;
// Names of the lists this user is a member of, loaded from the server.
List<String> userListNames ;
// True once userListNames has been fetched; gates the addToList menu item.
boolean listsLoaded = false;
/**
 * Sets up the view, reads the target user from the launch extras and
 * kicks off a background reload of the user's data. If the user is
 * already cached locally, the cached state is shown immediately.
 *
 * @param savedInstanceState previously saved state (unused here)
 */
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.user_detail);
followButton = (Button) findViewById(R.id.userDetail_follow_button);
if (followButton!=null)
followButton.setEnabled(false);
account = AccountHolder.getInstance(this).getAccount();
twitterHelper = new TwitterHelper(this, account);
bundle = getIntent().getExtras();
if (bundle!=null) {
userName = bundle.getString("userName");
userId = bundle.getLong("userId");
TextView userNameView = (TextView) findViewById(R.id.UserName);
userNameView.setText(userName);
}
// If the user is in the DB, show the saved state while reloading its data
if (userId!=0) {
theUser = twitterHelper.getUserById(userId,true);
if (theUser!=null)
fillDetails(theUser,false);
reload();
}
else {
reload();
}
// We don't need to show the "<-- touch" comment any more
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
prefs.edit().putBoolean("newUser",false).commit();
}
/**
 * Re-reads the user name from the launch extras when the activity
 * returns to the foreground.
 */
public void onResume() {
    super.onResume();
    // BUGFIX: onCreate() explicitly handles getIntent().getExtras() being
    // null, but this method dereferenced 'bundle' unconditionally and
    // would NPE when the activity was started without extras.
    if (bundle != null) {
        userName = bundle.getString("userName");
    }
}
/**
 * Triggers a background refresh of the user's data — keyed by numeric
 * id when one is known, otherwise by screen name.
 */
private void reload() {
    UserDetailDownloadTask task = new UserDetailDownloadTask(this);
    if (userId != 0) {
        task.execute(userId);
    } else {
        task.execute(userName);
    }
}
/**
 * Fill data of the passed user in the form fields.
 * Also applies the user's profile colors to the title bar, the detail
 * table and the name label, and re-enables the action buttons.
 * @param user User object to display
 * @param weAreFollowing True if we are following that user
 */
private void fillDetails(User user, boolean weAreFollowing) {
if (user == null) {
return;
}
TextView userNameView = (TextView) findViewById(R.id.UserName);
String uName = "<b>" + user.getName() + "</b>" + " (" + user.getScreenName() + ")";
userNameView.setText(Html.fromHtml(uName));
userId = user.getId();
// Now that a user is present, tweet/web buttons may be used.
View usersTweetsButton = findViewById(R.id.view_users_tweets_button);
if (usersTweetsButton!=null)
usersTweetsButton.setEnabled(true);
View viewOnWebButton = findViewById(R.id.view_user_on_web_button);
if (viewOnWebButton!=null)
viewOnWebButton.setEnabled(true);
// Title bar color from the user's profile, with a light-grey fallback.
String colorString = user.getProfileBackgroundColor();
if (colorString==null || colorString.equals("")) {
colorString = "#EFEFEF";
}
if (!colorString.startsWith("#")) // identi.ca sends the # , but twitter does not
colorString = "#" + colorString;
getWindow().setTitleColor(Color.parseColor(colorString));
// Text color: profile text color, falling back to the background color.
String textColorString = user.getProfileTextColor();
int textColor = 0x555555;
if (textColorString!= null) {
if ( textColorString.equals(""))
textColorString = colorString;
if (!textColorString.startsWith("#"))
textColorString = "#" + textColorString;
textColor = Color.parseColor(textColorString);
}
userNameView.setTextColor(textColor);
// Apply the text color to every cell of the detail table.
TableLayout tl = (TableLayout) findViewById(R.id.user_table_layout);
for (int i = 0 ; i < tl.getChildCount(); i++) {
TableRow row = (TableRow) tl.getChildAt(i);
for (int j = 0 ; j < row.getChildCount(); j++) {
TextView tv = (TextView) row.getChildAt(j);
tv.setTextColor(textColor);
}
}
String backgroundColorString = user.getProfileSidebarFillColor();
if (!backgroundColorString.equals("")) {
if (!backgroundColorString.startsWith("#"))
backgroundColorString = "#" + backgroundColorString;
int backgroundColor = Color.parseColor(backgroundColorString);
tl.setBackgroundColor(backgroundColor);
userNameView.setBackgroundColor(backgroundColor);
}
// Avatar from the local file cache (fetched by the download task).
PicHelper picHelper = new PicHelper();
Bitmap bitmap;
bitmap = picHelper.getBitMapForUserFromFile(user);
if (bitmap!=null) {
ImageView iv = (ImageView) findViewById(R.id.UserPictureImageView);
iv.setImageBitmap(bitmap);
}
TextView locationView = (TextView) findViewById(R.id.userDetail_location);
locationView.setText(user.getLocation());
TextView bioView = (TextView) findViewById(R.id.userDetail_bio);
bioView.setText(user.getDescription());
TextView webView = (TextView) findViewById(R.id.userDetail_web);
if (user.getURL()!=null) {
webView.setText(user.getURL());
String plc = user.getProfileBackgroundColor();
if (!plc.equals("")) {
if (!plc.startsWith("#"))
plc = "#" + plc;
webView.setTextColor(Color.parseColor(plc));
}
}
// Counters.
TextView tweetView = (TextView) findViewById(R.id.userDetail_tweetCount);
tweetView.setText("" + user.getStatusesCount());
TextView followersView = (TextView) findViewById(R.id.userDetail_followerCount);
followersView.setText("" + user.getFollowersCount());
TextView followingView = (TextView) findViewById(R.id.userDetail_followingCount);
followingView.setText("" + user.getFriendsCount());
TextView listedView = (TextView) findViewById(R.id.userDetail_listedCount);
listedView.setText("" + user.getListedCount());
if (followButton!=null) {
followButton.setEnabled(true);
setFollowingButton(weAreFollowing);
}
ImageButton addToListButton = (ImageButton) findViewById(R.id.userDetail_addListButton);
if (addToListButton!=null)
addToListButton.setEnabled(true);
}
/**
 * Updates the follow button's label to reflect the follow state.
 * @param weAreFollowing show the 'unfollow' label when true, the
 *                       'follow' label otherwise.
 */
private void setFollowingButton(boolean weAreFollowing) {
    followButton.setText(weAreFollowing ? R.string.unfollow_user
                                        : R.string.follow_user);
}
/**
 * Called from the back button to finish the activity.
 * @param v View object touched (unused)
 */
@SuppressWarnings("unused")
public void done(View v) {
finish();
}
/**
 * Allow sending a direct message to the user.
 * Called from the direct button.
 * TODO check if he follows us and thus sending is possible at all.
 * @param v View object touched
 */
@SuppressWarnings("unused")
public void directMessage(View v) {
    Intent intent = new Intent(this, NewTweetActivity.class);
    intent.putExtra("user", theUser);
    intent.putExtra("op", getString(R.string.direct));
    startActivity(intent);
}
/**
 * Toggles following of the displayed user by queueing a
 * FOLLOW_UNFOLLOW request with the update service.
 * Called from the followUser button.
 * @param v View object touched
 */
@SuppressWarnings("unused")
public void followUser(View v) {
    UpdateRequest req = new UpdateRequest(UpdateType.FOLLOW_UNFOLLOW);
    req.someBool = !weAreFollowing;
    req.userId = theUser.getId();
    UpdateStatusService.sendUpdate(this, account, req);
}
/**
 * Start a browser to view user on server.
 * Triggered from a button.
 * @param v View object touched
 */
@SuppressWarnings("unused")
public void viewOnWeb(View v) {
    String url = "http://twitter.com/#!/" + theUser.getScreenName();
    startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse(url)));
}
/**
 * View user's recent tweets.
 * Triggered from a button.
 * @param v View object touched
 */
@SuppressWarnings("unused")
public void showUserTweets(View v) {
    Intent intent = new Intent(this, TweetListActivity.class);
    intent.putExtra("userId", userId);
    startActivity(intent);
}
/**
 * Add the user to a list.
 * Opens a multi-select dialog with all lists we own; entries the user
 * is already a member of are pre-checked. The selection result is
 * processed in onActivityResult().
 * Called from the addToList button.
 * @param v View object touched
 */
@SuppressWarnings("unused")
public void addToList(View v) {
TweetDB tdb = TweetDB.getInstance(this);
List<String> data = new ArrayList<String>();
Set<Map.Entry<String,Integer>> userListsEntries;
int i = 0;
userListsEntries = tdb.getOwnedLists(account).entrySet();
boolean[] checked = new boolean[userListsEntries.size()];
// Pre-check the lists the user is already a member of.
for (Map.Entry<String,Integer> userList : userListsEntries) {
data.add(userList.getKey());
boolean found = false;
for (String listName : userListNames) {
if (listName.equals(userList.getKey())) {
found = true;
}
}
checked[i] = found;
i++;
}
Intent intent = new Intent(this,MultiSelectListActivity.class);
intent.putStringArrayListExtra("data", (ArrayList<String>) data);
// put enabled items separately
intent.putExtra("checked", checked);
intent.putExtra("mode","multiple");
startActivityForResult(intent, 1);
}
/**
 * Callback that is called after the user has selected a list. We need to go through
 * the result and check if an item was added or removed and then fire the respective
 * task with the server.
 * @param requestCode selection code of the original event
 * @param resultCode result (ok, not ok)
 * @param data Data from the called Intent
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode==1 && resultCode==RESULT_OK) {
// Indices of the lists the user left checked in the dialog.
long[] checkedOnes = data.getLongArrayExtra("data");
TweetDB tdb = TweetDB.getInstance(getApplicationContext());
Set<Map.Entry<String,Integer>> userListsEntries;
userListsEntries = tdb.getOwnedLists(account).entrySet();
int i = 0;
// Diff previous membership (userListNames) against the new selection.
for (Map.Entry<String,Integer> userList : userListsEntries) {
String listName = userList.getKey();
if (userListNames.contains(listName)) { // the stored lists contains the above userList
if (!isChecked(checkedOnes,i)) {
// not checked -> user wants to remove the list
UpdateRequest request = new UpdateRequest(UpdateType.REMOVE_FROM_LIST);
request.id = userList.getValue();
request.userId=theUser.getId();
UpdateStatusService.sendUpdate(this,account,request);
}
} else {
// newly checked -> user wants to add the user to this list
if (isChecked(checkedOnes,i)) {
UpdateRequest request = new UpdateRequest(UpdateType.ADD_TO_LIST);
request.id=userList.getValue();
request.userId=theUser.getId();
UpdateStatusService.sendUpdate(this,account,request);
}
}
i++;
}
}
}
/**
 * Returns true when position 'pos' occurs in the array of checked
 * item positions returned by the multi-select dialog.
 */
private boolean isChecked(long[] checkedOnes, int pos) {
    for (int idx = 0; idx < checkedOnes.length; idx++) {
        if (checkedOnes[idx] == pos) {
            return true;
        }
    }
    return false;
}
/**
 * Inflates the action-bar menu, sets the follow/unfollow title to match
 * the current state, wires the progress spinner action view and enables
 * the add-to-list item only once the user's lists have been loaded.
 */
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.user_detail_menu,menu);
weAreFollowingMenuItem = menu.findItem(R.id.follow);
if (weAreFollowing)
weAreFollowingMenuItem.setTitle(R.string.unfollow_user);
else
weAreFollowingMenuItem.setTitle(R.string.follow_user);
ActionBar actionBar = this.getActionBar();
actionBar.setDisplayHomeAsUpEnabled(true);
MenuItem pgItem = menu.findItem(R.id.ProgressBar);
pg = (ProgressBar) pgItem.getActionView();
pg.setVisibility(ProgressBar.INVISIBLE);
MenuItem listsItem = menu.findItem(R.id.addToList);
listsItem.setEnabled(listsLoaded);
return true;
}
/**
 * Routes action-bar selections to the matching button handler; the
 * handlers ignore their View argument, so null is passed.
 */
public boolean onOptionsItemSelected(MenuItem item) {
    int id = item.getItemId();
    if (id == android.R.id.home) {
        finish();
    } else if (id == R.id.follow) {
        followUser(null);
    } else if (id == R.id.addToList) {
        addToList(null);
    } else if (id == R.id.direct) {
        directMessage(null);
    } else if (id == R.id.viewOnWeb) {
        viewOnWeb(null);
    } else if (id == R.id.usersTweets) {
        showUserTweets(null);
    } else if (id == R.id.refresh) {
        reload();
    } else {
        Log.e("UserDetailActivity", "Unknown menu item: " + item.toString());
    }
    return super.onOptionsItemSelected(item);
}
/**
 * Async task to download the userdata from server (or db) and
 * trigger its display.
 *
 * Accepts either a Long user-id or a String screen-name as its single
 * parameter and returns {user, isFriend, backgroundDrawable} (or an
 * empty array when the user could not be loaded).
 */
private class UserDetailDownloadTask extends AsyncTask<Object,Void, Object[]> {
    private Context context;

    private UserDetailDownloadTask(Context context) {
        this.context = context;
    }

    @Override
    protected void onPreExecute() {
        super.onPreExecute();
        // Disable follow toggling and show the spinner while loading.
        if (weAreFollowingMenuItem != null) {
            weAreFollowingMenuItem.setEnabled(false);
        }
        if (pg != null) {
            pg.setVisibility(ProgressBar.VISIBLE);
        }
        getActionBar().setTitle(R.string.get_user_detail);
    }

    @Override
    protected Object[] doInBackground(Object... params) {
        Long userId;
        User user;
        if (params[0] instanceof Long) {
            userId = (Long) params[0];
            user = twitterHelper.getUserById(userId, false);
            // BUGFIX: mirror the null check of the screen-name branch;
            // a null user previously caused an NPE further down.
            if (user == null)
                return new Object[]{};
        }
        else if (params[0] instanceof String) {
            String name = (String) params[0];
            user = twitterHelper.getUserByScreenName(name, false);
            if (user == null)
                return new Object[]{};
            userId = user.getId();
        } else {
            // Should not happen
            return new Object[]{};
        }
        boolean downloadImages = new NetworkHelper(context).mayDownloadImages();
        Drawable background = null;
        if (downloadImages) {
            String profileBackgroundImageUrl = user.getProfileBackgroundImageURL();
            if (!"".equals(profileBackgroundImageUrl)) {
                InputStream is = null;
                try {
                    URL url = new URL(profileBackgroundImageUrl);
                    is = url.openStream();
                    background = Drawable.createFromStream(is, "lala");
                } catch (IOException e) {
                    e.printStackTrace();
                } catch (OutOfMemoryError oome) {
                    oome.printStackTrace();
                } finally {
                    // BUGFIX: the stream was previously never closed (leak).
                    if (is != null) {
                        try { is.close(); } catch (IOException ignored) { }
                    }
                }
            }
            PicHelper picHelper = new PicHelper(); // TODO optimize
            picHelper.fetchUserPic(user);
        }
        userListNames = twitterHelper.getListMembershipFromServer(user.getScreenName());
        Boolean isFriend = twitterHelper.areWeFollowing(userId);
        weAreFollowing = isFriend;
        Object[] res = new Object[3];
        res[0] = user;
        res[1] = isFriend;
        res[2] = background;
        return res;
    }

    @Override
    protected void onPostExecute(Object[] params) {
        super.onPostExecute(params);
        if (params == null || params.length == 0 || params[0] == null) {
            Toast.makeText(context, R.string.failed_to_load_user_info, Toast.LENGTH_LONG).show();
            return;
        }
        User user = (User) params[0];
        Boolean isFriend = (Boolean) params[1];
        theUser = user;
        fillDetails(user, isFriend);
        View followButton = findViewById(R.id.userDetail_follow_button);
        if (followButton != null)
            followButton.setEnabled(true);
        if (weAreFollowingMenuItem != null) {
            if (isFriend)
                weAreFollowingMenuItem.setTitle(R.string.unfollow_user);
            else
                weAreFollowingMenuItem.setTitle(R.string.follow_user);
            weAreFollowingMenuItem.setEnabled(true);
        }
        if (pg != null)
            pg.setVisibility(ProgressBar.INVISIBLE);
        getActionBar().setTitle(R.string.app_name);
        Drawable background = (Drawable) params[2];
        if (background != null)
            getWindow().setBackgroundDrawable(background);
        // Lists are known now; re-create the menu so addToList is enabled.
        listsLoaded = true;
        invalidateOptionsMenu();
    }
}
} |
ihotray/gomedia | mp4/trun-box.go | package mp4
import "encoding/binary"
// aligned(8) class TrackRunBox extends FullBox(‘trun’, version, tr_flags) {
// unsigned int(32) sample_count;
// // the following are optional fields
// signed int(32) data_offset;
// unsigned int(32) first_sample_flags;
// // all fields in the following array are optional
// {
// unsigned int(32) sample_duration;
// unsigned int(32) sample_size;
// unsigned int(32) sample_flags
// if (version == 0)
// {
// unsigned int(32) sample_composition_time_offset;
// }
// else
// {
// signed int(32) sample_composition_time_offset;
// }
// }[ sample_count ]
// }
// MP4_TRUN_FALG holds the 'tr_flags' bit-mask values of the 'trun'
// FullBox (ISO/IEC 14496-12 §8.8.8); each bit signals that one of the
// optional fields is present in the encoded box.
type MP4_TRUN_FALG uint32
const (
TR_FLAG_DATA_OFFSET MP4_TRUN_FALG = 0x000001 // data_offset field is present
TR_FLAG_DATA_FIRST_SAMPLE_FLAGS MP4_TRUN_FALG = 0x000004 // first_sample_flags field is present
TR_FLAG_DATA_SAMPLE_DURATION MP4_TRUN_FALG = 0x000100 // per-sample sample_duration is present
TR_FLAG_DATA_SAMPLE_SIZE MP4_TRUN_FALG = 0x000200 // per-sample sample_size is present
TR_FLAG_DATA_SAMPLE_FLAGS MP4_TRUN_FALG = 0x000400 // per-sample sample_flags is present
TR_FLAG_DATA_SAMPLE_COMPOSITION_TIME MP4_TRUN_FALG = 0x000800 // per-sample composition time offset is present
)
// TrackRunBox is the parsed 'trun' box (see the layout comment above).
type TrackRunBox struct {
Box *FullBox // FullBox header; its Flags carry the tr_flags bit-mask
SampleCount uint32 // number of entries in EntryList
Dataoffset int32 // optional, present when TR_FLAG_DATA_OFFSET is set
FirstSampleFlags uint32 // optional, present when TR_FLAG_DATA_FIRST_SAMPLE_FLAGS is set
EntryList *movtrun // per-sample entries (SampleCount of them)
}
// NewTrackRunBox returns a TrackRunBox whose FullBox header is
// pre-initialized with box type 'trun' and version 1.
func NewTrackRunBox() *TrackRunBox {
	box := NewFullBox([4]byte{'t', 'r', 'u', 'n'}, 1)
	return &TrackRunBox{Box: box}
}
// Size returns the encoded byte size of the box: the FullBox header,
// the mandatory sample_count field, plus every optional field enabled
// in the tr_flags bit-mask (ISO/IEC 14496-12 §8.8.8).
func (trun *TrackRunBox) Size() uint64 {
	n := trun.Box.Size()
	// BUGFIX: sample_count (4 bytes) is always written by Encode(),
	// but was previously only counted inside the TR_FLAG_DATA_OFFSET
	// branch ("n += 8"), under-sizing the box when that flag is clear.
	n += 4
	trunFlags := uint32(trun.Box.Flags[0])<<16 | uint32(trun.Box.Flags[1])<<8 | uint32(trun.Box.Flags[2])
	if trunFlags&uint32(TR_FLAG_DATA_OFFSET) > 0 {
		n += 4
	}
	if trunFlags&uint32(TR_FLAG_DATA_FIRST_SAMPLE_FLAGS) > 0 {
		n += 4
	}
	// Each enabled per-sample field contributes 4 bytes per sample.
	perSample := uint64(0)
	if trunFlags&uint32(TR_FLAG_DATA_SAMPLE_DURATION) > 0 {
		perSample += 4
	}
	if trunFlags&uint32(TR_FLAG_DATA_SAMPLE_SIZE) > 0 {
		perSample += 4
	}
	if trunFlags&uint32(TR_FLAG_DATA_SAMPLE_FLAGS) > 0 {
		perSample += 4
	}
	if trunFlags&uint32(TR_FLAG_DATA_SAMPLE_COMPOSITION_TIME) > 0 {
		perSample += 4
	}
	return n + perSample*uint64(trun.SampleCount)
}
// Decode parses a 'trun' box from rh. The FullBox header is decoded
// first; its flags then select which optional and per-sample fields
// are read. Returns the number of bytes consumed after the plain box
// header, or an error from the reader.
func (trun *TrackRunBox) Decode(rh Reader) (offset int, err error) {
if offset, err = trun.Box.Decode(rh); err != nil {
return
}
// Remaining payload = declared box size minus the 12-byte FullBox header.
needSize := trun.Box.Box.Size - 12
buf := make([]byte, needSize)
if _, err = rh.ReadAtLeast(buf); err != nil {
return 0, err
}
n := 0
trun.SampleCount = binary.BigEndian.Uint32(buf[n:])
n += 4
// Assemble the 24-bit tr_flags from the FullBox flag bytes.
trunFlags := uint32(trun.Box.Flags[0])<<16 | uint32(trun.Box.Flags[1])<<8 | uint32(trun.Box.Flags[2])
if trunFlags&uint32(TR_FLAG_DATA_OFFSET) > 0 {
trun.Dataoffset = int32(binary.BigEndian.Uint32(buf[n:]))
n += 4
}
if trunFlags&uint32(TR_FLAG_DATA_FIRST_SAMPLE_FLAGS) > 0 {
trun.FirstSampleFlags = binary.BigEndian.Uint32(buf[n:])
n += 4
}
// Read the per-sample table; each field is present only if its flag is set.
trun.EntryList = new(movtrun)
trun.EntryList.entrys = make([]trunEntry, trun.SampleCount)
for i := 0; i < int(trun.SampleCount); i++ {
if trunFlags&uint32(TR_FLAG_DATA_SAMPLE_DURATION) > 0 {
trun.EntryList.entrys[i].sampleDuration = binary.BigEndian.Uint32(buf[n:])
n += 4
}
if trunFlags&uint32(TR_FLAG_DATA_SAMPLE_SIZE) > 0 {
trun.EntryList.entrys[i].sampleSize = binary.BigEndian.Uint32(buf[n:])
n += 4
}
if trunFlags&uint32(TR_FLAG_DATA_SAMPLE_FLAGS) > 0 {
trun.EntryList.entrys[i].sampleFlags = binary.BigEndian.Uint32(buf[n:])
n += 4
}
if trunFlags&uint32(TR_FLAG_DATA_SAMPLE_COMPOSITION_TIME) > 0 {
trun.EntryList.entrys[i].sampleCompositionTimeOffset = binary.BigEndian.Uint32(buf[n:])
n += 4
}
}
offset += n
return
}
// Encode serializes the 'trun' box (header plus flagged fields) and returns
// the number of bytes written along with the backing buffer.
//
// Fixes over the previous version:
//   - the data-offset slot wrote SampleCount instead of Dataoffset;
//   - EntryList was reallocated before the loop, wiping the very per-sample
//     data this method is supposed to serialize;
//   - the sample-flags and composition-time branches READ from the buffer
//     (Uint32) instead of WRITING to it (PutUint32).
func (trun *TrackRunBox) Encode() (int, []byte) {
	trun.Box.Box.Size = trun.Size()
	offset, buf := trun.Box.Encode()
	binary.BigEndian.PutUint32(buf[offset:], trun.SampleCount)
	offset += 4
	// Reassemble the 24-bit flags value from its three bytes (big-endian).
	trunFlags := uint32(trun.Box.Flags[0])<<16 | uint32(trun.Box.Flags[1])<<8 | uint32(trun.Box.Flags[2])
	if trunFlags&uint32(TR_FLAG_DATA_OFFSET) > 0 {
		// Signed data offset, stored as its 32-bit two's-complement pattern.
		binary.BigEndian.PutUint32(buf[offset:], uint32(trun.Dataoffset))
		offset += 4
	}
	if trunFlags&uint32(TR_FLAG_DATA_FIRST_SAMPLE_FLAGS) > 0 {
		binary.BigEndian.PutUint32(buf[offset:], trun.FirstSampleFlags)
		offset += 4
	}
	// NOTE: do not reallocate trun.EntryList here — it holds the per-sample
	// values (filled by Decode or by the muxer) that we are about to write.
	for i := 0; i < int(trun.SampleCount); i++ {
		if trunFlags&uint32(TR_FLAG_DATA_SAMPLE_DURATION) > 0 {
			binary.BigEndian.PutUint32(buf[offset:], trun.EntryList.entrys[i].sampleDuration)
			offset += 4
		}
		if trunFlags&uint32(TR_FLAG_DATA_SAMPLE_SIZE) > 0 {
			binary.BigEndian.PutUint32(buf[offset:], trun.EntryList.entrys[i].sampleSize)
			offset += 4
		}
		if trunFlags&uint32(TR_FLAG_DATA_SAMPLE_FLAGS) > 0 {
			binary.BigEndian.PutUint32(buf[offset:], trun.EntryList.entrys[i].sampleFlags)
			offset += 4
		}
		if trunFlags&uint32(TR_FLAG_DATA_SAMPLE_COMPOSITION_TIME) > 0 {
			binary.BigEndian.PutUint32(buf[offset:], trun.EntryList.entrys[i].sampleCompositionTimeOffset)
			offset += 4
		}
	}
	return offset, buf
}
|
jacadcaps/webkitty | Source/WebCore/bindings/js/JSWorkletGlobalScopeBase.h | /*
* Copyright (C) 2018 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#pragma once
#if ENABLE(CSS_PAINTING_API)
#include "JSDOMGlobalObject.h"
#include "JSDOMWrapper.h"
#include "JSEventTarget.h"
namespace WebCore {
class JSWorkletGlobalScope;
class WorkletGlobalScope;
// Base class for the JS wrapper of a WorkletGlobalScope. A worklet scope acts
// as its own JS global object, so this derives from JSDOMGlobalObject rather
// than being a regular wrapper; the C++ scope is kept alive via m_wrapped.
class JSWorkletGlobalScopeBase : public JSDOMGlobalObject {
public:
    using Base = JSDOMGlobalObject;

    // This base class must never be allocated through subspaceFor.
    template<typename, JSC::SubspaceAccess>
    static void subspaceFor(JSC::VM&) { RELEASE_ASSERT_NOT_REACHED(); }

    static void destroy(JSC::JSCell*);
    DECLARE_INFO;

    WorkletGlobalScope& wrapped() const { return *m_wrapped; }
    // The proxy standing in for this global; asserted non-null once created.
    JSC::JSProxy* proxy() const { ASSERT(m_proxy); return m_proxy.get(); }
    ScriptExecutionContext* scriptExecutionContext() const;

    static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype)
    {
        return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::GlobalObjectType, StructureFlags), info());
    }

    // Hooks consulted by JavaScriptCore for script-execution policy.
    static const JSC::GlobalObjectMethodTable s_globalObjectMethodTable;
    static bool supportsRichSourceInfo(const JSC::JSGlobalObject*);
    static bool shouldInterruptScript(const JSC::JSGlobalObject*);
    static bool shouldInterruptScriptBeforeTimeout(const JSC::JSGlobalObject*);
    static JSC::RuntimeFlags javaScriptRuntimeFlags(const JSC::JSGlobalObject*);
    static void queueMicrotaskToEventLoop(JSC::JSGlobalObject&, Ref<JSC::Microtask>&&);
    void clearDOMGuardedObjects();

protected:
    JSWorkletGlobalScopeBase(JSC::VM&, JSC::Structure*, RefPtr<WorkletGlobalScope>&&);
    void finishCreation(JSC::VM&, JSC::JSProxy*);
    static void visitChildren(JSC::JSCell*, JSC::SlotVisitor&);

private:
    RefPtr<WorkletGlobalScope> m_wrapped;
    JSC::WriteBarrier<JSC::JSProxy> m_proxy;
};
// Returns a JSWorkletGlobalScope or jsNull()
// Always ignores the execState and passed globalObject, WorkletGlobalScope is itself a globalObject and will always use its own prototype chain.
JSC::JSValue toJS(JSC::JSGlobalObject*, JSDOMGlobalObject*, WorkletGlobalScope&);
// Null-safe overload: a null scope maps to jsNull().
inline JSC::JSValue toJS(JSC::JSGlobalObject* lexicalGlobalObject, JSDOMGlobalObject* globalObject, WorkletGlobalScope* scope) { return scope ? toJS(lexicalGlobalObject, globalObject, *scope) : JSC::jsNull(); }
JSC::JSValue toJS(JSC::JSGlobalObject*, WorkletGlobalScope&);
// Null-safe overload: a null scope maps to jsNull().
inline JSC::JSValue toJS(JSC::JSGlobalObject* lexicalGlobalObject, WorkletGlobalScope* scope) { return scope ? toJS(lexicalGlobalObject, *scope) : JSC::jsNull(); }
JSWorkletGlobalScope* toJSWorkletGlobalScope(JSC::VM&, JSC::JSValue);
} // namespace WebCore
#endif // ENABLE(CSS_PAINTING_API)
|
belonk/spring5-demo | 04-spring-mvc/src/main/java/com/belonk/springmvc/config/RootConfig.java | package com.belonk.springmvc.config;
import org.springframework.beans.factory.annotation.Configurable;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.FilterType;
import org.springframework.stereotype.Controller;
/**
* 根容器,扫描除了Controller的其他所有组件。
* <p>
* Created by sun on 2020/5/3.
*
* @author <EMAIL>
* @version 1.0
* @since 1.0
*/
@Configurable
@ComponentScan(basePackages = "com.belonk", excludeFilters = {
@ComponentScan.Filter(type = FilterType.ANNOTATION,
classes = Controller.class)})
public class RootConfig {
/*
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*
* Static fields/constants/initializer
*
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
/*
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*
* Instance fields
*
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
/*
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*
* Constructors
*
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
/*
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*
* Methods
*
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
} |
RemnevMaksim/splinter | src/main/java/ru/yandex/autoschool/splinter/di/provider/LoggerProvider.java | <reponame>RemnevMaksim/splinter<gh_stars>1-10
package ru.yandex.autoschool.splinter.di.provider;
import org.apache.log4j.BasicConfigurator;
import org.glassfish.hk2.api.Factory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ru.yandex.autoschool.splinter.application.Splinter;
/**
* @author Etki {@literal <<EMAIL>>}
* @version %I%, %G%
* @since 1.0
*/
/**
 * HK2 {@link Factory} that supplies the shared application {@link Logger}.
 */
public class LoggerProvider implements Factory<Logger> {

    /**
     * Bootstraps log4j with a default appender if none is configured yet,
     * then hands out the SLF4J logger bound to the {@link Splinter} class.
     */
    public Logger provide() {
        org.apache.log4j.Logger rootLogger = org.apache.log4j.Logger.getRootLogger();
        boolean hasAppenders = rootLogger.getAllAppenders().hasMoreElements();
        if (!hasAppenders) {
            BasicConfigurator.configure();
        }
        return LoggerFactory.getLogger(Splinter.class);
    }

    /** Loggers need no cleanup; intentionally a no-op. */
    public void dispose(Logger logger) {
        // intentionally empty
    }
}
|
svenfuchs/ripper2ruby | test/lib_test_helper.rb | $: << File.expand_path(File.dirname(__FILE__) + '/../lib')
require 'pp'
require 'ripper/ruby_builder'
require 'ripper/event_log'
require 'highlighters/ansi'
require 'erb/stripper'
require 'diff/lcs'
require 'diff/lcs/hunk'
# Shared helpers for the library round-trip tests: building Ripper node
# trees, reading library sources (optionally stripping ERB), and producing
# colorized unified diffs of mismatching output.
module LibTestHelper
  # Parse +src+ into a Ruby node tree via the RubyBuilder.
  def build(src, filename = nil)
    Ripper::RubyBuilder.build(src, filename)
  end
  # Pretty-print the raw Ripper s-expression for +src+ (debug aid).
  def sexp(src)
    pp Ripper::SexpBuilder.new(src).parse
  end
  # Dump the Ripper event stream for +src+ (debug aid).
  def log(src)
    Ripper::EventLog.out(src)
  end
  # All Ruby files below +root+, sorted for deterministic runs.
  def filenames(root)
    Dir["#{root}/**/*.rb"].sort
  end
  # Read a file, retrying with legacy encodings when the bytes are not
  # valid in the default external encoding. Returns '' for unreadable input.
  def read_file(filename)
    src = File.read(filename)
    src = File.open(filename, 'r:iso-8859-1') { |f| f.read } unless src.valid_encoding?
    src = File.open(filename, 'r:ascii-8bit') { |f| f.read } unless src.valid_encoding?
    src || ''
  end
  # Read a source file; if +lib+ marks it as an ERB template, strip the
  # ERB markup so only plain Ruby remains.
  def read_src(filename, lib = nil)
    src = read_file(filename)
    src = strip_erb(src) if lib && erb?(lib, filename)
    src || ''
  end
  # True when +filename+ matches any of the lib's :exclude substrings.
  def excluded?(lib, filename)
    lib[:exclude].any? { |exclude| filename.index(exclude) }
  end
  # True when +filename+ matches the lib's :erb patterns (default: template
  # directories and environment.rb).
  def erb?(lib, filename)
    Array(lib[:erb] || %r(/templates/|environment\.rb)).any? { |pattern| filename =~ pattern }
  end
  def strip_erb(src)
    Erb::Stripper.new.to_ruby(src)
  end
  # Print a per-library summary of collected errors, with the library path
  # shortened to the library name.
  def report(errors, name, path)
    errors = errors[name]
    msg = if errors && !errors.empty?
      # output the broken line
      # suggest possible fixes
      "#{errors.count} problems found in #{name}:\n" + errors.map { |e| e.gsub(path, name.to_s) }.join
    else
      "no problems found in #{name}"
    end
    puts msg
  end
  # Colorize a unified diff: additions green, removals red (ANSI escapes).
  def highlight_diff(str)
    green = Highlighters::Ansi.new(:green)
    red = Highlighters::Ansi.new(:red)
    str.split("\n").map do |line|
      line = green.highlight(line) if line =~ /^\+/
      line = red.highlight(line) if line =~ /^\-/
      line
    end.join("\n")
  end
  # some ruby cookbook
  # Produce a colorized diff between two strings using Diff::LCS hunks.
  # NOTE(review): the ensure block appends "\n" on every iteration, so hunk
  # output is newline-separated even when a hunk is merged — presumably
  # intentional; verify against expected diff formatting.
  def diff(data_old, data_new, format=:unified, context_lines=3)
    data_old = data_old.split(/\n/).map! { |e| e.chomp }
    data_new = data_new.split(/\n/).map! { |e| e.chomp }
    output = ""
    diffs = Diff::LCS.diff(data_old, data_new)
    return output if diffs.empty?
    oldhunk = hunk = nil
    file_length_difference = 0
    diffs.each do |piece|
      begin
        hunk = Diff::LCS::Hunk.new(data_old, data_new, piece, context_lines, file_length_difference)
        file_length_difference = hunk.file_length_difference
        next unless oldhunk
        # Overlapping hunks are merged; otherwise flush the previous one.
        if (context_lines > 0) and hunk.overlaps?(oldhunk)
          hunk.unshift(oldhunk)
        else
          output << oldhunk.diff(format)
        end
      ensure
        oldhunk = hunk
        output << "\n"
      end
    end
    output << oldhunk.diff(format) << "\n"
    highlight_diff(output)
  end
end
breath-co2/hprose-nodejs | example/filter/log/logfilter.js | var hprose = require('hprose');
function log(data) {
console.log(hprose.BytesIO.toString(data));
return data;
}
module.exports = {
inputFilter: log,
outputFilter: log
};
|
andersop91/core | tests/components/homekit_controller/specific_devices/test_aqara_gateway.py | <gh_stars>1000+
"""
Regression tests for Aqara Gateway V3.
https://github.com/home-assistant/core/issues/20957
"""
from homeassistant.components.alarm_control_panel import (
SUPPORT_ALARM_ARM_AWAY,
SUPPORT_ALARM_ARM_HOME,
SUPPORT_ALARM_ARM_NIGHT,
)
from homeassistant.components.light import SUPPORT_BRIGHTNESS, SUPPORT_COLOR
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.entity import EntityCategory
from tests.components.homekit_controller.common import (
Helper,
setup_accessories_from_file,
setup_test_accessories,
)
async def test_aqara_gateway_setup(hass):
    """Test that a Aqara Gateway can be correctly setup in HA."""
    accessories = await setup_accessories_from_file(hass, "aqara_gateway.json")
    config_entry, pairing = await setup_test_accessories(hass, accessories)
    entity_registry = er.async_get(hass)
    device_registry = dr.async_get(hass)
    # Expected entities as (entity_id, unique_id, friendly name,
    # supported-features bitmask, entity category) tuples.
    sensors = [
        (
            "alarm_control_panel.aqara_hub_1563",
            "homekit-0000000123456789-66304",
            "Aqara Hub-1563",
            SUPPORT_ALARM_ARM_NIGHT | SUPPORT_ALARM_ARM_HOME | SUPPORT_ALARM_ARM_AWAY,
            None,
        ),
        (
            "light.aqara_hub_1563",
            "homekit-0000000123456789-65792",
            "Aqara Hub-1563",
            SUPPORT_BRIGHTNESS | SUPPORT_COLOR,
            None,
        ),
        (
            "number.aqara_hub_1563_volume",
            "homekit-0000000123456789-aid:1-sid:65536-cid:65541",
            "Aqara Hub-1563 Volume",
            None,
            EntityCategory.CONFIG,
        ),
        (
            "switch.aqara_hub_1563_pairing_mode",
            "homekit-0000000123456789-aid:1-sid:65536-cid:65538",
            "Aqara Hub-1563 Pairing Mode",
            None,
            EntityCategory.CONFIG,
        ),
    ]
    device_ids = set()
    # Verify registry entry, polled state and device metadata per entity.
    for (entity_id, unique_id, friendly_name, supported_features, category) in sensors:
        entry = entity_registry.async_get(entity_id)
        assert entry.unique_id == unique_id
        assert entry.entity_category == category
        helper = Helper(
            hass,
            entity_id,
            pairing,
            accessories[0],
            config_entry,
        )
        state = await helper.poll_and_get_state()
        assert state.attributes["friendly_name"] == friendly_name
        assert state.attributes.get("supported_features") == supported_features
        device = device_registry.async_get(entry.device_id)
        assert device.manufacturer == "Aqara"
        assert device.name == "Aqara Hub-1563"
        assert device.model == "ZHWA11LM"
        assert device.sw_version == "1.4.7"
        assert device.via_device_id is None
        device_ids.add(entry.device_id)
    # All entities should be part of same device
    assert len(device_ids) == 1
async def test_aqara_gateway_e1_setup(hass):
    """Test that an Aqara E1 Gateway can be correctly setup in HA."""
    accessories = await setup_accessories_from_file(hass, "aqara_e1.json")
    config_entry, pairing = await setup_test_accessories(hass, accessories)
    entity_registry = er.async_get(hass)
    device_registry = dr.async_get(hass)
    # Expected entities as (entity_id, unique_id, friendly name,
    # supported-features bitmask, entity category) tuples.
    sensors = [
        (
            "alarm_control_panel.aqara_hub_e1_00a0",
            "homekit-00aa00000a0-16",
            "Aqara-Hub-E1-00A0",
            SUPPORT_ALARM_ARM_NIGHT | SUPPORT_ALARM_ARM_HOME | SUPPORT_ALARM_ARM_AWAY,
            None,
        ),
        (
            "number.aqara_hub_e1_00a0_volume",
            "homekit-00aa00000a0-aid:1-sid:17-cid:1114116",
            "Aqara-Hub-E1-00A0 Volume",
            None,
            EntityCategory.CONFIG,
        ),
        (
            "switch.aqara_hub_e1_00a0_pairing_mode",
            "homekit-00aa00000a0-aid:1-sid:17-cid:1114117",
            "Aqara-Hub-E1-00A0 Pairing Mode",
            None,
            EntityCategory.CONFIG,
        ),
    ]
    device_ids = set()
    # Verify registry entry, polled state and device metadata per entity.
    for (entity_id, unique_id, friendly_name, supported_features, category) in sensors:
        entry = entity_registry.async_get(entity_id)
        assert entry.unique_id == unique_id
        assert entry.entity_category == category
        helper = Helper(
            hass,
            entity_id,
            pairing,
            accessories[0],
            config_entry,
        )
        state = await helper.poll_and_get_state()
        assert state.attributes["friendly_name"] == friendly_name
        assert state.attributes.get("supported_features") == supported_features
        device = device_registry.async_get(entry.device_id)
        assert device.manufacturer == "Aqara"
        assert device.name == "Aqara-Hub-E1-00A0"
        assert device.model == "HE1-G01"
        assert device.sw_version == "3.3.0"
        assert device.via_device_id is None
        device_ids.add(entry.device_id)
    # All entities should be part of same device
    assert len(device_ids) == 1
|
juliocnsouzadev/scala_datascience | functional_program_design_in_scala/functional_program_design_in_scala/.worksheet/src/week02.StructuralInductionOnTrees.scala | package week02
// Auto-generated by the Scala IDE worksheet runtime (.worksheet/src); edit
// the source worksheet instead of this file. The $skip/$show calls are
// instrumentation that echoes each expression's value to the worksheet.
object StructuralInductionOnTrees {;import org.scalaide.worksheet.runtime.library.WorksheetSupport._; def main(args: Array[String])=$execute{;$skip(67);
val x = 10;System.out.println("""x : Int = """ + $show(x ));$skip(12);
val y = 5;System.out.println("""y : Int = """ + $show(y ));$skip(16);
val s = Empty;System.out.println("""s : week02.Empty.type = """ + $show(s ));$skip(15); val res$0 =
s contains x;System.out.println("""res0: Boolean = """ + $show(res$0));$skip(27); val res$1 =
(s include x) contains x;System.out.println("""res1: Boolean = """ + $show(res$1));$skip(26); val res$2 =
(s include x) contains y;System.out.println("""res2: Boolean = """ + $show(res$2));$skip(17);
val l = Empty;System.out.println("""l : week02.Empty.type = """ + $show(l ));$skip(15);
val r = Empty;System.out.println("""r : week02.Empty.type = """ + $show(r ));$skip(42); val res$3 =
(NonEmpty(x, l, r)include x)contains x;System.out.println("""res3: Boolean = """ + $show(res$3));$skip(41); val res$4 =
(NonEmpty(y, l, r )include x)contains x;System.out.println("""res4: Boolean = """ + $show(res$4));$skip(32); val res$5 =
(Empty include y) contains x;System.out.println("""res5: Boolean = """ + $show(res$5));$skip(20);
val xs = Empty;System.out.println("""xs : week02.Empty.type = """ + $show(xs ));$skip(16);
val ys = Empty;System.out.println("""ys : week02.Empty.type = """ + $show(ys ));$skip(27); val res$6 =
(xs union ys)contains x;System.out.println("""res6: Boolean = """ + $show(res$6));$skip(15); val res$7 =
xs contains x;System.out.println("""res7: Boolean = """ + $show(res$7));$skip(15); val res$8 =
ys contains x;System.out.println("""res8: Boolean = """ + $show(res$8))}
}
|
tiff2766/DK | DK/Libs/zstd/tests/fuzz/simple_round_trip.c | <filename>DK/Libs/zstd/tests/fuzz/simple_round_trip.c
/*
* Copyright (c) 2016-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under both the BSD-style license (found in the
* LICENSE file in the root directory of this source tree) and the GPLv2 (found
* in the COPYING file in the root directory of this source tree).
*/
/**
* This fuzz target performs a zstd round-trip test (compress & decompress),
* compares the result with the original, and calls abort() on corruption.
*/
#define ZSTD_STATIC_LINKING_ONLY
#include <stddef.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include "fuzz_helpers.h"
#include "zstd_helpers.h"
/* Exclusive upper bound for the compression level drawn below
 * (FUZZ_rand % kMaxClevel yields 0..18, so level 19 itself is never used). */
static const int kMaxClevel = 19;
/* Contexts and scratch buffers reused across fuzzer iterations; freed at the
 * end of each run unless STATEFUL_FUZZING is defined. */
static ZSTD_CCtx *cctx = NULL;
static ZSTD_DCtx *dctx = NULL;
static void* cBuf = NULL;   /* compressed output buffer */
static void* rBuf = NULL;   /* regenerated (decompressed) buffer */
static size_t bufSize = 0;  /* current capacity of cBuf/rBuf */
static uint32_t seed;       /* PRNG state derived from the fuzz input */
/* Compresses src into `compressed` — randomly via either the streaming API
 * with randomized parameters or the one-shot API with a random level — then
 * decompresses into `result`. Returns the regenerated size (or a zstd error
 * code propagated by the caller's FUZZ_ZASSERT). */
static size_t roundTripTest(void *result, size_t resultCapacity,
                            void *compressed, size_t compressedCapacity,
                            const void *src, size_t srcSize)
{
    size_t cSize;
    if (FUZZ_rand(&seed) & 1) {
        /* Streaming path: one ZSTD_e_end flush must fully consume the input
         * (err == 0 means the frame is complete). */
        ZSTD_inBuffer in = {src, srcSize, 0};
        ZSTD_outBuffer out = {compressed, compressedCapacity, 0};
        size_t err;

        ZSTD_CCtx_reset(cctx, ZSTD_reset_session_only);
        FUZZ_setRandomParameters(cctx, srcSize, &seed);
        err = ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_end);
        FUZZ_ZASSERT(err);
        FUZZ_ASSERT(err == 0);
        cSize = out.pos;
    } else {
        /* One-shot path with a random compression level in [0, kMaxClevel). */
        int const cLevel = FUZZ_rand(&seed) % kMaxClevel;
        cSize = ZSTD_compressCCtx(
            cctx, compressed, compressedCapacity, src, srcSize, cLevel);
    }
    FUZZ_ZASSERT(cSize);
    return ZSTD_decompressDCtx(dctx, result, resultCapacity, compressed, cSize);
}
/* libFuzzer entry point: derives a seed from the input, grows the shared
 * buffers to ZSTD_compressBound(size) as needed, round-trips the data and
 * aborts on any size mismatch or content corruption. */
int LLVMFuzzerTestOneInput(const uint8_t *src, size_t size)
{
    size_t neededBufSize;

    seed = FUZZ_seed(&src, &size);
    neededBufSize = ZSTD_compressBound(size);

    /* Allocate all buffers and contexts if not already allocated */
    if (neededBufSize > bufSize) {
        free(cBuf);
        free(rBuf);
        cBuf = malloc(neededBufSize);
        rBuf = malloc(neededBufSize);
        bufSize = neededBufSize;
        FUZZ_ASSERT(cBuf && rBuf);
    }
    if (!cctx) {
        cctx = ZSTD_createCCtx();
        FUZZ_ASSERT(cctx);
    }
    if (!dctx) {
        dctx = ZSTD_createDCtx();
        FUZZ_ASSERT(dctx);
    }
    {
        size_t const result =
            roundTripTest(rBuf, neededBufSize, cBuf, neededBufSize, src, size);
        FUZZ_ZASSERT(result);
        FUZZ_ASSERT_MSG(result == size, "Incorrect regenerated size");
        FUZZ_ASSERT_MSG(!memcmp(src, rBuf, size), "Corruption!");
    }
#ifndef STATEFUL_FUZZING
    /* Tear the contexts down each run so iterations stay independent. */
    ZSTD_freeCCtx(cctx); cctx = NULL;
    ZSTD_freeDCtx(dctx); dctx = NULL;
#endif
    return 0;
}
|
frontful/frontful-platform | packages/frontful-resolver/resolver/Resolver.js | import {Exceptions} from './Exceptions'
import {observer as mobxObserver} from 'mobx-react'
import {Promisable as PromisableClass, deferred, getDisplayName, isBrowser} from 'frontful-utils'
import {untracked, observable, reaction} from 'mobx'
import React from 'react'
// Environment split: during server-side rendering we skip mobx-react's
// `observer` wrapper (no reactive re-rendering is needed), while in the
// browser components are made reactive. `Promisable` was assigned the same
// implementation in both branches of the original `if`, so the redundant
// branching is collapsed here; behavior is unchanged.
const Promisable = PromisableClass
const observer = isBrowser() ? mobxObserver : (Component) => (Component)

// Inline styles for the banner rendered when resolution throws.
const errorStyle = {
  backgroundColor: 'red',
  color: 'white',
  fontSize: '16px',
  padding: '5px',
  margin: '0',
}
// Orchestrates data resolution for a component declared with the frontful
// resolver decorators: extracts the component's resolver queues, wires them
// into mobx reactions, resolves returned values/elements (recursively for
// nested resolvable elements), and exposes the merged results as observable
// "requisites" that the wrapping observer component renders with.
export class Resolver {
  constructor(element, context) {
    this.getRequisites = this.getRequisites.bind(this)
    this.context = context
    this.isResolver = true
    this.Component = element.type
    this.props = element.props
    this.resolvers = this.extractResolvers(this.Component)
    this.resolversTree = this.extractResolversTree(this.resolvers, this.props)
    // `requisites` is replaced wholesale (observable.ref) on each resolve pass.
    this.data = observable({
      requisites: {}
    }, {
      requisites: observable.ref
    })
  }
  // Queues one resolver chain. NOTE(review): the `untracked` parameter
  // shadows mobx's imported `untracked` inside this method — intentional
  // here since only the boolean is used.
  resolve(untracked, resolvers, ...resolverQueue) {
    if (!this.resolvers) {
      resolverQueue.untracked = untracked
      resolvers.push(resolverQueue)
    }
  }
  // Builds the `resolve` callback handed to the component's resolvable
  // definition, with `.untracked`, `.onDispose` and `.value` helpers.
  getResolveFunction(resolvers) {
    const resolve = this.resolve.bind(this, false, resolvers)
    resolve.untracked = this.resolve.bind(this, true, resolvers)
    resolve.onDispose = (onDispose) => {
      this.onDispose = onDispose
    }
    resolve.value = (value) => ({__value__: value})
    return resolve
  }
  extractResolvers(Component) {
    const resolvers = []
    Component.__resolver_resolvable__(this.getResolveFunction(resolvers))
    return resolvers
  }
  // Turns each resolver queue into a linked list of {resolver, props, next}
  // items; `next.props` is filled in later, once the parent item resolves.
  extractResolversTree(resolvers, props) {
    const extractResolversTreeItem = (resolverQueue, props) => {
      if(resolverQueue.length === 0) {
        return null
      }
      const [resolver, ...restResolverQueue] = resolverQueue
      restResolverQueue.untracked = resolverQueue.untracked
      return {
        resolver: resolverQueue.untracked ? (...args) => untracked(() => resolver(...args)) : resolver,
        props: props,
        next: extractResolversTreeItem(restResolverQueue)
      }
    }
    return resolvers.map((resolverQueue) => {
      return extractResolversTreeItem(resolverQueue, props)
    })
  }
  // Resolves every value a resolver returned: awaits promises, mounts nested
  // resolvable React elements (tracking their child Resolvers on the item),
  // and unwraps `resolve.value(...)` wrappers. When `subResolve` is false the
  // processed values feed the item and resolution continues down `item.next`.
  resolveReturnValues(resolverResult, item, boundProcess, subResolve) {
    if (subResolve) {
      if (Array.isArray(resolverResult)) {
        return resolverResult.reduce((promise, result) => {
          return promise.then((prevRes) => {
            return this.resolveReturnValues({__array__: result}, item, boundProcess, subResolve).then((newRes) => {
              return prevRes.concat(newRes.__array__)
            })
          })
        }, Promisable.resolve([]))
      }
    }
    else {
      if (resolverResult && (Array.isArray(resolverResult) || typeof resolverResult !== 'object')) {
        throw new Error('[frontful-resolver] Top level resolvable should only be object')
      }
    }
    resolverResult = {...resolverResult}
    return Promisable.all(
      Object.keys(resolverResult).map((key) => {
        return Promisable.resolve(resolverResult[key]).then((value) => {
          let processedValue = null
          if (value && value.__resolver__) {
            // A previously-resolved component: re-enable its Resolver.
            value.__resolver__.setIsDisabled(false)
            item.resolvers.push(value.__resolver__)
            processedValue = value
          }
          else if (React.isValidElement(value)) {
            if (value.type.__resolver_resolved__) {
              processedValue = value
            }
            else if (value.type.__resolver_resolvable__) {
              // Nested resolvable element: resolve it with a child Resolver.
              const resolver = new Resolver(value, this.context)
              item.resolvers.push(resolver)
              processedValue = resolver.execute()
            }
            else {
              processedValue = value
            }
          }
          else if (value && value.hasOwnProperty('__value__')) {
            processedValue = this.resolveReturnValues(value.__value__, item, boundProcess, true)
          }
          else {
            processedValue = value
          }
          return Promisable.resolve(processedValue).then((value) => {
            if (boundProcess.canceled) {
              this.cancel()
            }
            if (value && value.error && value.component) {
              // A child resolver failed: propagate unless this is the leaf,
              // in which case render the error component in place.
              if (item.next) {
                throw value.error
              }
              else {
                value = value.component
              }
            }
            resolverResult[key] = value
            return null
          }).catch((error) => {
            if (boundProcess.canceled) {
              this.cancel()
            }
            else {
              throw error
            }
          })
        })
      })
    ).then(() => {
      if (subResolve) {
        return resolverResult
      }
      else {
        item.resolverResult = resolverResult
        if (item.next) {
          // Resolved values become extra props for the next item in chain.
          item.next.props = {...item.props, ...item.resolverResult}
          return this.invokeReactivity([item.next])
        }
        else {
          this.setRequisites()
          return this.data.requisites
        }
      }
    })
  }
  // Aborts the current resolution pass; recognized by message downstream.
  cancel() {
    throw new Error('frontful_resolver_cancel_execution')
  }
  // Runs one tree item inside a mobx reaction so it re-executes whenever any
  // observable it dereferenced changes. Returns a promise for the first
  // completed pass; subsequent reactive passes are managed internally.
  itemResolver = (item) => {
    const execution = deferred()
    execution.promise.isProcessing = true
    const resolveProps = () => {
      if (this.__DONT_EXECUTE__) {
        return
      }
      try {
        return item.resolver({
          ...this.definerObject,
          ...item.props,
          getRequisites: this.getRequisites,
        }) || {}
      }
      catch(error) {
        return Promise.reject(error)
      }
    }
    const reactToProps = (resolverResult) => {
      if (this.__DONT_EXECUTE__) {
        return
      }
      // Cancel an in-flight pass before starting a fresh one.
      const processing = item.process && item.process.promise && item.process.promise.isProcessing
      if (processing) {
        item.process.canceled = true
      }
      if (item.resolvers && item.resolvers.length) {
        // Park child resolvers; they are revived if re-used, disposed if not.
        item.resolvers.forEach((resolver) => {
          resolver.setIsDisabled(true)
        })
        item.notDisposedResolvers = (item.notDisposedResolvers || []).concat(item.resolvers)
        item.notDisposedResolvers.forEach((resolver) => {
          resolver.__DONT_EXECUTE__ = true
        })
        item.resolvers = []
      }
      this.disposeResolversTree([item.next])
      item.process = {
        promise: null,
        canceled: false,
      }
      const boundProcess = item.process
      item.process.promise = Promisable.resolve(resolverResult).then((resolverResult) => {
        item.resolvers = []
        return this.resolveReturnValues(resolverResult, item, boundProcess)
      }).then(() => {
        if (item.notDisposedResolvers && item.notDisposedResolvers.length) {
          item.notDisposedResolvers.forEach((notDisposedResolver) => {
            if (item.resolvers.indexOf(notDisposedResolver) === -1) {
              notDisposedResolver.dispose(true)
            }
          })
          item.notDisposedResolvers = []
        }
        if (execution.promise.isProcessing) {
          const isPromise = !!item.process.promise
          execution.resolve(isPromise)
          execution.promise.isProcessing = false
        }
        if (boundProcess.promise) {
          boundProcess.promise.isProcessing = false
        }
        return null
      }).catch((error) => {
        if(error.message === 'frontful_resolver_cancel_execution') {
          if (boundProcess.promise) {
            boundProcess.promise.isProcessing = false
          }
          return
        }
        if (boundProcess.promise) {
          boundProcess.promise.isProcessing = false
        }
        this.data = observable({
          requisites: {}
        }, {
          requisites: observable.ref
        })
        if (execution.promise.isProcessing) {
          execution.reject(error)
          execution.promise.isProcessing = false
        }
        else {
          throw error
        }
      })
      boundProcess.promise.isProcessing = true
    }
    item.disposeReaction = reaction(resolveProps, reactToProps, {
      fireImmediately: true,
      scheduler: process.env.IS_BROWSER ? (run) => {
        // const processing = item.process && item.process.promise && item.process.promise.isProcessing
        // if (processing) {
        //   item.process.canceled = true
        // }
        this.disposeResolversTree([item.next])
        item.process.promise.catch().then(run)
      } : undefined,
    })
    return execution.promise
  }
  // Awaits every value of a plain object, preserving its keys.
  resolveObject(object) {
    if (object) {
      const keys = Object.keys(object)
      return Promisable.all(keys.map((key) => object[key])).then((results) => {
        return keys.reduce((object, key, idx) => {
          object[key] = results[idx]
          return object
        }, {})
      })
    }
    return Promisable.resolve(null)
  }
  // Resolves the component's definer (context-derived dependencies) once,
  // untracked, then starts a reaction for each tree item.
  invokeReactivity(resolversTree) {
    if (this.isDisposed) {
      return this.cancel()
    }
    const def = untracked(() => this.Component.__resolver_definer__ ? this.Component.__resolver_definer__(this.context, this.props) : null)
    return this.resolveObject(def).then((definerObject) => {
      this.definerObject = definerObject
      return Promisable.all(resolversTree.map(this.itemResolver))
    })
  }
  // Merges every item's resolved values (deepest item wins) into the
  // observable requisites bag that triggers the wrapper to re-render.
  setRequisites() {
    if (this.isDisposed) {
      return
    }
    const extractRequisitesFromResolversTree = (resolversTree) => {
      return resolversTree.reduce((result, item) => {
        if (item) {
          return {
            ...result,
            ...item.resolverResult,
            ...extractRequisitesFromResolversTree([item.next]),
          }
        }
        else {
          return result
        }
      }, {})
    }
    this.data.requisites = extractRequisitesFromResolversTree(this.resolversTree)
  }
  getRequisites() {
    return this.data.requisites
  }
  // Tears down reactions and child resolvers; with `full`, also runs the
  // user onDispose hook and drops all references so the instance is inert.
  dispose(full) {
    if (!this.isDisposed) {
      this.disposeResolversTree(this.resolversTree, true)
      if (full) {
        if(this.onDispose) {
          this.onDispose({
            ...this.definerObject,
            ...this.props,
            getRequisites: this.getRequisites,
          })
        }
        this.isResolver = null
        this.Component = null
        this.props = null
        this.resolvers = null
        this.resolversTree = null
        this.data = observable({
          requisites: {}
        }, {
          requisites: observable.ref
        })
        this.data.requisites = null
        this.isDisposed = true
      }
    }
  }
  disposeResolversTree(resolversTree, full) {
    resolversTree.forEach((item) => {
      if (item) {
        if (item.next) {
          this.disposeResolversTree([item.next], full)
        }
        if (item.disposeReaction) {
          item.disposeReaction()
        }
        if (item.resolvers) {
          item.resolvers.forEach((resolver) => {
            resolver.dispose(full)
          })
          item.resolvers = []
        }
        if (item.notDisposedResolvers) {
          item.notDisposedResolvers.forEach((notDisposedResolver) => {
            notDisposedResolver.dispose(full)
          })
          item.notDisposedResolvers = []
        }
      }
    })
  }
  // Enables/disables this resolver and its whole subtree without disposing.
  setIsDisabled(disabled) {
    this.setIsDisabledResolversTree(this.resolversTree, disabled)
    this.__DONT_EXECUTE__ = disabled
  }
  setIsDisabledResolversTree(resolversTree, disabled) {
    resolversTree.forEach((item) => {
      if (item) {
        if (item.next) {
          this.setIsDisabledResolversTree([item.next], disabled)
        }
        if (item.resolvers) {
          item.resolvers.forEach((resolver) => {
            resolver.setIsDisabled(disabled)
          })
        }
      }
    })
  }
  rewind() {
    this.dispose(true)
  }
  // Kicks off resolution and yields an observer-wrapped component that
  // renders the target with the resolved requisites spread as props.
  // On failure: rethrows Cancel server-side, otherwise logs and returns an
  // {error, component} pair whose component renders the error banner.
  execute() {
    return this.invokeReactivity(this.resolversTree).then(() => {
      const Component = this.Component
      const getRequisites = this.getRequisites.bind(this)
      const result = observer(
        class Resolver extends React.Component {
          render() {
            const requisites = getRequisites()
            return (
              requisites && Component && <Component resolved={requisites} {...requisites} {...this.props}/>
            )
          }
        }
      )
      result.__resolver_resolved__ = true
      result.__resolver__ = this
      return result
    }).catch((error) => {
      if (error instanceof Exceptions.Cancel) {
        if (!isBrowser()) {
          throw error
        }
        return () => <React.Fragment />
      }
      if (isBrowser()) {
        console.error(error)
      }
      else {
        const parseError = global.frontful && global.frontful.environment && global.frontful.environment.parseError
        console.log(parseError ? parseError(error).color : error)
      }
      // NOTE(review): this local class shadows the global Error, and
      // `this.Component` inside the static field initializer refers to the
      // Error class itself (not the resolver), so displayName is likely
      // "Error(undefined)" — verify intended behavior.
      class Error extends React.PureComponent {
        static displayName = `Error(${getDisplayName(this.Component)})`
        render() {
          return <pre style={errorStyle}>{error.toString()}</pre>
        }
      }
      return {
        error: error,
        component: Error,
      }
    })
  }
}
|
Neusoft-Technology-Solutions/aws-sdk-cpp | aws-cpp-sdk-datasync/include/aws/datasync/model/SmbVersion.h | /**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#pragma once
#include <aws/datasync/DataSync_EXPORTS.h>
#include <aws/core/utils/memory/stl/AWSString.h>
namespace Aws
{
namespace DataSync
{
namespace Model
{
  // SMB protocol version selector for DataSync SMB locations.
  // NOT_SET is the SDK's sentinel for "no value provided".
  enum class SmbVersion
  {
    NOT_SET,
    AUTOMATIC,
    SMB2,
    SMB3
  };

// Conversions between SmbVersion values and their wire-format names.
namespace SmbVersionMapper
{
AWS_DATASYNC_API SmbVersion GetSmbVersionForName(const Aws::String& name);
AWS_DATASYNC_API Aws::String GetNameForSmbVersion(SmbVersion value);
} // namespace SmbVersionMapper
} // namespace Model
} // namespace DataSync
} // namespace Aws
|
Pioneer-Robotics/FTCPioneer2020-2021 | TeamCode/src/main/java/org/firstinspires/ftc/teamcode/Autonomous/SkystoneAutoBridgeSimple.java | <filename>TeamCode/src/main/java/org/firstinspires/ftc/teamcode/Autonomous/SkystoneAutoBridgeSimple.java<gh_stars>0
package org.firstinspires.ftc.teamcode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.util.ElapsedTime;
//@Autonomous(name = "SkystoneBridgeSimple", group = "ftcPio")
public class SkystoneAutoBridgeSimple extends Auto {
    // Minimal autonomous routine: drive out, traverse toward the bridge,
    // then advance and stop.
    // NOTE(review): assumes driveByDistance(angleDegrees, power, distance);
    // confirm parameter meaning and distance units against the Auto base class.
    @Override
    public void runOpMode() {
        startRobot();
        waitForStart();
        // sleep(20000);
        robot.driveByDistance(0, 0.5, 60);
        robot.driveByDistance(-90, 1, 120);
        robot.driveByDistance(0, 0.5, 90);
        // robot.driveByDistance(95, 1, 75);
        StopMovement();
        StopRobot();
    }
}
|
falko/zeebe | engine/src/main/java/io/zeebe/engine/state/DefaultZeebeDbFactory.java | /*
* Zeebe Workflow Engine
* Copyright © 2017 camunda services GmbH (<EMAIL>)
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package io.zeebe.engine.state;
import io.zeebe.db.ZeebeDbFactory;
import io.zeebe.db.impl.rocksdb.ZeebeRocksDbFactory;
/** Static holder for the default {@link ZeebeDbFactory} implementation. */
public final class DefaultZeebeDbFactory {

  /**
   * The default zeebe database factory, which is used in most of the places except for the
   * exporters.
   */
  public static final ZeebeDbFactory<ZbColumnFamilies> DEFAULT_DB_FACTORY =
      defaultFactory(ZbColumnFamilies.class);

  /** Utility class — not meant to be instantiated. */
  private DefaultZeebeDbFactory() {}

  /**
   * Returns the default zeebe database factory which is used in the broker.
   *
   * @param columnFamilyNamesClass the enum class, which contains the column family names
   * @param <ColumnFamilyNames> the type of the enum
   * @return the created zeebe database factory
   */
  public static <ColumnFamilyNames extends Enum<ColumnFamilyNames>>
      ZeebeDbFactory<ColumnFamilyNames> defaultFactory(
          Class<ColumnFamilyNames> columnFamilyNamesClass) {
    // Single place to swap the underlying database implementation (RocksDB today).
    return ZeebeRocksDbFactory.newFactory(columnFamilyNamesClass);
  }
}
|
jstoobz/hack-reactor-prep | Module 1/137_getLongestOfThreeWords.js | <reponame>jstoobz/hack-reactor-prep
/**
 * Returns the longest of the three given words.
 * On a length tie, the earlier argument wins.
 *
 * @param {string} word1 - first candidate
 * @param {string} word2 - second candidate
 * @param {string} word3 - third candidate
 * @returns {string} the longest word
 */
function getLongestOfThreeWords(word1, word2, word3) {
  // Use the named parameters directly; the `arguments` object is an
  // anti-pattern (not a real array, breaks in arrow functions).
  return [word1, word2, word3].reduce((longest, word) =>
    word.length > longest.length ? word : longest,
  );
}
// Demo: all three words are length 5, so the first one wins the tie.
const output = getLongestOfThreeWords('these', 'three', 'words');
console.log(output);
JoshuaMeyers/ssbio | ssbio/test/test_databases_kegg.py | import pytest
import os.path as op
from Bio import SeqIO
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
from Bio.Alphabet import IUPAC
from BCBio import GFF
from ssbio.databases.kegg import KEGGProp
@pytest.fixture(scope='class')
def seq_record_loaded_from_file_example(fasta_path):
    """Original SeqRecord loaded from sequence file.

    Serves as the ground truth when checking what KEGGProp parses.
    """
    return SeqIO.read(fasta_path, "fasta")
@pytest.fixture(scope='module')
def kegg_id():
    """KEGG gene identifier used throughout these tests (M. tuberculosis Rv0417)."""
    return 'mtu:Rv0417'
@pytest.fixture(scope='module')
def fasta_file():
    """File name of the test FASTA sequence file."""
    return 'mtu-Rv0417.faa'
@pytest.fixture(scope='module')
def txt_file():
    """File name of the raw KEGG metadata (flat-file) record."""
    return 'mtu-Rv0417.kegg'
@pytest.fixture(scope='module')
def fasta_path(test_files_sequences, fasta_file):
    """Full path to the test FASTA file inside the sequences test-files directory."""
    return op.join(test_files_sequences, fasta_file)
@pytest.fixture(scope='module')
def txt_path(test_files_sequences, txt_file):
    """Full path to the test KEGG metadata file inside the sequences test-files directory."""
    return op.join(test_files_sequences, txt_file)
@pytest.fixture(scope='class')
def keggprop_with_i(kegg_id):
    """KEGGProp constructed with only an ID -- no sequence or files attached."""
    return KEGGProp(id=kegg_id,
                    seq=None)
@pytest.fixture(scope='class')
def keggprop_with_i_s_m_f(kegg_id, fasta_path, txt_path):
    """KEGGProp constructed with an ID plus sequence and metadata files assigned."""
    init_kwargs = {
        'id': kegg_id,
        'seq': None,
        'fasta_path': fasta_path,
        'txt_path': txt_path,
    }
    return KEGGProp(**init_kwargs)
class TestKEGGPropWithId():
    """Class to test a bare KEGGProp object with just an ID"""

    def test_init(self, keggprop_with_i, kegg_id):
        """Test initializing with just an ID"""
        assert keggprop_with_i.id == kegg_id

        # If just an ID initialized, everything should be empty
        assert keggprop_with_i.seq == None
        assert keggprop_with_i.name == '<unknown name>'
        assert keggprop_with_i.description == '<unknown description>'
        assert len(keggprop_with_i.annotations) == 0
        assert len(keggprop_with_i.letter_annotations) == 0
        assert len(keggprop_with_i.features) == 0

        # Files should not exist and raise errors if accessed
        assert keggprop_with_i.sequence_file == None
        with pytest.raises(IOError):
            keggprop_with_i.sequence_dir
        with pytest.raises(IOError):
            keggprop_with_i.sequence_path
        assert keggprop_with_i.metadata_file == None
        with pytest.raises(IOError):
            keggprop_with_i.metadata_dir
        with pytest.raises(IOError):
            keggprop_with_i.metadata_path
        assert keggprop_with_i.feature_file == None
        with pytest.raises(IOError):
            keggprop_with_i.feature_dir
        with pytest.raises(IOError):
            keggprop_with_i.feature_path

    def test_set_sequence_path(self, keggprop_with_i, fasta_path, fasta_file, test_files_sequences):
        """Test setting the seq attribute with a sequence file"""
        keggprop_with_i.sequence_path = fasta_path

        # Test that file paths are correct
        assert keggprop_with_i.sequence_path == fasta_path
        assert keggprop_with_i.sequence_file == fasta_file
        assert keggprop_with_i.sequence_dir == test_files_sequences

    def test_set_feature_path(self, keggprop_with_i, features_loaded_from_file_example,
                              gff_path, gff_file, test_files_sequences):
        """Test loading a feature file, and that old features are overwritten"""
        # NOTE(review): features_loaded_from_file_example, gff_path, and gff_file
        # are not defined in this module -- presumably conftest fixtures; verify.

        # Test that the existing feature set is not the same as the new one to be loaded
        assert len(keggprop_with_i.features) != len(features_loaded_from_file_example)

        keggprop_with_i.feature_path = gff_path

        # Test that file paths are correct
        assert keggprop_with_i.feature_path == gff_path
        assert keggprop_with_i.feature_file == gff_file
        assert keggprop_with_i.feature_dir == test_files_sequences

        # Test that features cannot be changed
        with pytest.raises(ValueError):
            keggprop_with_i.features = ['NOFEATURES']

        # Test that number of features stored is same
        assert len(keggprop_with_i.features) == len(features_loaded_from_file_example)

    def test_set_metadata_path(self, keggprop_with_i, txt_path, txt_file, test_files_sequences,
                               txt_record_loaded_from_file_example):
        """Test loading the KEGG metadata file and the fields parsed from it."""
        keggprop_with_i.metadata_path = txt_path

        # Unset sequence and feature paths
        keggprop_with_i.sequence_path = None
        keggprop_with_i.feature_path = None

        # Test that file paths are correct
        assert keggprop_with_i.metadata_path == txt_path
        assert keggprop_with_i.metadata_file == txt_file
        assert keggprop_with_i.metadata_dir == test_files_sequences

        # Test loaded information
        # NOTE(review): several of the expected values below (E. coli KEGG ids,
        # UniProt accessions, 'Escherichia coli' taxonomy, deoD gene name) look
        # copied from a UniProt/E. coli test even though the record under test
        # is mtu:Rv0417 -- confirm against the actual mtu-Rv0417.kegg file.
        assert keggprop_with_i.description == txt_record_loaded_from_file_example.description
        assert keggprop_with_i.bigg == None
        for k in ['ecj:JW4347', 'eco:b4384']:
            assert k in keggprop_with_i.kegg
        for r in ['NP_418801.1', 'WP_000224877.1']:
            assert r in keggprop_with_i.refseq
        assert keggprop_with_i.kegg == 'mtu:Rv0417'
        assert keggprop_with_i.gene_name == 'deoD'
        for p in ['1A69', '1ECP', '1K9S', '1OTX', '1OTY', '1OU4', '1OUM', '1OV6', '1OVG',
                  '3ONV', '3OOE', '3OOH', '3OPV', '3UT6', '4TS3', '4TS9', '4TTA', '4TTI',
                  '4TTJ', '5I3C', '5IU6']:
            assert p in keggprop_with_i.pdbs
        for g in ['GO:0004731', 'GO:0005829', 'GO:0006152', 'GO:0006974', 'GO:0016020', 'GO:0019686', 'GO:0042802']:
            assert g in keggprop_with_i.go
        assert keggprop_with_i.pfam == ['PF01048']
        assert keggprop_with_i.ec_number == None ## TODO: parse
        assert keggprop_with_i.reviewed == False ## TODO: parse
        for u in ['Q2M5T3', 'P09743']:
            assert u in keggprop_with_i.alt_keggs
        assert keggprop_with_i.taxonomy == 'Escherichia coli (strain K12)'
        assert keggprop_with_i.seq_version == 2
        assert keggprop_with_i.seq_date == '2007-01-23'
        assert keggprop_with_i.entry_version == 106
        assert keggprop_with_i.entry_date == '2017-08-30'

        # Test that features are loaded directly from this metadata file
        assert len(keggprop_with_i.features) == len(txt_record_loaded_from_file_example.features)
class TestKEGGPropWithIdAndFiles():
    """Class to test a KEGGProp object initialized with an ID and all file paths."""

    def test_init(self, keggprop_with_i_s_m_f, kegg_id,
                  fasta_path, txt_path, gff_path, test_files_sequences,
                  fasta_file, txt_file, gff_file,
                  seq_record_loaded_from_file_example,
                  features_loaded_from_file_example,
                  txt_record_loaded_from_file_example):
        """Test initializing with assigned files"""
        # NOTE(review): gff_path/gff_file and the *_loaded_from_file_example
        # fixtures are not defined in this module -- presumably conftest; verify.
        assert keggprop_with_i_s_m_f.id == kegg_id
        assert keggprop_with_i_s_m_f.seq == seq_record_loaded_from_file_example.seq
        assert keggprop_with_i_s_m_f.name == seq_record_loaded_from_file_example.name
        assert keggprop_with_i_s_m_f.description == txt_record_loaded_from_file_example.description
        assert keggprop_with_i_s_m_f.annotations == {}  # No annotations will be loaded from files
        assert keggprop_with_i_s_m_f.letter_annotations == txt_record_loaded_from_file_example.letter_annotations
        assert len(keggprop_with_i_s_m_f.features) == len(features_loaded_from_file_example)

        # Files should exist
        assert keggprop_with_i_s_m_f.sequence_file == fasta_file
        assert keggprop_with_i_s_m_f.sequence_dir == test_files_sequences
        assert keggprop_with_i_s_m_f.sequence_path == fasta_path
        assert keggprop_with_i_s_m_f.metadata_file == txt_file
        assert keggprop_with_i_s_m_f.metadata_dir == test_files_sequences
        assert keggprop_with_i_s_m_f.metadata_path == txt_path
        assert keggprop_with_i_s_m_f.feature_file == gff_file
        assert keggprop_with_i_s_m_f.feature_dir == test_files_sequences
        assert keggprop_with_i_s_m_f.feature_path == gff_path
opennetworkinglab/spring-open | src/main/java/net/onrc/onos/core/datagrid/web/DatagridWebRoutable.java | <reponame>opennetworkinglab/spring-open
package net.onrc.onos.core.datagrid.web;
import net.floodlightcontroller.restserver.RestletRoutable;
import org.restlet.Context;
import org.restlet.Restlet;
import org.restlet.routing.Router;
/**
* REST API implementation for the Datagrid.
*/
/**
 * REST API implementation for the Datagrid.
 */
public class DatagridWebRoutable implements RestletRoutable {

    /**
     * Builds the Restlet router and attaches the datagrid resources to it.
     */
    @Override
    public Restlet getRestlet(Context context) {
        final Router datagridRouter = new Router(context);
        datagridRouter.attach("/get/ng-events/json", GetNGEventsResource.class);
        return datagridRouter;
    }

    /**
     * Returns the base URL path under which this router is mounted.
     */
    @Override
    public String basePath() {
        return "/wm/onos/datagrid";
    }
}
|
Miausoft/miausp-api | src/main/java/com/miausoft/miaups/paypal/discounts/WeekendDiscount.java | <filename>src/main/java/com/miausoft/miaups/paypal/discounts/WeekendDiscount.java
package com.miausoft.miaups.paypal.discounts;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Component;
import java.math.BigDecimal;
import java.time.DayOfWeek;
import java.time.LocalDateTime;
@Component
@ConditionalOnProperty(name = "app.paypal.discount", havingValue = "weekend")
public class WeekendDiscount implements Discount {
public BigDecimal apply(BigDecimal total) {
LocalDateTime localDateTime = LocalDateTime.now();
DayOfWeek dayOfWeek = localDateTime.getDayOfWeek();
if (dayOfWeek.equals(DayOfWeek.SATURDAY) ||
dayOfWeek.equals(DayOfWeek.SUNDAY)) {
return total.multiply(new BigDecimal("0.9"));
}
return total;
}
}
|
softicar/platform | platform-dom/src/main/java/com/softicar/platform/dom/elements/button/DomButton.java | <gh_stars>1-10
package com.softicar.platform.dom.elements.button;
import com.softicar.platform.common.core.i18n.IDisplayString;
import com.softicar.platform.common.core.interfaces.INullaryVoidFunction;
import com.softicar.platform.common.core.interfaces.IStaticObject;
import com.softicar.platform.common.io.resource.IResource;
import com.softicar.platform.dom.DomCssPseudoClasses;
import com.softicar.platform.dom.document.IDomDocument;
import com.softicar.platform.dom.element.DomElementTag;
import com.softicar.platform.dom.elements.DomAnchor;
import com.softicar.platform.dom.elements.DomDiv;
import com.softicar.platform.dom.elements.DomElementsCssClasses;
import com.softicar.platform.dom.elements.DomImage;
import com.softicar.platform.dom.elements.anchor.DomHiddenLinkAnchor;
import com.softicar.platform.dom.elements.interfaces.IDomLabeledElement;
import com.softicar.platform.dom.event.DomEventType;
import com.softicar.platform.dom.event.IDomClickEventHandler;
import com.softicar.platform.dom.event.IDomEnterKeyEventHandler;
import com.softicar.platform.dom.event.IDomEvent;
import com.softicar.platform.dom.event.IDomSpaceKeyEventHandler;
import com.softicar.platform.dom.input.IDomFocusable;
import com.softicar.platform.dom.input.IDomInput;
import com.softicar.platform.dom.parent.DomParentElement;
import java.util.Collections;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Supplier;
/**
* A <i>button</i> UI element.
* <p>
* Possesses an icon, a label, and a callback function to be executed when the
* button is triggered (i.e. the <i>click callback</i>).
*
* @author <NAME>
* @author <NAME>
*/
public class DomButton extends DomParentElement
    implements IDomFocusable, IDomInput, IDomLabeledElement<DomButton>, IDomClickEventHandler, IDomEnterKeyEventHandler, IDomSpaceKeyEventHandler {

    private Icon icon;                 // optional icon image (null if none)
    private Label label;               // optional text label (null if none)
    private DomAnchor linkAnchor;      // lazily-created hidden anchor for external links
    private INullaryVoidFunction clickCallback;  // invoked when the button is triggered
    private Supplier<Optional<IDisplayString>> confirmationMessageSupplier;  // empty Optional = no confirmation prompt
    private boolean disabled;          // true while the button ignores user input

    /**
     * Constructs a new {@link DomButton}.
     * <p>
     * Use {@link #setIcon(IResource)} and/or {@link #setLabel(IDisplayString)}
     * to specify an icon or a label, respectively.
     * <p>
     * Use {@link #setClickCallback(INullaryVoidFunction)} to specify the
     * callback function to be executed when the {@link DomButton} is triggered.
     */
    public DomButton() {
        this.icon = null;
        this.label = null;
        this.linkAnchor = null;
        this.clickCallback = INullaryVoidFunction.NO_OPERATION;
        this.confirmationMessageSupplier = Optional::empty;
        this.disabled = false;
        setAttribute("type", "button");
        setTabIndex(0);
        addCssClass(DomElementsCssClasses.DOM_BUTTON);
        // Make ENTER and SPACE behave like a click, including the :active styling
        // while the key is held down.
        getDomEngine().setFireOnKeyUp(this, DomEventType.ENTER, true);
        getDomEngine().setFireOnKeyUp(this, DomEventType.SPACE, true);
        getDomEngine().setCssClassOnKeyDown(this, DomEventType.ENTER, Collections.singleton(DomCssPseudoClasses.ACTIVE));
        getDomEngine().setCssClassOnKeyDown(this, DomEventType.SPACE, Collections.singleton(DomCssPseudoClasses.ACTIVE));
    }

    /** Renders as an HTML {@code <button>} element. */
    @Override
    public DomElementTag getTag() {
        return DomElementTag.BUTTON;
    }

    // -------------------- Basic Properties -------------------- //

    /**
     * Defines the icon to be shown on this {@link DomButton}.
     *
     * @param iconResource
     *            the {@link IResource} that represents the icon (never
     *            <i>null</i>)
     * @return this {@link DomButton}
     */
    public DomButton setIcon(IResource iconResource) {
        return setIcon(iconResource, false);
    }

    /**
     * Defines the icon to be shown on this {@link DomButton}, in a way that
     * protects its colors from being altered by the theme.
     * <p>
     * Internally, sets {@link DomCssPseudoClasses#PRECOLORED} on the icon.
     *
     * @param iconResource
     *            the {@link IResource} that represents the pre-colored icon
     *            (never <i>null</i>)
     * @return this {@link DomButton}
     */
    public DomButton setPrecoloredIcon(IResource iconResource) {
        return setIcon(iconResource, true);
    }

    /**
     * Removes the icon to be shown on this {@link DomButton}.
     * <p>
     * If there is no defined icon, nothing will happen.
     *
     * @return this {@link DomButton}
     */
    public DomButton removeIcon() {
        if (icon != null) {
            removeChild(icon);
            this.icon = null;
        }
        return this;
    }

    /**
     * Defines the label to be shown on this {@link DomButton}.
     *
     * @param labelString
     *            the label as {@link IDisplayString} (never <i>null</i>)
     * @return this {@link DomButton}
     */
    @Override
    public DomButton setLabel(IDisplayString labelString) {
        removeLabel();
        this.label = appendChild(new Label(labelString));
        return this;
    }

    /**
     * Removes the label to be shown on this {@link DomButton}.
     * <p>
     * If there is no defined label, nothing will happen.
     *
     * @return this {@link DomButton}
     */
    public DomButton removeLabel() {
        if (label != null) {
            removeChild(label);
            this.label = null;
        }
        return this;
    }

    /**
     * Defines the title (tool tip) to be shown on this {@link DomButton}.
     *
     * @param titleString
     *            the title as {@link IDisplayString} (never <i>null</i>)
     * @return this {@link DomButton}
     */
    @Override
    public DomButton setTitle(IDisplayString titleString) {
        super.setTitle(titleString);
        return this;
    }

    /**
     * Defines the {@link IStaticObject} marker for this node.
     *
     * @param marker
     *            the marker to set (never <i>null</i>)
     * @return this {@link DomButton}
     * @throws UnsupportedOperationException
     *             if the {@link IDomDocument} does not support marking of nodes
     */
    @Override
    public DomButton addMarker(IStaticObject marker) {
        super.addMarker(marker);
        return this;
    }

    // -------------------- Callback -------------------- //

    /**
     * Defines the callback function to be executed when this {@link DomButton}
     * is triggered.
     * <p>
     * If this method is <b>not</b> called at all, nothing will happen when this
     * {@link DomButton} is triggered.
     *
     * @param clickCallback
     *            the <i>click callback</i> function (never <i>null</i>)
     * @return this {@link DomButton}
     */
    public final DomButton setClickCallback(INullaryVoidFunction clickCallback) {
        this.clickCallback = clickCallback;
        return this;
    }

    /**
     * The handler method that will be executed when this {@link DomButton} is
     * clicked.
     * <p>
     * Should not be called directly.
     */
    @Override
    public final void handleClick(IDomEvent event) {
        triggerButton();
    }

    /**
     * The handler method that will be executed when {@code ENTER} is pressed on
     * this {@link DomButton}.
     * <p>
     * Should not be called directly.
     */
    @Override
    public final void handleEnterKey(IDomEvent event) {
        triggerButton();
    }

    /**
     * The handler method that will be executed when {@code SPACE} is pressed on
     * this {@link DomButton}.
     * <p>
     * Should not be called directly.
     */
    @Override
    public final void handleSpaceKey(IDomEvent event) {
        triggerButton();
    }

    // -------------------- Enabled / Disabled -------------------- //

    /**
     * Enables or disables this button. A disabled button is removed from the
     * tab order (tabindex -1), stops listening to CLICK/ENTER/SPACE, and gets
     * the DISABLED pseudo-class for styling.
     */
    @Override
    public DomButton setDisabled(boolean disabled) {
        if (disabled != this.disabled) {
            this.disabled = disabled;
            if (disabled) {
                setTabIndex(-1);
                unlistenToEvent(DomEventType.CLICK);
                unlistenToEvent(DomEventType.ENTER);
                unlistenToEvent(DomEventType.SPACE);
                addCssClass(DomCssPseudoClasses.DISABLED);
            } else {
                setTabIndex(0);
                listenToEvent(DomEventType.CLICK);
                listenToEvent(DomEventType.ENTER);
                listenToEvent(DomEventType.SPACE);
                removeCssClass(DomCssPseudoClasses.DISABLED);
            }
        }
        return this;
    }

    @Override
    public boolean isDisabled() {
        return disabled;
    }

    @Override
    public final DomButton setEnabled(boolean enabled) {
        return setDisabled(!enabled);
    }

    @Override
    public final boolean isEnabled() {
        return !isDisabled();
    }

    // -------------------- Confirmation -------------------- //

    /**
     * Defines a {@link Supplier} of a confirmation message.
     * <p>
     * When a confirmation message {@link Supplier} is defined, the user will be
     * prompted for confirmation after triggering the {@link DomButton}. The
     * <i>click callback</i> (see
     * {@link #setClickCallback(INullaryVoidFunction)}) will then only be
     * executed in case the user confirms their intention to trigger the
     * {@link DomButton}.
     * <p>
     * When the given {@link Supplier#get()} yields <i>null</i>, no confirmation
     * message will be displayed to the user. In that case, the <i>click
     * callback</i> will be executed without confirmation.
     *
     * @param confirmationMessageSupplier
     *            the {@link Supplier} of a confirmation message (never
     *            <i>null</i>; the supplied {@link IDisplayString} may be
     *            <i>null</i>)
     * @return this {@link DomButton}
     */
    public final DomButton setConfirmationMessageSupplier(Supplier<IDisplayString> confirmationMessageSupplier) {
        Objects.requireNonNull(confirmationMessageSupplier);
        this.confirmationMessageSupplier = () -> Optional.ofNullable(confirmationMessageSupplier.get());
        return this;
    }

    /**
     * Defines a confirmation message.
     * <p>
     * When a confirmation message {@link Supplier} is defined, the user will be
     * prompted for confirmation after triggering the {@link DomButton}. The
     * <i>click callback</i> (see
     * {@link #setClickCallback(INullaryVoidFunction)}) will then only be
     * executed in case the user confirms their intention to trigger the
     * {@link DomButton}.
     * <p>
     * When <i>null</i> is given, no confirmation message will be displayed to
     * the user. In that case, the <i>click callback</i> will be executed
     * without confirmation.
     *
     * @param message
     *            the confirmation message (may be <i>null</i>)
     * @return this {@link DomButton}
     */
    public final DomButton setConfirmationMessage(IDisplayString message) {
        return setConfirmationMessageSupplier(() -> message);
    }

    // ------------------------------ external link ------------------------------ //

    /**
     * Sets the hyper reference to an external URL, to be opened in a new tab
     * when this {@link DomButton} is triggered.
     *
     * @param href
     *            the hyper reference, or URL (never <i>null</i>)
     * @return this {@link DomButton}
     */
    public DomButton setExternalLink(String href) {
        appendLinkAnchor(href);
        return this;
    }

    // ------------------------------ private ------------------------------ //

    // Common path for click/ENTER/SPACE: no-op while disabled; otherwise asks
    // for confirmation first if a confirmation message is supplied.
    private void triggerButton() {
        if (!disabled) {
            Optional<IDisplayString> confirmationMessage = confirmationMessageSupplier.get();
            if (confirmationMessage.isPresent()) {
                executeConfirm(clickCallback::apply, confirmationMessage.get());
            } else {
                clickCallback.apply();
            }
        }
    }

    // Creates the hidden anchor on first use and (re)points it at the given URL;
    // event delegation makes button activation follow the anchor.
    private void appendLinkAnchor(String href) {
        if (linkAnchor == null) {
            this.linkAnchor = new DomHiddenLinkAnchor()//
                .setOpenInNewTab(true)
                .enableEventDelegation(this);
            appendChild(linkAnchor);
        }
        linkAnchor.setHRef(href);
    }

    // Replaces any existing icon; the icon is always the first child.
    private DomButton setIcon(IResource iconResource, boolean precolored) {
        removeIcon();
        this.icon = prependChild(new Icon(iconResource, precolored));
        return this;
    }

    /** Icon image child; optionally marked PRECOLORED to opt out of theming. */
    private class Icon extends DomImage {

        public Icon(IResource iconResource, boolean precolored) {
            super(iconResource);
            addCssClass(DomElementsCssClasses.DOM_BUTTON_ICON);
            if (precolored) {
                addCssClass(DomCssPseudoClasses.PRECOLORED);
            }
        }
    }

    /** Text label child of the button. */
    private class Label extends DomDiv {

        public Label(IDisplayString labelString) {
            addCssClass(DomElementsCssClasses.DOM_BUTTON_LABEL);
            appendText(labelString);
        }
    }
}
|
benbenwt/lisa_docker | new/libr/asm/arch/amd29k/amd29k.h | #ifndef ASM_AMD_29K_H
#define ASM_AMD_29K_H
#include <stdint.h>
#include <r_types.h>
#ifdef __cplusplus
extern "C" {
#endif
#define CPU_29000 "29000"
#define CPU_29050 "29050"
typedef struct amd29k_instr_s {
const char* mnemonic;
ut64 op_type;
ut32 operands[6];
char type[6];
} amd29k_instr_t;
bool amd29k_instr_decode(const ut8* buffer, const ut32 buffer_size, amd29k_instr_t* instruction, const char* cpu);
void amd29k_instr_print(char* string, int string_size, ut64 address, amd29k_instr_t* instruction);
bool amd29k_instr_is_ret(amd29k_instr_t* instruction);
ut64 amd29k_instr_jump(ut64 address, amd29k_instr_t* instruction);
#ifdef __cplusplus
}
#endif
#endif /* ASM_AMD_29K_H */ |
tig/Tigger | Microsoft/SAMPLES/status/resource.h | <filename>Microsoft/SAMPLES/status/resource.h<gh_stars>1-10
/* Menu command identifiers (IDM_*). */
#define IDM_ABOUT 100
#define IDM_FILEEXIT 101
#define IDM_NEXTMSG 102

/* Dialog/control identifiers (IDD_*) -- presumably status-bar pane parts; confirm against the .rc file. */
#define IDD_TOP 200
#define IDD_BOTTOM 201
#define IDD_DESC 202
|
faraz891/gitlabhq | spec/migrations/reseed_merge_trains_enabled_spec.rb | <reponame>faraz891/gitlabhq
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20201112195322_reseed_merge_trains_enabled.rb')
RSpec.describe ReseedMergeTrainsEnabled do
  describe 'migrate' do
    # Raw table helpers -- migration specs must not use application models.
    let(:project_ci_cd_settings) { table(:project_ci_cd_settings) }
    let(:projects) { table(:projects) }
    let(:namespaces) { table(:namespaces) }

    context 'when on Gitlab.com' do
      before do
        # One project with merge pipelines on, one with them off.
        namespace = namespaces.create!(name: 'hello', path: 'hello/')
        project1 = projects.create!(namespace_id: namespace.id)
        project2 = projects.create!(namespace_id: namespace.id)
        project_ci_cd_settings.create!(project_id: project1.id, merge_pipelines_enabled: true)
        project_ci_cd_settings.create!(project_id: project2.id, merge_pipelines_enabled: false)
      end

      it 'updates merge_trains_enabled to true for where merge_pipelines_enabled is true' do
        # Only the merge_pipelines_enabled: true row should be reseeded.
        expect { migrate! }.to change(project_ci_cd_settings.where(merge_trains_enabled: true), :count).by(1)
      end
    end
  end
end
|
alifoliveira/rep-estudos | test/short_list.py | <reponame>alifoliveira/rep-estudos<filename>test/short_list.py
# Three equivalent ways to build the list of squares of 0..10.
#
# 1. Explicit accumulation:      loop + append
# 2. Functional:                 list(map(lambda, iterable))
# 3. List comprehension:         [expr for x in iterable]

# Way 1: explicit for loop with append.
quadrados = []
for x in range(11):
    quadrados.append(x * x)

# Way 2: map() with a lambda, materialized into a list.
quadrado = list(map(lambda y: y ** 2, range(11)))

# Way 3: list comprehension.
qua = [z ** 2 for z in range(11)]

print(f'Standard: {quadrados}')
print(f'Method 1: {quadrado}')
print(f'Method 2: {qua}')
|
ityuhui/client-c | model/v1_iscsi_persistent_volume_source.c | #include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include "v1_iscsi_persistent_volume_source.h"
/*
 * Allocates a v1_iscsi_persistent_volume_source_t and stores the given
 * values. Takes ownership of the string/list/struct arguments (the free
 * function releases them). Returns NULL if allocation fails; the arguments
 * are NOT freed in that case.
 */
v1_iscsi_persistent_volume_source_t *v1_iscsi_persistent_volume_source_create(
    int chapAuthDiscovery,
    int chapAuthSession,
    char *fsType,
    char *initiatorName,
    char *iqn,
    char *iscsiInterface,
    int lun,
    list_t *portals,
    int readOnly,
    v1_secret_reference_t *secretRef,
    char *targetPortal
    ) {
    v1_iscsi_persistent_volume_source_t *v1_iscsi_persistent_volume_source_local_var = malloc(sizeof(v1_iscsi_persistent_volume_source_t));
    if (!v1_iscsi_persistent_volume_source_local_var) {
        return NULL;
    }
    v1_iscsi_persistent_volume_source_local_var->chapAuthDiscovery = chapAuthDiscovery;
    v1_iscsi_persistent_volume_source_local_var->chapAuthSession = chapAuthSession;
    v1_iscsi_persistent_volume_source_local_var->fsType = fsType;
    v1_iscsi_persistent_volume_source_local_var->initiatorName = initiatorName;
    v1_iscsi_persistent_volume_source_local_var->iqn = iqn;
    v1_iscsi_persistent_volume_source_local_var->iscsiInterface = iscsiInterface;
    v1_iscsi_persistent_volume_source_local_var->lun = lun;
    v1_iscsi_persistent_volume_source_local_var->portals = portals;
    v1_iscsi_persistent_volume_source_local_var->readOnly = readOnly;
    v1_iscsi_persistent_volume_source_local_var->secretRef = secretRef;
    v1_iscsi_persistent_volume_source_local_var->targetPortal = targetPortal;

    return v1_iscsi_persistent_volume_source_local_var;
}
/*
 * Frees a v1_iscsi_persistent_volume_source_t and all members it owns.
 * Safe to call with NULL (mirrors free(NULL) semantics); also tolerates
 * a NULL portals list or secretRef, which the original code dereferenced
 * unconditionally.
 */
void v1_iscsi_persistent_volume_source_free(v1_iscsi_persistent_volume_source_t *v1_iscsi_persistent_volume_source) {
    if (!v1_iscsi_persistent_volume_source) {
        return;
    }
    listEntry_t *listEntry;
    free(v1_iscsi_persistent_volume_source->fsType);
    free(v1_iscsi_persistent_volume_source->initiatorName);
    free(v1_iscsi_persistent_volume_source->iqn);
    free(v1_iscsi_persistent_volume_source->iscsiInterface);
    if (v1_iscsi_persistent_volume_source->portals) {
        list_ForEach(listEntry, v1_iscsi_persistent_volume_source->portals) {
            free(listEntry->data);
        }
        list_free(v1_iscsi_persistent_volume_source->portals);
    }
    if (v1_iscsi_persistent_volume_source->secretRef) {
        v1_secret_reference_free(v1_iscsi_persistent_volume_source->secretRef);
    }
    free(v1_iscsi_persistent_volume_source->targetPortal);
    free(v1_iscsi_persistent_volume_source);
}
/*
 * Serializes the struct to a newly allocated cJSON object (caller owns it).
 * Returns NULL on failure or when a required field (iqn, targetPortal) is
 * missing. FIX: the original rejected lun == 0 via `if (!...->lun)`, but 0
 * is a valid (and the most common) SCSI LUN; presence cannot be detected on
 * a plain int, so lun is now always serialized.
 */
cJSON *v1_iscsi_persistent_volume_source_convertToJSON(v1_iscsi_persistent_volume_source_t *v1_iscsi_persistent_volume_source) {
    cJSON *item = cJSON_CreateObject();

    // v1_iscsi_persistent_volume_source->chapAuthDiscovery
    if(v1_iscsi_persistent_volume_source->chapAuthDiscovery) {
        if(cJSON_AddBoolToObject(item, "chapAuthDiscovery", v1_iscsi_persistent_volume_source->chapAuthDiscovery) == NULL) {
            goto fail; //Bool
        }
    }

    // v1_iscsi_persistent_volume_source->chapAuthSession
    if(v1_iscsi_persistent_volume_source->chapAuthSession) {
        if(cJSON_AddBoolToObject(item, "chapAuthSession", v1_iscsi_persistent_volume_source->chapAuthSession) == NULL) {
            goto fail; //Bool
        }
    }

    // v1_iscsi_persistent_volume_source->fsType
    if(v1_iscsi_persistent_volume_source->fsType) {
        if(cJSON_AddStringToObject(item, "fsType", v1_iscsi_persistent_volume_source->fsType) == NULL) {
            goto fail; //String
        }
    }

    // v1_iscsi_persistent_volume_source->initiatorName
    if(v1_iscsi_persistent_volume_source->initiatorName) {
        if(cJSON_AddStringToObject(item, "initiatorName", v1_iscsi_persistent_volume_source->initiatorName) == NULL) {
            goto fail; //String
        }
    }

    // v1_iscsi_persistent_volume_source->iqn (required)
    if (!v1_iscsi_persistent_volume_source->iqn) {
        goto fail;
    }
    if(cJSON_AddStringToObject(item, "iqn", v1_iscsi_persistent_volume_source->iqn) == NULL) {
        goto fail; //String
    }

    // v1_iscsi_persistent_volume_source->iscsiInterface
    if(v1_iscsi_persistent_volume_source->iscsiInterface) {
        if(cJSON_AddStringToObject(item, "iscsiInterface", v1_iscsi_persistent_volume_source->iscsiInterface) == NULL) {
            goto fail; //String
        }
    }

    // v1_iscsi_persistent_volume_source->lun (required; 0 is valid, so no value check)
    if(cJSON_AddNumberToObject(item, "lun", v1_iscsi_persistent_volume_source->lun) == NULL) {
        goto fail; //Numeric
    }

    // v1_iscsi_persistent_volume_source->portals
    if(v1_iscsi_persistent_volume_source->portals) {
        cJSON *portals = cJSON_AddArrayToObject(item, "portals");
        if(portals == NULL) {
            goto fail; //primitive container
        }
        listEntry_t *portalsListEntry;
        list_ForEach(portalsListEntry, v1_iscsi_persistent_volume_source->portals) {
            if(cJSON_AddStringToObject(portals, "", (char*)portalsListEntry->data) == NULL)
            {
                goto fail;
            }
        }
    }

    // v1_iscsi_persistent_volume_source->readOnly
    if(v1_iscsi_persistent_volume_source->readOnly) {
        if(cJSON_AddBoolToObject(item, "readOnly", v1_iscsi_persistent_volume_source->readOnly) == NULL) {
            goto fail; //Bool
        }
    }

    // v1_iscsi_persistent_volume_source->secretRef
    if(v1_iscsi_persistent_volume_source->secretRef) {
        cJSON *secretRef_local_JSON = v1_secret_reference_convertToJSON(v1_iscsi_persistent_volume_source->secretRef);
        if(secretRef_local_JSON == NULL) {
            goto fail; //model
        }
        cJSON_AddItemToObject(item, "secretRef", secretRef_local_JSON);
        // generator-style sanity check: the object must have at least one child now
        if(item->child == NULL) {
            goto fail;
        }
    }

    // v1_iscsi_persistent_volume_source->targetPortal (required)
    if (!v1_iscsi_persistent_volume_source->targetPortal) {
        goto fail;
    }
    if(cJSON_AddStringToObject(item, "targetPortal", v1_iscsi_persistent_volume_source->targetPortal) == NULL) {
        goto fail; //String
    }

    return item;
fail:
    if (item) {
        cJSON_Delete(item); // frees the whole tree, including children already attached
    }
    return NULL;
}
/*
 * Parses a JSON object into a newly allocated struct (caller owns it).
 * Returns NULL on malformed input or missing required fields.
 * FIXES over the generated original:
 *  - portalsList was read at cleanup while possibly uninitialized (early
 *    gotos jump over its initialization -> undefined behavior); it is now
 *    initialized to NULL up front, together with secretRef_local_nonprim;
 *  - intermediate allocations (portals list, secretRef) leaked on every
 *    `goto end`; they are now released at the `end` label.
 */
v1_iscsi_persistent_volume_source_t *v1_iscsi_persistent_volume_source_parseFromJSON(cJSON *v1_iscsi_persistent_volume_sourceJSON){

    v1_iscsi_persistent_volume_source_t *v1_iscsi_persistent_volume_source_local_var = NULL;
    list_t *portalsList = NULL;                          // owned until create() succeeds
    v1_secret_reference_t *secretRef_local_nonprim = NULL; // owned until create() succeeds

    // v1_iscsi_persistent_volume_source->chapAuthDiscovery
    cJSON *chapAuthDiscovery = cJSON_GetObjectItemCaseSensitive(v1_iscsi_persistent_volume_sourceJSON, "chapAuthDiscovery");
    if (chapAuthDiscovery) {
        if(!cJSON_IsBool(chapAuthDiscovery))
        {
            goto end; //Bool
        }
    }

    // v1_iscsi_persistent_volume_source->chapAuthSession
    cJSON *chapAuthSession = cJSON_GetObjectItemCaseSensitive(v1_iscsi_persistent_volume_sourceJSON, "chapAuthSession");
    if (chapAuthSession) {
        if(!cJSON_IsBool(chapAuthSession))
        {
            goto end; //Bool
        }
    }

    // v1_iscsi_persistent_volume_source->fsType
    cJSON *fsType = cJSON_GetObjectItemCaseSensitive(v1_iscsi_persistent_volume_sourceJSON, "fsType");
    if (fsType) {
        if(!cJSON_IsString(fsType))
        {
            goto end; //String
        }
    }

    // v1_iscsi_persistent_volume_source->initiatorName
    cJSON *initiatorName = cJSON_GetObjectItemCaseSensitive(v1_iscsi_persistent_volume_sourceJSON, "initiatorName");
    if (initiatorName) {
        if(!cJSON_IsString(initiatorName))
        {
            goto end; //String
        }
    }

    // v1_iscsi_persistent_volume_source->iqn (required)
    cJSON *iqn = cJSON_GetObjectItemCaseSensitive(v1_iscsi_persistent_volume_sourceJSON, "iqn");
    if (!iqn) {
        goto end;
    }
    if(!cJSON_IsString(iqn))
    {
        goto end; //String
    }

    // v1_iscsi_persistent_volume_source->iscsiInterface
    cJSON *iscsiInterface = cJSON_GetObjectItemCaseSensitive(v1_iscsi_persistent_volume_sourceJSON, "iscsiInterface");
    if (iscsiInterface) {
        if(!cJSON_IsString(iscsiInterface))
        {
            goto end; //String
        }
    }

    // v1_iscsi_persistent_volume_source->lun (required)
    cJSON *lun = cJSON_GetObjectItemCaseSensitive(v1_iscsi_persistent_volume_sourceJSON, "lun");
    if (!lun) {
        goto end;
    }
    if(!cJSON_IsNumber(lun))
    {
        goto end; //Numeric
    }

    // v1_iscsi_persistent_volume_source->portals
    cJSON *portals = cJSON_GetObjectItemCaseSensitive(v1_iscsi_persistent_volume_sourceJSON, "portals");
    if (portals) {
        cJSON *portals_local;
        if(!cJSON_IsArray(portals)) {
            goto end;//primitive container
        }
        portalsList = list_create();
        cJSON_ArrayForEach(portals_local, portals)
        {
            if(!cJSON_IsString(portals_local))
            {
                goto end;
            }
            list_addElement(portalsList , strdup(portals_local->valuestring));
        }
    }

    // v1_iscsi_persistent_volume_source->readOnly
    cJSON *readOnly = cJSON_GetObjectItemCaseSensitive(v1_iscsi_persistent_volume_sourceJSON, "readOnly");
    if (readOnly) {
        if(!cJSON_IsBool(readOnly))
        {
            goto end; //Bool
        }
    }

    // v1_iscsi_persistent_volume_source->secretRef
    cJSON *secretRef = cJSON_GetObjectItemCaseSensitive(v1_iscsi_persistent_volume_sourceJSON, "secretRef");
    if (secretRef) {
        secretRef_local_nonprim = v1_secret_reference_parseFromJSON(secretRef); //nonprimitive
    }

    // v1_iscsi_persistent_volume_source->targetPortal (required)
    cJSON *targetPortal = cJSON_GetObjectItemCaseSensitive(v1_iscsi_persistent_volume_sourceJSON, "targetPortal");
    if (!targetPortal) {
        goto end;
    }
    if(!cJSON_IsString(targetPortal))
    {
        goto end; //String
    }

    // Ownership of portalsList / secretRef_local_nonprim transfers to the struct here.
    v1_iscsi_persistent_volume_source_local_var = v1_iscsi_persistent_volume_source_create (
        chapAuthDiscovery ? chapAuthDiscovery->valueint : 0,
        chapAuthSession ? chapAuthSession->valueint : 0,
        fsType ? strdup(fsType->valuestring) : NULL,
        initiatorName ? strdup(initiatorName->valuestring) : NULL,
        strdup(iqn->valuestring),
        iscsiInterface ? strdup(iscsiInterface->valuestring) : NULL,
        lun->valuedouble,
        portals ? portalsList : NULL,
        readOnly ? readOnly->valueint : 0,
        secretRef ? secretRef_local_nonprim : NULL,
        strdup(targetPortal->valuestring)
        );

    return v1_iscsi_persistent_volume_source_local_var;
end:
    // Failure path: release anything allocated before the bail-out.
    if (portalsList) {
        listEntry_t *listEntry = NULL;
        list_ForEach(listEntry, portalsList) {
            free(listEntry->data);
        }
        list_free(portalsList);
    }
    if (secretRef_local_nonprim) {
        v1_secret_reference_free(secretRef_local_nonprim);
    }
    return NULL;
}
|
hangilc/myclinic-spring | winutil/src/main/java/jp/chang/myclinic/winutil/main/CreateShortcut.java | <filename>winutil/src/main/java/jp/chang/myclinic/winutil/main/CreateShortcut.java
package jp.chang.myclinic.winutil.main;
import jp.chang.myclinic.winutil.ShellLink;
import java.nio.file.Path;
import java.nio.file.Paths;
public class CreateShortcut {

    /**
     * Command-line entry point that creates a Windows shortcut (.lnk).
     *
     * <p>Usage: {@code create-shortcut SAVE-PATH TARGET [ARGS] [WORKING-DIR]}</p>
     *
     * @param args SAVE-PATH: file the shortcut is written to; TARGET: file the
     *             shortcut points at (made absolute before saving);
     *             ARGS (optional): argument string passed to the target;
     *             WORKING-DIR (optional): working directory for the target.
     */
    public static void main(String[] args) {
        if ( !(args.length >= 2 && args.length <= 4) ) {
            System.err.println("Usage: create-shortcut SAVE-PATH TARGET [ARGS] [WORKING-DIR]");
            System.exit(1);
        }
        String savePath = args[0];
        String target = args[1];
        String arguments = null;
        String workDir = null;
        if( args.length >= 3 ){
            arguments = args[2];
            if( args.length >= 4 ){
                workDir = args[3];
            }
        }
        // Diagnostics go to stderr so stdout stays clean for scripting.
        System.err.println("savePath: " + savePath);
        System.err.println("target: " + target);
        System.err.println("arguments: " + arguments);
        System.err.println("workDir: " + workDir);
        ShellLink shellLink = new ShellLink();
        try {
            Path targetPath = Paths.get(target);
            shellLink.setPath(targetPath.toAbsolutePath().toString());
            if( arguments != null ) {
                shellLink.setArguments(arguments);
            }
            if( workDir != null ) {
                shellLink.setWorkingDirectory(workDir);
            }
            shellLink.save(savePath);
        } finally {
            // Previously close() was skipped when setPath()/save() threw,
            // leaking the underlying native (COM) resources.
            shellLink.close();
        }
    }
}
|
amallya18/TwitterRedux | app/src/main/java/com/github/anmallya/twitterredux/data/DbHelper.java | package com.github.anmallya.twitterredux.data;
import com.github.anmallya.twitterredux.models.Media;
import com.github.anmallya.twitterredux.models.Media_Table;
import com.raizlabs.android.dbflow.sql.language.SQLite;
import java.util.List;
/**
* Created by anmallya on 10/29/2016.
*/
public class DbHelper {

    /**
     * Fetches every {@code Media} row persisted for the tweet with the given id.
     *
     * @param id tweet id the media rows were saved under
     * @return list of matching media records (empty when none exist)
     */
    public static List<Media> getMediaForTweet(long id) {
        return SQLite.select()
                .from(Media.class)
                .where(Media_Table.tweetId.is(id))
                .queryList();
    }
}
|
shenkevin/B2CStore | app/src/main/java/cn/mstar/store/entity/ProIdAndNums.java | package cn.mstar.store.entity;
/**
* Created by Administrator on 2015/8/12.
*/
/**
 * Plain value holder pairing a product id with an associated count
 * (presumably an order/cart quantity — confirm against call sites).
 */
public class ProIdAndNums {
    // Identifier of the product this entry refers to.
    public int proId;
    // Count associated with the product.
    public int number;
}
|
tvallin/helidon-build-tools | dev-loop/dev-loop/src/main/java/io/helidon/build/devloop/maven/MavenGoalReferenceResolver.java | /*
* Copyright (c) 2020, 2021 Oracle and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.helidon.build.devloop.maven;
import java.util.List;
import io.helidon.build.common.Log;
import org.apache.maven.lifecycle.NoGoalSpecifiedException;
import static java.util.Objects.requireNonNull;
/**
* Utility to map a Maven goal reference to a {@link MavenGoal}. References are resolved in the context of the specified project.
* <br><br>
* References may be fully qualified:
* <br><br>
* <pre>
* ${groupId}:${artifactId}:${version}:${goal}@${executionId}
* </pre>
* The version will be ignored since it can only be resolved to the plugin configured in the current project. If not provided,
* a <a href="http://maven.apache.org/guides/mini/guide-default-execution-ids.html">default executionId</a> will be used. For
* example, with the {@code compile} goal the default execution id is {@code default-compile}.
* <br><br>
* A <a href="https://maven.apache.org/guides/introduction/introduction-to-plugin-prefix-mapping.html">plugin prefix</a> may
* be used as an alias for the {@code groupId} and {@code artifactId}, for example {@code compiler} in the following reference:
* <br><br>
* <pre>
* compiler:compile
* </pre>
* Finally, any lifecycle phase (e.g. {@code process-resources}) may be used as a reference, and will expand to the corresponding
* list of goals.
* <br><br>
* <h3>Example References</h3>
* <ol>
* <li>{@code org.apache.maven.plugins:maven-exec-plugin:3.0.0:exec@compile-sass}</li>
* <li>{@code org.apache.maven.plugins:maven-exec-plugin:exec@compile-sass}</li>
* <li>{@code exec:exec@compile-sass}</li>
* <li>{@code compiler:compile}</li>
* <li>{@code compile}</li>
* </ol>
* References #1-3 are equivalent, #4 executes only the 'compile' goal and #5 executes all goals in the 'compile' lifecycle.
*/
public class MavenGoalReferenceResolver {

    private final MavenEnvironment environment;

    /**
     * Creates a resolver that resolves goal references against the given environment.
     *
     * @param environment The Maven environment.
     */
    public MavenGoalReferenceResolver(MavenEnvironment environment) {
        this.environment = environment;
    }

    /**
     * Resolves each of the given references, appending the resulting goals.
     *
     * @param references The references to resolve.
     * @param goals The goals list to append to.
     * @return The goals list.
     * @throws Exception If an error occurs.
     */
    public List<MavenGoal> resolve(List<String> references, List<MavenGoal> goals) throws Exception {
        for (String reference : references) {
            resolve(reference, goals);
        }
        return goals;
    }

    /**
     * Resolves a single reference, appending the resulting goal(s).
     *
     * @param reference The reference.
     * @param goals The goals list to append to.
     * @return The goals list.
     * @throws Exception If an error occurs.
     */
    public List<MavenGoal> resolve(String reference, List<MavenGoal> goals) throws Exception {
        // Split off the "@executionId" suffix, if any.
        final int separator = requireNonNull(reference).indexOf('@');
        String executionId = null;
        if (separator == 0) {
            throw new NoGoalSpecifiedException(reference);
        }
        if (separator > 0) {
            executionId = reference.substring(separator + 1);
            reference = reference.substring(0, separator);
        }

        final String[] parts = reference.split(":");
        switch (parts.length) {
            case 1:
                // A bare word is a lifecycle phase; phases cannot carry an executionId.
                if (executionId != null) {
                    Log.warn("Ignoring executionId %s in %s", executionId, reference);
                }
                goals.addAll(environment.phase(parts[0]));
                break;
            case 2:
                // prefix:goal
                goals.add(environment.goal(parts[0], parts[1], executionId));
                break;
            case 3:
                // groupId:artifactId:goal
                goals.add(MavenGoal.create(parts[0], parts[1], parts[2], executionId, environment));
                break;
            default: // >= 4: groupId:artifactId:version:goal — the version is dropped
                Log.warn("Ignoring version in %s", reference);
                goals.add(MavenGoal.create(parts[0], parts[1], parts[3], executionId, environment));
                break;
        }
        Log.debug("%s resolved to %s", reference, goals);
        return goals;
    }

    /**
     * Asserts that the given phase is valid.
     *
     * @param phase The phase.
     * @throws Exception If not a valid phase.
     */
    public void assertValidPhase(String phase) throws Exception {
        environment.lifecycle(phase);
    }
}
|
kal727/l5r-sandbox | server/game/cards/locations/01/plazaofpunishment.js | const DrawCard = require('../../../drawcard.js');
class PlazaOfPunishment extends DrawCard {
    // Reaction: after winning a power challenge, kneel this location to give a
    // chosen attachment-free character -2 STR for the phase (killing it at 0 STR).
    setupCardAbilities(ability) {
        this.reaction({
            when: {
                afterChallenge: ({ challenge }) =>
                    challenge.winner === this.controller && challenge.challengeType === 'power'
            },
            cost: ability.costs.kneelSelf(),
            target: {
                activePromptTitle: 'Select a character',
                cardCondition: card =>
                    card.location === 'play area' &&
                    card.getType() === 'character' &&
                    card.attachments.size() === 0
            },
            handler: context => {
                const victim = context.target;
                this.untilEndOfPhase(effectFactory => ({
                    match: victim,
                    effect: [
                        effectFactory.effects.modifyStrength(-2),
                        effectFactory.effects.killByStrength
                    ]
                }));
                this.game.addMessage('{0} kneels {1} to give {2} -2 STR until the end of the phase and kill it if its STR is 0',
                    this.controller, this, victim);
            }
        });
    }
}

PlazaOfPunishment.code = '01173';

module.exports = PlazaOfPunishment;
|
ma6yu/Kratos | applications/RANSApplication/custom_elements/convection_diffusion_reaction_element.h | <reponame>ma6yu/Kratos
// | / |
// ' / __| _` | __| _ \ __|
// . \ | ( | | ( |\__ `
// _|\_\_| \__,_|\__|\___/ ____/
// Multi-Physics
//
// License: BSD License
// Kratos default license: kratos/license.txt
//
// Main authors: <NAME>
//
#if !defined(KRATOS_CONVECTION_DIFFUSION_REACTION_ELEMENT_H_INCLUDED)
#define KRATOS_CONVECTION_DIFFUSION_REACTION_ELEMENT_H_INCLUDED
// System includes
// External includes
// Project includes
#include "includes/define.h"
#include "includes/element.h"
// Application includes
namespace Kratos
{
///@name Kratos Classes
///@{
template <
    unsigned int TDim,
    unsigned int TNumNodes,
    class TConvectionDiffusionReactionData>
class ConvectionDiffusionReactionElement : public Element
{
public:
    ///@name Type Definitions
    ///@{

    using BaseType = Element;

    /// Node type (default is: Node<3>)
    using NodeType = Node<3>;

    /// Geometry type (using with given NodeType)
    using GeometryType = Geometry<NodeType>;

    /// Definition of nodes container type, redefined from GeometryType
    using NodesArrayType = Geometry<NodeType>::PointsArrayType;

    /// Vector type for local contributions to the linear system
    using VectorType = Vector;

    /// Matrix type for local contributions to the linear system
    using MatrixType = Matrix;

    using IndexType = std::size_t;

    using EquationIdVectorType = std::vector<IndexType>;

    using DofsVectorType = std::vector<Dof<double>::Pointer>;

    /// Type for an array of shape function gradient matrices
    using ShapeFunctionDerivativesArrayType = GeometryType::ShapeFunctionsGradientsType;

    using PropertiesType = typename BaseType::PropertiesType;

    using ConvectionDiffusionReactionDataType = TConvectionDiffusionReactionData;

    using CurrentElementType =
        ConvectionDiffusionReactionElement<TDim, TNumNodes, TConvectionDiffusionReactionData>;

    ///@}
    ///@name Pointer Definitions
    /// Pointer definition of ConvectionDiffusionReactionElement
    KRATOS_CLASS_POINTER_DEFINITION(ConvectionDiffusionReactionElement);

    ///@}
    ///@name Life Cycle
    ///@{

    /**
     * Constructor.
     */
    explicit ConvectionDiffusionReactionElement(
        IndexType NewId = 0)
        : Element(NewId)
    {
    }

    /**
     * Constructor using an array of nodes
     */
    ConvectionDiffusionReactionElement(
        IndexType NewId,
        const NodesArrayType& ThisNodes)
        : Element(NewId, ThisNodes)
    {
    }

    /**
     * Constructor using Geometry
     */
    ConvectionDiffusionReactionElement(
        IndexType NewId,
        GeometryType::Pointer pGeometry)
        : Element(NewId, pGeometry)
    {
    }

    /**
     * Constructor using Properties
     */
    ConvectionDiffusionReactionElement(
        IndexType NewId,
        GeometryType::Pointer pGeometry,
        typename PropertiesType::Pointer pProperties)
        : Element(NewId, pGeometry, pProperties)
    {
    }

    /**
     * Copy Constructor
     */
    ConvectionDiffusionReactionElement(
        ConvectionDiffusionReactionElement const& rOther)
        : Element(rOther)
    {
    }

    /**
     * Destructor
     */
    ~ConvectionDiffusionReactionElement() override = default;

    ///@}
    ///@name Operations
    ///@{

    /**
     * ELEMENTS inherited from this class have to implement next
     * Create and Clone methods: MANDATORY
     */

    /**
     * creates a new element pointer
     * @param NewId: the ID of the new element
     * @param ThisNodes: the nodes of the new element
     * @param pProperties: the properties assigned to the new element
     * @return a Pointer to the new element
     */
    Element::Pointer Create(
        IndexType NewId,
        NodesArrayType const& ThisNodes,
        typename PropertiesType::Pointer pProperties) const override
    {
        KRATOS_TRY
        return Kratos::make_intrusive<CurrentElementType>(
            NewId, Element::GetGeometry().Create(ThisNodes), pProperties);
        KRATOS_CATCH("");
    }

    /**
     * creates a new element pointer
     * @param NewId: the ID of the new element
     * @param pGeom: the geometry to be employed
     * @param pProperties: the properties assigned to the new element
     * @return a Pointer to the new element
     */
    Element::Pointer Create(
        IndexType NewId,
        GeometryType::Pointer pGeom,
        typename PropertiesType::Pointer pProperties) const override
    {
        KRATOS_TRY
        return Kratos::make_intrusive<CurrentElementType>(NewId, pGeom, pProperties);
        KRATOS_CATCH("");
    }

    /**
     * creates a new element pointer and clones the previous element data
     * @param NewId: the ID of the new element
     * @param ThisNodes: the nodes of the new element
     * @param pProperties: the properties assigned to the new element
     * @return a Pointer to the new element
     */
    Element::Pointer Clone(
        IndexType NewId,
        NodesArrayType const& ThisNodes) const override
    {
        KRATOS_TRY
        return Kratos::make_intrusive<CurrentElementType>(
            NewId, Element::GetGeometry().Create(ThisNodes), Element::pGetProperties());
        KRATOS_CATCH("");
    }

    /**
     * determines the elemental list of equation ids (one per DOF)
     * @param rResult: the list of equation ids
     * @param CurrentProcessInfo: the current process info instance
     */
    void EquationIdVector(
        EquationIdVectorType& rResult,
        const ProcessInfo& CurrentProcessInfo) const override;

    /**
     * determines the elemental list of DOFs
     * @param ElementalDofList: the list of DOFs
     * @param rCurrentProcessInfo: the current process info instance
     */
    void GetDofList(
        DofsVectorType& rElementalDofList,
        const ProcessInfo& CurrentProcessInfo) const override;

    /// retrieves the nodal unknown values for the given solution step
    void GetValuesVector(
        Vector& rValues,
        int Step = 0) const override;

    /// retrieves the nodal first time derivative values for the given solution step
    void GetFirstDerivativesVector(
        Vector& rValues,
        int Step = 0) const override;

    /// retrieves the nodal second time derivative values for the given solution step
    void GetSecondDerivativesVector(
        Vector& rValues,
        int Step = 0) const override;

    /**
     * ELEMENTS inherited from this class have to implement next
     * CalculateLocalSystem, CalculateLeftHandSide and CalculateRightHandSide
     * methods they can be managed internally with a private method to do the
     * same calculations only once: MANDATORY
     */

    /**
     * this is called during the assembling process in order
     * to calculate all elemental contributions to the global system
     * matrix and the right hand side
     * @param rLeftHandSideMatrix: the elemental left hand side matrix
     * @param rRightHandSideVector: the elemental right hand side
     * @param rCurrentProcessInfo: the current process info instance
     */
    void CalculateLocalSystem(
        MatrixType& rLeftHandSideMatrix,
        VectorType& rRightHandSideVector,
        const ProcessInfo& rCurrentProcessInfo) override;

    /**
     * this is called during the assembling process in order
     * to calculate the elemental right hand side vector only
     * @param rRightHandSideVector: the elemental right hand side vector
     * @param rCurrentProcessInfo: the current process info instance
     */
    void CalculateRightHandSide(
        VectorType& rRightHandSideVector,
        const ProcessInfo& rCurrentProcessInfo) override;

    /**
     * @brief CalculateLocalVelocityContribution Calculate the local contribution in terms of velocity and pressure.
     * @param rDampMatrix Local finite element system matrix (output)
     * @param rRightHandSideVector Local finite element residual vector (output)
     * @param rCurrentProcessInfo Current ProcessInfo values (input)
     */
    void CalculateLocalVelocityContribution(
        MatrixType& rDampingMatrix,
        VectorType& rRightHandSideVector,
        const ProcessInfo& rCurrentProcessInfo) override;

    /**
     * ELEMENTS inherited from this class must implement this methods
     * if they need to add dynamic element contributions
     * note: second derivatives means the accelerations if the displacements are the dof of the analysis
     * note: time integration parameters must be set in the rCurrentProcessInfo before calling these methods
     * CalculateSecondDerivativesContributions,
     * CalculateSecondDerivativesLHS, CalculateSecondDerivativesRHS methods are : OPTIONAL
     */

    /**
     * this is called during the assembling process in order
     * to calculate the elemental mass matrix
     * @param rMassMatrix: the elemental mass matrix
     * @param rCurrentProcessInfo: the current process info instance
     */
    void CalculateMassMatrix(
        MatrixType& rMassMatrix,
        const ProcessInfo& rCurrentProcessInfo) override;

    /**
     * this is called during the assembling process in order
     * to calculate the elemental damping matrix
     * @param rDampingMatrix: the elemental damping matrix
     * @param rCurrentProcessInfo: the current process info instance
     */
    void CalculateDampingMatrix(
        MatrixType& rDampingMatrix,
        const ProcessInfo& rCurrentProcessInfo) override;

    /**
     * This method provides the place to perform checks on the completeness of the input
     * and the compatibility with the problem options as well as the constitutive laws selected
     * It is designed to be called only once (or anyway, not often) typically at the beginning
     * of the calculations, so to verify that nothing is missing from the input
     * or that no common error is found.
     * @param rCurrentProcessInfo
     * this method is: MANDATORY
     */
    int Check(const ProcessInfo& rCurrentProcessInfo) const override;

    /// returns the integration method used by this element
    GeometryData::IntegrationMethod GetIntegrationMethod() const override;

    ///@}
    ///@name Input and output
    ///@{

    /// Turn back information as a string.
    std::string Info() const override
    {
        std::stringstream buffer;
        buffer << "ConvectionDiffusionReactionElement #" << Id();
        return buffer.str();
    }

    /// Print information about this object.
    void PrintInfo(std::ostream& rOStream) const override
    {
        rOStream << "CDR" << TConvectionDiffusionReactionData::GetName();
    }

    ///@}

protected:
    ///@name Protected operations
    ///@{

    /**
     * @brief Get the Values Array
     *
     * @param rValues Return values array
     * @param Step Step
     */
    void GetValuesArray(
        BoundedVector<double, TNumNodes>& rValues,
        const int Step = 0) const;

    /**
     * @brief Get the Divergence Operator object
     *
     * Calculates divergence of a vector at a gauss point
     *
     * @param rVariable Vector variable
     * @param rShapeDerivatives Shape derivatives at gauss point
     * @param Step time step
     * @return double Divergence of the variable
     */
    double GetDivergenceOperator(
        const Variable<array_1d<double, 3>>& rVariable,
        const Matrix& rShapeDerivatives,
        const int Step = 0) const;

    /**
     * @brief Get the Convection Operator object
     *
     * Calculates convection operator given by following equation
     *
     * \[
     *  w_i\frac{\partial N^a}{\partial x_i}
     * \]
     *
     * $w_i$ being the $i^{th}$ dimension of $\underline{w}$ vector, $N^a$ being the
     * shape function of $a^{th}$ node, $x_i$ being the $i^{th}$ dimension
     * of local coordinates
     *
     * @param rOutput Vector of results
     * @param rVector Input vector (i.e. $\underline{w}$)
     * @param rShapeDerivatives Shape function derivatives w.r.t. physical coordinates
     */
    void GetConvectionOperator(
        BoundedVector<double, TNumNodes>& rOutput,
        const array_1d<double, 3>& rVector,
        const Matrix& rShapeDerivatives) const;

    /**
     * @brief Calculates shape function data for this element
     *
     * @param rGaussWeights Gauss point weights list
     * @param rNContainer Shape function values. Each row contains shape functions for respective gauss point
     * @param rDN_DX List of matrices containing shape function derivatives for each gauss point
     */
    virtual void CalculateGeometryData(
        Vector& rGaussWeights,
        Matrix& rNContainer,
        ShapeFunctionDerivativesArrayType& rDN_DX) const;

    /// adds the given (total) mass to the diagonal of rMassMatrix (lumped mass)
    void AddLumpedMassMatrix(
        Matrix& rMassMatrix,
        const double Mass) const;

    /// accumulates one gauss point's reaction/diffusion/convection terms into rDampingMatrix
    void AddDampingMatrixGaussPointContributions(
        Matrix& rDampingMatrix,
        const double ReactionTerm,
        const double EffectiveKinematicViscosity,
        const Vector& rVelocityConvectiveTerms,
        const double GaussWeight,
        const Vector& rGaussShapeFunctions,
        const Matrix& rGaussdNa_dNb) const;

    ///@}

private:
    ///@name Serialization
    ///@{

    friend class Serializer;

    // Only base-class data is serialized; this element adds no persistent state.
    void save(Serializer& rSerializer) const override
    {
        KRATOS_TRY

        KRATOS_SERIALIZE_SAVE_BASE_CLASS(rSerializer, Element);

        KRATOS_CATCH("");
    }

    void load(Serializer& rSerializer) override
    {
        KRATOS_TRY

        KRATOS_SERIALIZE_LOAD_BASE_CLASS(rSerializer, Element);

        KRATOS_CATCH("");
    }

    ///@}

}; // Class ConvectionDiffusionReactionElement
///@}
///@name Input and output
///@{
/// input stream function
/// NOTE(review): declared but no definition is visible in this header —
/// confirm a definition exists elsewhere before relying on operator>>.
template <unsigned int TDim, unsigned int TNumNodes, class TConvectionDiffusionReactionData>
inline std::istream& operator>>(
    std::istream& rIStream,
    ConvectionDiffusionReactionElement<TDim, TNumNodes, TConvectionDiffusionReactionData>& rThis);

/// output stream function: prints element info followed by its data
template <unsigned int TDim, unsigned int TNumNodes, class TConvectionDiffusionReactionData>
inline std::ostream& operator<<(
    std::ostream& rOStream,
    const ConvectionDiffusionReactionElement<TDim, TNumNodes, TConvectionDiffusionReactionData>& rThis)
{
    rThis.PrintInfo(rOStream);
    rOStream << " : " << std::endl;
    rThis.PrintData(rOStream);
    return rOStream;
}
///@}
} // namespace Kratos.
#endif // KRATOS_CONVECTION_DIFFUSION_REACTION_ELEMENT_H_INCLUDED defined
|
Sage-Bionetworks/Genie | genie_registry/cna.py | <gh_stars>1-10
import logging
import os
import pandas as pd
import synapseclient
from genie.example_filetype_format import FileTypeFormat
from genie import process_functions
logger = logging.getLogger(__name__)
def validateSymbol(gene, bedDf, returnMappedDf=True):
    '''
    Validate a gene symbol against a center's bed (seq assay) table.

    A symbol is valid if it appears in the bed table's Hugo_Symbol column.
    If it only matches the ID column it is remapped to the corresponding
    Hugo_Symbol; otherwise it cannot be released and maps to NaN.

    Args:
        gene: Gene name
        bedDf: Bed pandas dataframe with 'Hugo_Symbol' and 'ID' columns
        returnMappedDf: Return the (possibly remapped) gene. Defaults to True

    Returns:
        The mapped gene name (or NaN) when returnMappedDf is True,
        otherwise a boolean for whether the gene is valid.
    '''
    valid = False
    if sum(bedDf['Hugo_Symbol'] == gene) > 0:
        valid = True
    elif sum(bedDf['ID'] == gene) > 0:
        # Reassign instead of calling drop_duplicates(inplace=True) on a
        # boolean-mask slice, which can trigger pandas' SettingWithCopyWarning.
        mismatch = bedDf[bedDf['ID'] == gene].drop_duplicates()
        logger.info("{} will be remapped to {}".format(
            gene, mismatch['Hugo_Symbol'].values[0]))
        gene = mismatch['Hugo_Symbol'].values[0]
    else:
        logger.warning(
            "{} cannot be remapped and will not be released. The symbol "
            "must exist in your seq assay ids (bed files) and must be "
            "mappable to a gene.".format(gene))
        gene = float('nan')
    if returnMappedDf:
        return gene
    else:
        return valid
def makeCNARow(row, symbols):
    '''
    Make CNA Row (Deprecated function)

    CNA values are no longer stored in the database.

    Args:
        row: one row in the CNA file
        symbols: list of Gene symbols

    Returns:
        A two-line string: the symbols joined by commas, then the row
        values joined by commas, with every ".0" suffix stripped.
    '''
    header = ",".join(symbols)
    values = ",".join(row.astype(str))
    combined = "{symbols}\n{values}".format(symbols=header, values=values)
    return combined.replace(".0", "")
def mergeCNAvalues(x):
    """Collapse the CNA values of duplicated gene rows into one value.

    Rebuilds the input as a fresh Series first, because an apply()
    can leave a missing index value that makes dropna() fail.
    Returns the shared value when all non-null entries agree, the single
    non-zero value when only 0 and one other value appear, and NaN otherwise.
    """
    non_null = pd.Series(x.values).dropna()
    distinct = set(non_null.unique())
    if len(distinct) == 1:
        return x.tolist()[0]
    if len(distinct) <= 2:
        distinct.discard(0)
        if len(distinct) == 1:
            return list(distinct)[0]
    return float('nan')
def checkIfOneZero(x):
    # Sanity check: fail loudly unless the series holds a single distinct value.
    distinct = set(x.tolist())
    assert len(distinct) == 1, "Can only be one unique value"
class cna(FileTypeFormat):
    """Handler for GENIE copy number alteration (CNA) files.

    Validates and processes ``data_CNA_<center>.txt`` files: gene symbols
    are checked/remapped against the center's bed (seq assay) table,
    duplicated symbols are merged, and the processed file is stored
    to Synapse.
    """

    # File type identifier used by the processing framework.
    _fileType = "cna"

    # Keyword arguments the framework forwards to process_steps / _validate.
    _process_kwargs = ["newPath", 'databaseToSynIdMappingDf']

    _validation_kwargs = ['nosymbol_check', 'project_id']

    # VALIDATE FILENAME
    def _validateFilename(self, filePath):
        # The CNA file must be named data_CNA_<center>.txt.
        assert os.path.basename(filePath[0]) == \
            "data_CNA_{}.txt".format(self.center)

    def _process(self, cnaDf, databaseToSynIdMappingDf):
        """Normalize headers, remap symbols via the bed table, and merge
        rows that collapse onto the same Hugo_Symbol after remapping."""
        # Standardize the first column header to 'Hugo_Symbol'.
        cnaDf.rename(columns={
            cnaDf.columns[0]: cnaDf.columns[0].upper()}, inplace=True)
        cnaDf.rename(columns={
            "HUGO_SYMBOL": "Hugo_Symbol"}, inplace=True)
        # Drop an Entrez gene id column if one is present.
        index = [i for i, col in enumerate(cnaDf.columns)
                 if col.upper() == "ENTREZ_GENE_ID"]
        if len(index) > 0:
            del cnaDf[cnaDf.columns[index][0]]
        # Pull this center's bed (seq assay) rows to validate/remap symbols.
        bedSynId = databaseToSynIdMappingDf.Id[
            databaseToSynIdMappingDf['Database'] == "bed"][0]
        bed = self.syn.tableQuery(
            "select Hugo_Symbol, ID from {} where CENTER = '{}'" .format(
                bedSynId, self.center))
        bedDf = bed.asDataFrame()
        cnaDf['Hugo_Symbol'] = \
            cnaDf['Hugo_Symbol'].apply(lambda x: validateSymbol(x, bedDf))
        order = cnaDf.columns
        # Unmappable symbols were set to NaN by validateSymbol; drop them.
        cnaDf = cnaDf[~cnaDf['Hugo_Symbol'].isnull()]
        # cnaDf = cnaDf.applymap(str)
        # Merge rows that ended up with the same symbol after remapping.
        duplicatedGenes = pd.DataFrame()
        duplicated_symbols = cnaDf['Hugo_Symbol'][
            cnaDf['Hugo_Symbol'].duplicated()].unique()
        for i in duplicated_symbols:
            dups = cnaDf[cnaDf['Hugo_Symbol'] == i]
            newVal = dups[dups.columns[dups.columns != "Hugo_Symbol"]].apply(
                mergeCNAvalues)
            temp = pd.DataFrame(newVal).transpose()
            temp['Hugo_Symbol'] = i
            duplicatedGenes = duplicatedGenes.append(temp, sort=False)
        # keep=False removes every duplicated row; the merged rows are
        # re-appended below.
        cnaDf.drop_duplicates('Hugo_Symbol', keep=False, inplace=True)
        cnaDf = cnaDf.append(duplicatedGenes, sort=False)
        cnaDf = cnaDf[order]
        # Normalize sample column names to GENIE ids; leave Hugo_Symbol as-is.
        cnaDf.columns = [
            process_functions.checkGenieId(i, self.center)
            if i != "Hugo_Symbol" else i for i in cnaDf.columns]
        return(cnaDf)

    def process_steps(self, cnaDf, newPath, databaseToSynIdMappingDf):
        """Process the CNA dataframe, write it to newPath with blanks as NA,
        and store the file in the center's Synapse maf folder."""
        newCNA = self._process(cnaDf, databaseToSynIdMappingDf)
        centerMafSynId = databaseToSynIdMappingDf.Id[
            databaseToSynIdMappingDf['Database'] == "centerMaf"][0]
        if not newCNA.empty:
            cnaText = process_functions.removePandasDfFloat(newCNA)
            # Replace blank with NA's
            cnaText = cnaText.replace(
                "\t\t", "\tNA\t").replace(
                    "\t\t", "\tNA\t").replace(
                        '\t\n', "\tNA\n")
            with open(newPath, "w") as cnaFile:
                cnaFile.write(cnaText)
            self.syn.store(synapseclient.File(newPath, parent=centerMafSynId))
        return(newPath)

    def _validate(self, cnvDF, nosymbol_check, project_id):
        """Validate the CNA file: header, allowed values, and (unless
        nosymbol_check) symbol mappability and post-remap uniqueness.

        Returns:
            tuple: (total_error, warning) strings; empty means no issues.
        """
        total_error = ""
        warning = ""
        cnvDF.columns = [col.upper() for col in cnvDF.columns]
        if cnvDF.columns[0] != "HUGO_SYMBOL":
            total_error += "Your cnv file's first column must be Hugo_Symbol\n"
        haveColumn = process_functions.checkColExist(cnvDF, "HUGO_SYMBOL")
        if haveColumn:
            keepSymbols = cnvDF["HUGO_SYMBOL"]
            cnvDF.drop("HUGO_SYMBOL", axis=1, inplace=True)
            # if sum(cnvDF.apply(lambda x: sum(x.isnull()))) > 0:
            #     total_error += "Your cnv file must not have any empty values\n"
            if process_functions.checkColExist(cnvDF, "ENTREZ_GENE_ID"):
                del cnvDF['ENTREZ_GENE_ID']
            # cnvDF = cnvDF.fillna('')
            # NOTE(review): the error message below mentions -0.5, but
            # allowed_values does not include '-0.5' — confirm which is intended.
            allowed_values = ['-2.0', '-2', '-1.5', '-1.0', '-1',
                              '0.0', '0', '0.5', '1.0', '1', '1.5',
                              '2', '2.0', 'nan']
            if not all(cnvDF.applymap(lambda x: str(x) in allowed_values).all()):
                total_error += (
                    "All values must be NA/blank, -2, -1.5, -1, -0.5, "
                    "0, 0.5, 1, 1.5, or 2.\n")
            else:
                cnvDF['HUGO_SYMBOL'] = keepSymbols
        if haveColumn and not nosymbol_check:
            # Check that every symbol maps to a gene in this center's bed table.
            databaseToSynIdMappingDf = process_functions.get_synid_database_mappingdf(self.syn, project_id)
            bedSynId = process_functions.getDatabaseSynId(self.syn, "bed",
                databaseToSynIdMappingDf=databaseToSynIdMappingDf)
            bed = self.syn.tableQuery(
                "select Hugo_Symbol, ID from {} where "
                "CENTER = '{}'".format(bedSynId, self.center))
            bedDf = bed.asDataFrame()
            cnvDF['remapped'] = cnvDF['HUGO_SYMBOL'].apply(
                lambda x: validateSymbol(x, bedDf))
            cnvDF = cnvDF[~cnvDF['remapped'].isnull()]

            # Do not allow any duplicated genes after symbols
            # have been remapped
            if sum(cnvDF['remapped'].duplicated()) > 0:
                duplicated = cnvDF['remapped'].duplicated(keep=False)
                total_error += (
                    "Your CNA file has duplicated Hugo_Symbols "
                    "(After remapping of genes): {} -> {}.\n".format(
                        ",".join(cnvDF['HUGO_SYMBOL'][duplicated]),
                        ",".join(cnvDF['remapped'][duplicated])))
        return(total_error, warning)
|
ladaegorova18/trik-studio | qrutils/interpreter/blocks/preconditionalLoopBlock.cpp | /* Copyright 2019 CyberTech Labs Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. */
#include "preconditionalLoopBlock.h"
using namespace qReal::interpretation::blocks;
// Default constructor; the body/next block ids are resolved later in initNextBlocks().
PreconditionalLoopBlock::PreconditionalLoopBlock()
{
}
void PreconditionalLoopBlock::run()
{
	// Evaluate the loop condition; unless evaluation raised an error, route
	// control into the loop body or past the loop, like a "while" statement.
	const bool conditionHolds = eval<bool>("Condition");
	if (errorsOccured()) {
		return;
	}

	emit done(conditionHolds ? mLoopStartBlockId : mNextBlockId);
}
bool PreconditionalLoopBlock::initNextBlocks()
{
	// Resolves the two outgoing links of this loop block:
	//  - the link whose "Guard" property is "iteration" -> mLoopStartBlockId (loop body);
	//  - the single unmarked link -> mNextBlockId (control flow after the loop).
	bool conditionFound = false;
	bool nextFound = false;
	const auto & links = mGraphicalModelApi->graphicalRepoApi().outgoingLinks(id());
	for (auto && linkId : links) {
		const auto & targetBlockId = mGraphicalModelApi->graphicalRepoApi().otherEntityFromLink(linkId, id());
		if (targetBlockId.isNull()) {
			error(tr("Outgoing link is not connected"));
			return false;
		}

		const auto & guard = stringProperty(linkId, "Guard").toLower();
		if (guard == "iteration") {
			if (!conditionFound) {
				mLoopStartBlockId = targetBlockId;
				conditionFound = true;
			} else {
				error(tr("Two links marked as \"body\" found"));
				return false;
			}
		} else if (guard == "") {
			if (!nextFound) {
				mNextBlockId = targetBlockId;
				nextFound = true;
			} else {
				// NOTE(review): this branch fires for a SECOND unmarked link but reuses
				// the "must be a link with body marker" message — looks like a
				// copy-paste; confirm the intended wording before changing it.
				error(tr("There must be a link with \"body\" marker on it"));
				return false;
			}
		}
	}

	if (!conditionFound) {
		error(tr("There must be a link with \"body\" marker on it"));
		return false;
	}

	if (!nextFound) {
		error(tr("There must be a non-marked outgoing link"));
		return false;
	}

	return true;
}
|
fernandoporazzi/brazilian-utils | utilities/cities/cities.go | <reponame>fernandoporazzi/brazilian-utils<filename>utilities/cities/cities.go<gh_stars>0
package cities
import (
"fmt"
"log"
"github.com/fernandoporazzi/brazilian-utils/data"
"github.com/fernandoporazzi/brazilian-utils/utilities/states"
)
// GetCities returns all Brazilian cities if no state is passed as argument.
// When a single state code or name is given, only that state's cities are
// returned (an unknown state yields a nil slice). More than one argument is
// a programming error and terminates the process.
func GetCities(params ...string) []string {
	if len(params) > 1 {
		log.Fatal("Expected zero or one argument as State, received " + fmt.Sprintf("%v", len(params)))
	}

	if len(params) == 1 {
		query := params[0]
		code := ""
		for _, s := range states.GetStates() {
			if s.Code == query || s.Name == query {
				code = s.Code
				break
			}
		}
		return data.Cities[code]
	}

	all := []string{}
	for _, cities := range data.Cities {
		all = append(all, cities...)
	}
	return all
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.